diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE.md similarity index 83% rename from .github/ISSUE_TEMPLATE/bug_report.md rename to .github/ISSUE_TEMPLATE.md index 9c0577106e..3a375bcbe9 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE.md @@ -1,19 +1,10 @@ ---- -name: Bug report -about: Report a bug -title: '' -labels: type:bug -assignees: '' - ---- - diff --git a/README.md b/README.md index 364034b898..5214b0e547 100644 --- a/README.md +++ b/README.md @@ -960,11 +960,6 @@ The values of the following two required variables will be provided by the admin * The value of this attribute will be used by Overleaf as the external user ID, defaults to `id`. Other possible reasonable values are `email` and `username` (corresponding to `preferred_username` OIDC claim). -- `OVERLEAF_OIDC_DISABLE_JIT_ACCOUNT_CREATION` - * If set to `true`, disables Just-in-Time (JIT) account creation for OIDC users. Only users with pre-existing accounts can log in. - An admin must manually create the user account using the OIDC user’s email address, with either a strong random password or, preferably, - without the `hashedPassword` field at all. The OIDC user will be able to log in only after that. Default: `false`. - - `OVERLEAF_OIDC_UPDATE_USER_DETAILS_ON_LOGIN` * If set to `true`, updates the user `first_name` and `last_name` field on login, and disables the user details form on `/user/settings` page. 
diff --git a/develop/docker-compose.yml b/develop/docker-compose.yml index e5b84c38b3..ab8184a5f8 100644 --- a/develop/docker-compose.yml +++ b/develop/docker-compose.yml @@ -25,10 +25,10 @@ services: env_file: - dev.env environment: + - DOCKER_RUNNER=true - TEXLIVE_IMAGE=texlive-full # docker build texlive -t texlive-full - - SANDBOXED_COMPILES=true - - SANDBOXED_COMPILES_HOST_DIR_COMPILES=${PWD}/compiles - - SANDBOXED_COMPILES_HOST_DIR_OUTPUT=${PWD}/output + - COMPILES_HOST_DIR=${PWD}/compiles + - OUTPUT_HOST_DIR=${PWD}/output user: root volumes: - ${PWD}/compiles:/overleaf/services/clsi/compiles diff --git a/libraries/access-token-encryptor/buildscript.txt b/libraries/access-token-encryptor/buildscript.txt index 8ce12073ea..74c3bbbd24 100644 --- a/libraries/access-token-encryptor/buildscript.txt +++ b/libraries/access-token-encryptor/buildscript.txt @@ -1,6 +1,6 @@ access-token-encryptor --dependencies=None ---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--docker-repos=gcr.io/overleaf-ops --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/fetch-utils/buildscript.txt b/libraries/fetch-utils/buildscript.txt index 35e8eed85b..91548ff7c6 100644 --- a/libraries/fetch-utils/buildscript.txt +++ b/libraries/fetch-utils/buildscript.txt @@ -1,6 +1,6 @@ fetch-utils --dependencies=None ---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--docker-repos=gcr.io/overleaf-ops --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/logger/buildscript.txt b/libraries/logger/buildscript.txt index a3d1cc0646..9008707b0e 100644 --- a/libraries/logger/buildscript.txt +++ b/libraries/logger/buildscript.txt @@ -1,6 +1,6 @@ logger --dependencies=None ---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--docker-repos=gcr.io/overleaf-ops --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/metrics/buildscript.txt b/libraries/metrics/buildscript.txt index 58ff195d95..2c2e5d7531 100644 
--- a/libraries/metrics/buildscript.txt +++ b/libraries/metrics/buildscript.txt @@ -1,6 +1,6 @@ metrics --dependencies=None ---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--docker-repos=gcr.io/overleaf-ops --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/mongo-utils/buildscript.txt b/libraries/mongo-utils/buildscript.txt index 35ca540bfb..bda8d4f734 100644 --- a/libraries/mongo-utils/buildscript.txt +++ b/libraries/mongo-utils/buildscript.txt @@ -1,6 +1,6 @@ mongo-utils --dependencies=None ---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--docker-repos=gcr.io/overleaf-ops --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/o-error/buildscript.txt b/libraries/o-error/buildscript.txt index c61679157e..a4134b4b60 100644 --- a/libraries/o-error/buildscript.txt +++ b/libraries/o-error/buildscript.txt @@ -1,6 +1,6 @@ o-error --dependencies=None ---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--docker-repos=gcr.io/overleaf-ops --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/object-persistor/buildscript.txt b/libraries/object-persistor/buildscript.txt index d5113ce910..75d2e09382 100644 --- a/libraries/object-persistor/buildscript.txt +++ b/libraries/object-persistor/buildscript.txt @@ -1,6 +1,6 @@ object-persistor --dependencies=None ---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--docker-repos=gcr.io/overleaf-ops --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/overleaf-editor-core/buildscript.txt b/libraries/overleaf-editor-core/buildscript.txt index 25a221232a..9b6508663b 100644 --- a/libraries/overleaf-editor-core/buildscript.txt +++ b/libraries/overleaf-editor-core/buildscript.txt @@ -1,6 +1,6 @@ overleaf-editor-core --dependencies=None ---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--docker-repos=gcr.io/overleaf-ops --env-add= --env-pass-through= --esmock-loader=False diff --git 
a/libraries/overleaf-editor-core/lib/file_data/clear_tracking_props.js b/libraries/overleaf-editor-core/lib/file_data/clear_tracking_props.js index b3ddbab7d8..ba7f0bf00b 100644 --- a/libraries/overleaf-editor-core/lib/file_data/clear_tracking_props.js +++ b/libraries/overleaf-editor-core/lib/file_data/clear_tracking_props.js @@ -1,7 +1,7 @@ // @ts-check /** - * @import { ClearTrackingPropsRawData, TrackingDirective } from '../types' + * @import { ClearTrackingPropsRawData } from '../types' */ class ClearTrackingProps { @@ -11,27 +11,12 @@ class ClearTrackingProps { /** * @param {any} other - * @returns {other is ClearTrackingProps} + * @returns {boolean} */ equals(other) { return other instanceof ClearTrackingProps } - /** - * @param {TrackingDirective} other - * @returns {other is ClearTrackingProps} - */ - canMergeWith(other) { - return other instanceof ClearTrackingProps - } - - /** - * @param {TrackingDirective} other - */ - mergeWith(other) { - return this - } - /** * @returns {ClearTrackingPropsRawData} */ diff --git a/libraries/overleaf-editor-core/lib/file_data/lazy_string_file_data.js b/libraries/overleaf-editor-core/lib/file_data/lazy_string_file_data.js index abc720d10c..bc11b3e98d 100644 --- a/libraries/overleaf-editor-core/lib/file_data/lazy_string_file_data.js +++ b/libraries/overleaf-editor-core/lib/file_data/lazy_string_file_data.js @@ -11,7 +11,7 @@ const EditOperation = require('../operation/edit_operation') const EditOperationBuilder = require('../operation/edit_operation_builder') /** - * @import { BlobStore, ReadonlyBlobStore, RangesBlob, RawHashFileData, RawLazyStringFileData } from '../types' + * @import { BlobStore, ReadonlyBlobStore, RangesBlob, RawFileData, RawLazyStringFileData } from '../types' */ class LazyStringFileData extends FileData { @@ -159,11 +159,11 @@ class LazyStringFileData extends FileData { /** @inheritdoc * @param {BlobStore} blobStore - * @return {Promise} + * @return {Promise} */ async store(blobStore) { if 
(this.operations.length === 0) { - /** @type RawHashFileData */ + /** @type RawFileData */ const raw = { hash: this.hash } if (this.rangesHash) { raw.rangesHash = this.rangesHash @@ -171,11 +171,9 @@ class LazyStringFileData extends FileData { return raw } const eager = await this.toEager(blobStore) - const raw = await eager.store(blobStore) - this.hash = raw.hash - this.rangesHash = raw.rangesHash this.operations.length = 0 - return raw + /** @type RawFileData */ + return await eager.store(blobStore) } } diff --git a/libraries/overleaf-editor-core/lib/file_data/string_file_data.js b/libraries/overleaf-editor-core/lib/file_data/string_file_data.js index c78c1e0414..48df633461 100644 --- a/libraries/overleaf-editor-core/lib/file_data/string_file_data.js +++ b/libraries/overleaf-editor-core/lib/file_data/string_file_data.js @@ -8,7 +8,7 @@ const CommentList = require('./comment_list') const TrackedChangeList = require('./tracked_change_list') /** - * @import { StringFileRawData, RawHashFileData, BlobStore, CommentRawData } from "../types" + * @import { StringFileRawData, RawFileData, BlobStore, CommentRawData } from "../types" * @import { TrackedChangeRawData, RangesBlob } from "../types" * @import EditOperation from "../operation/edit_operation" */ @@ -139,7 +139,7 @@ class StringFileData extends FileData { /** * @inheritdoc * @param {BlobStore} blobStore - * @return {Promise} + * @return {Promise} */ async store(blobStore) { const blob = await blobStore.putString(this.content) diff --git a/libraries/overleaf-editor-core/lib/file_data/tracked_change.js b/libraries/overleaf-editor-core/lib/file_data/tracked_change.js index e789a427b0..d0e6517d0f 100644 --- a/libraries/overleaf-editor-core/lib/file_data/tracked_change.js +++ b/libraries/overleaf-editor-core/lib/file_data/tracked_change.js @@ -84,21 +84,6 @@ class TrackedChange { ) ) } - - /** - * Return an equivalent tracked change whose extent is limited to the given - * range - * - * @param {Range} range - * 
@returns {TrackedChange | null} - the result or null if the intersection is empty - */ - intersectRange(range) { - const intersection = this.range.intersect(range) - if (intersection == null) { - return null - } - return new TrackedChange(intersection, this.tracking) - } } module.exports = TrackedChange diff --git a/libraries/overleaf-editor-core/lib/file_data/tracked_change_list.js b/libraries/overleaf-editor-core/lib/file_data/tracked_change_list.js index b302865c70..263b37ab50 100644 --- a/libraries/overleaf-editor-core/lib/file_data/tracked_change_list.js +++ b/libraries/overleaf-editor-core/lib/file_data/tracked_change_list.js @@ -2,11 +2,9 @@ const Range = require('../range') const TrackedChange = require('./tracked_change') const TrackingProps = require('../file_data/tracking_props') -const { InsertOp, RemoveOp, RetainOp } = require('../operation/scan_op') /** * @import { TrackingDirective, TrackedChangeRawData } from "../types" - * @import TextOperation from "../operation/text_operation" */ class TrackedChangeList { @@ -60,22 +58,6 @@ class TrackedChangeList { return this._trackedChanges.filter(change => range.contains(change.range)) } - /** - * Returns tracked changes that overlap with the given range - * @param {Range} range - * @returns {TrackedChange[]} - */ - intersectRange(range) { - const changes = [] - for (const change of this._trackedChanges) { - const intersection = change.intersectRange(range) - if (intersection != null) { - changes.push(intersection) - } - } - return changes - } - /** * Returns the tracking props for a given range. 
* @param {Range} range @@ -107,8 +89,6 @@ class TrackedChangeList { /** * Collapses consecutive (and compatible) ranges - * - * @private * @returns {void} */ _mergeRanges() { @@ -137,28 +117,12 @@ class TrackedChangeList { } /** - * Apply an insert operation * * @param {number} cursor * @param {string} insertedText * @param {{tracking?: TrackingProps}} opts */ applyInsert(cursor, insertedText, opts = {}) { - this._applyInsert(cursor, insertedText, opts) - this._mergeRanges() - } - - /** - * Apply an insert operation - * - * This method will not merge ranges at the end - * - * @private - * @param {number} cursor - * @param {string} insertedText - * @param {{tracking?: TrackingProps}} [opts] - */ - _applyInsert(cursor, insertedText, opts = {}) { const newTrackedChanges = [] for (const trackedChange of this._trackedChanges) { if ( @@ -207,29 +171,15 @@ class TrackedChangeList { newTrackedChanges.push(newTrackedChange) } this._trackedChanges = newTrackedChanges + this._mergeRanges() } /** - * Apply a delete operation to the list of tracked changes * * @param {number} cursor * @param {number} length */ applyDelete(cursor, length) { - this._applyDelete(cursor, length) - this._mergeRanges() - } - - /** - * Apply a delete operation to the list of tracked changes - * - * This method will not merge ranges at the end - * - * @private - * @param {number} cursor - * @param {number} length - */ - _applyDelete(cursor, length) { const newTrackedChanges = [] for (const trackedChange of this._trackedChanges) { const deletedRange = new Range(cursor, length) @@ -255,31 +205,15 @@ class TrackedChangeList { } } this._trackedChanges = newTrackedChanges - } - - /** - * Apply a retain operation to the list of tracked changes - * - * @param {number} cursor - * @param {number} length - * @param {{tracking?: TrackingDirective}} [opts] - */ - applyRetain(cursor, length, opts = {}) { - this._applyRetain(cursor, length, opts) this._mergeRanges() } /** - * Apply a retain operation to the list of 
tracked changes - * - * This method will not merge ranges at the end - * - * @private * @param {number} cursor * @param {number} length * @param {{tracking?: TrackingDirective}} opts */ - _applyRetain(cursor, length, opts = {}) { + applyRetain(cursor, length, opts = {}) { // If there's no tracking info, leave everything as-is if (!opts.tracking) { return @@ -335,31 +269,6 @@ class TrackedChangeList { newTrackedChanges.push(newTrackedChange) } this._trackedChanges = newTrackedChanges - } - - /** - * Apply a text operation to the list of tracked changes - * - * Ranges are merged only once at the end, for performance and to avoid - * problematic edge cases where intermediate ranges get incorrectly merged. - * - * @param {TextOperation} operation - */ - applyTextOperation(operation) { - // this cursor tracks the destination document that gets modified as - // operations are applied to it. - let cursor = 0 - for (const op of operation.ops) { - if (op instanceof InsertOp) { - this._applyInsert(cursor, op.insertion, { tracking: op.tracking }) - cursor += op.insertion.length - } else if (op instanceof RemoveOp) { - this._applyDelete(cursor, op.length) - } else if (op instanceof RetainOp) { - this._applyRetain(cursor, op.length, { tracking: op.tracking }) - cursor += op.length - } - } this._mergeRanges() } } diff --git a/libraries/overleaf-editor-core/lib/file_data/tracking_props.js b/libraries/overleaf-editor-core/lib/file_data/tracking_props.js index 82d731a232..75ec95c566 100644 --- a/libraries/overleaf-editor-core/lib/file_data/tracking_props.js +++ b/libraries/overleaf-editor-core/lib/file_data/tracking_props.js @@ -62,35 +62,6 @@ class TrackingProps { this.ts.getTime() === other.ts.getTime() ) } - - /** - * Are these tracking props compatible with the other tracking props for merging - * ranges? 
- * - * @param {TrackingDirective} other - * @returns {other is TrackingProps} - */ - canMergeWith(other) { - if (!(other instanceof TrackingProps)) { - return false - } - return this.type === other.type && this.userId === other.userId - } - - /** - * Merge two tracking props - * - * Assumes that `canMerge(other)` returns true - * - * @param {TrackingDirective} other - */ - mergeWith(other) { - if (!this.canMergeWith(other)) { - throw new Error('Cannot merge with incompatible tracking props') - } - const ts = this.ts <= other.ts ? this.ts : other.ts - return new TrackingProps(this.type, this.userId, ts) - } } module.exports = TrackingProps diff --git a/libraries/overleaf-editor-core/lib/operation/scan_op.js b/libraries/overleaf-editor-core/lib/operation/scan_op.js index fd322459cc..4f179f24b4 100644 --- a/libraries/overleaf-editor-core/lib/operation/scan_op.js +++ b/libraries/overleaf-editor-core/lib/operation/scan_op.js @@ -175,7 +175,7 @@ class InsertOp extends ScanOp { return false } if (this.tracking) { - if (!other.tracking || !this.tracking.canMergeWith(other.tracking)) { + if (!this.tracking.equals(other.tracking)) { return false } } else if (other.tracking) { @@ -198,10 +198,7 @@ class InsertOp extends ScanOp { throw new Error('Cannot merge with incompatible operation') } this.insertion += other.insertion - if (this.tracking != null && other.tracking != null) { - this.tracking = this.tracking.mergeWith(other.tracking) - } - // We already have the same commentIds + // We already have the same tracking info and commentIds } /** @@ -309,13 +306,9 @@ class RetainOp extends ScanOp { return false } if (this.tracking) { - if (!other.tracking || !this.tracking.canMergeWith(other.tracking)) { - return false - } - } else if (other.tracking) { - return false + return this.tracking.equals(other.tracking) } - return true + return !other.tracking } /** @@ -326,9 +319,6 @@ class RetainOp extends ScanOp { throw new Error('Cannot merge with incompatible operation') } 
this.length += other.length - if (this.tracking != null && other.tracking != null) { - this.tracking = this.tracking.mergeWith(other.tracking) - } } /** diff --git a/libraries/overleaf-editor-core/lib/operation/text_operation.js b/libraries/overleaf-editor-core/lib/operation/text_operation.js index 61c7f124b4..148570fa42 100644 --- a/libraries/overleaf-editor-core/lib/operation/text_operation.js +++ b/libraries/overleaf-editor-core/lib/operation/text_operation.js @@ -314,18 +314,25 @@ class TextOperation extends EditOperation { str ) } + file.trackedChanges.applyRetain(result.length, op.length, { + tracking: op.tracking, + }) result += str.slice(inputCursor, inputCursor + op.length) inputCursor += op.length } else if (op instanceof InsertOp) { if (containsNonBmpChars(op.insertion)) { throw new InvalidInsertionError(str, op.toJSON()) } + file.trackedChanges.applyInsert(result.length, op.insertion, { + tracking: op.tracking, + }) file.comments.applyInsert( new Range(result.length, op.insertion.length), { commentIds: op.commentIds } ) result += op.insertion } else if (op instanceof RemoveOp) { + file.trackedChanges.applyDelete(result.length, op.length) file.comments.applyDelete(new Range(result.length, op.length)) inputCursor += op.length } else { @@ -345,8 +352,6 @@ class TextOperation extends EditOperation { throw new TextOperation.TooLongError(operation, result.length) } - file.trackedChanges.applyTextOperation(this) - file.content = result } @@ -395,36 +400,44 @@ class TextOperation extends EditOperation { for (let i = 0, l = ops.length; i < l; i++) { const op = ops[i] if (op instanceof RetainOp) { - if (op.tracking) { - // Where we need to end up after the retains - const target = strIndex + op.length - // A previous retain could have overriden some tracking info. Now we - // need to restore it. 
- const previousChanges = previousState.trackedChanges.intersectRange( - new Range(strIndex, op.length) - ) + // Where we need to end up after the retains + const target = strIndex + op.length + // A previous retain could have overriden some tracking info. Now we + // need to restore it. + const previousRanges = previousState.trackedChanges.inRange( + new Range(strIndex, op.length) + ) - for (const change of previousChanges) { - if (strIndex < change.range.start) { - inverse.retain(change.range.start - strIndex, { - tracking: new ClearTrackingProps(), - }) - strIndex = change.range.start - } - inverse.retain(change.range.length, { - tracking: change.tracking, + let removeTrackingInfoIfNeeded + if (op.tracking) { + removeTrackingInfoIfNeeded = new ClearTrackingProps() + } + + for (const trackedChange of previousRanges) { + if (strIndex < trackedChange.range.start) { + inverse.retain(trackedChange.range.start - strIndex, { + tracking: removeTrackingInfoIfNeeded, }) - strIndex += change.range.length + strIndex = trackedChange.range.start } - if (strIndex < target) { - inverse.retain(target - strIndex, { - tracking: new ClearTrackingProps(), + if (trackedChange.range.end < strIndex + op.length) { + inverse.retain(trackedChange.range.length, { + tracking: trackedChange.tracking, }) - strIndex = target + strIndex = trackedChange.range.end } - } else { - inverse.retain(op.length) - strIndex += op.length + if (trackedChange.range.end !== strIndex) { + // No need to split the range at the end + const [left] = trackedChange.range.splitAt(strIndex) + inverse.retain(left.length, { tracking: trackedChange.tracking }) + strIndex = left.end + } + } + if (strIndex < target) { + inverse.retain(target - strIndex, { + tracking: removeTrackingInfoIfNeeded, + }) + strIndex = target } } else if (op instanceof InsertOp) { inverse.remove(op.insertion.length) diff --git a/libraries/overleaf-editor-core/lib/range.js b/libraries/overleaf-editor-core/lib/range.js index b3fb2bd78b..bc47632f92 
100644 --- a/libraries/overleaf-editor-core/lib/range.js +++ b/libraries/overleaf-editor-core/lib/range.js @@ -86,32 +86,10 @@ class Range { } /** - * Does this range overlap another range? - * - * Overlapping means that the two ranges have at least one character in common - * - * @param {Range} other - the other range + * @param {Range} range */ - overlaps(other) { - return this.start < other.end && this.end > other.start - } - - /** - * Does this range overlap the start of another range? - * - * @param {Range} other - the other range - */ - overlapsStart(other) { - return this.start <= other.start && this.end > other.start - } - - /** - * Does this range overlap the end of another range? - * - * @param {Range} other - the other range - */ - overlapsEnd(other) { - return this.start < other.end && this.end >= other.end + overlaps(range) { + return this.start < range.end && this.end > range.start } /** @@ -249,26 +227,6 @@ class Range { ) return [rangeUpToCursor, rangeAfterCursor] } - - /** - * Returns the intersection of this range with another range - * - * @param {Range} other - the other range - * @return {Range | null} the intersection or null if the intersection is empty - */ - intersect(other) { - if (this.contains(other)) { - return other - } else if (other.contains(this)) { - return this - } else if (other.overlapsStart(this)) { - return new Range(this.pos, other.end - this.start) - } else if (other.overlapsEnd(this)) { - return new Range(other.pos, this.end - other.start) - } else { - return null - } - } } module.exports = Range diff --git a/libraries/overleaf-editor-core/test/lazy_string_file_data.test.js b/libraries/overleaf-editor-core/test/lazy_string_file_data.test.js index 946e6cd5d1..4c9f4aa497 100644 --- a/libraries/overleaf-editor-core/test/lazy_string_file_data.test.js +++ b/libraries/overleaf-editor-core/test/lazy_string_file_data.test.js @@ -193,13 +193,4 @@ describe('LazyStringFileData', function () { 
expect(fileData.getStringLength()).to.equal(longString.length) expect(fileData.getOperations()).to.have.length(1) }) - - it('truncates its operations after being stored', async function () { - const testHash = File.EMPTY_FILE_HASH - const fileData = new LazyStringFileData(testHash, undefined, 0) - fileData.edit(new TextOperation().insert('abc')) - const stored = await fileData.store(this.blobStore) - expect(fileData.hash).to.equal(stored.hash) - expect(fileData.operations).to.deep.equal([]) - }) }) diff --git a/libraries/overleaf-editor-core/test/range.test.js b/libraries/overleaf-editor-core/test/range.test.js index 9a048d5c03..daad8fd6ed 100644 --- a/libraries/overleaf-editor-core/test/range.test.js +++ b/libraries/overleaf-editor-core/test/range.test.js @@ -1,3 +1,4 @@ +// @ts-check 'use strict' const { expect } = require('chai') @@ -448,44 +449,4 @@ describe('Range', function () { expect(() => range.insertAt(16, 3)).to.throw() }) }) - - describe('intersect', function () { - it('should handle partially overlapping ranges', function () { - const range1 = new Range(5, 10) - const range2 = new Range(3, 6) - const intersection1 = range1.intersect(range2) - expect(intersection1.pos).to.equal(5) - expect(intersection1.length).to.equal(4) - const intersection2 = range2.intersect(range1) - expect(intersection2.pos).to.equal(5) - expect(intersection2.length).to.equal(4) - }) - - it('should intersect with itself', function () { - const range = new Range(5, 10) - const intersection = range.intersect(range) - expect(intersection.pos).to.equal(5) - expect(intersection.length).to.equal(10) - }) - - it('should handle nested ranges', function () { - const range1 = new Range(5, 10) - const range2 = new Range(7, 2) - const intersection1 = range1.intersect(range2) - expect(intersection1.pos).to.equal(7) - expect(intersection1.length).to.equal(2) - const intersection2 = range2.intersect(range1) - expect(intersection2.pos).to.equal(7) - expect(intersection2.length).to.equal(2) - }) 
- - it('should handle disconnected ranges', function () { - const range1 = new Range(5, 10) - const range2 = new Range(20, 30) - const intersection1 = range1.intersect(range2) - expect(intersection1).to.be.null - const intersection2 = range2.intersect(range1) - expect(intersection2).to.be.null - }) - }) }) diff --git a/libraries/overleaf-editor-core/test/scan_op.test.js b/libraries/overleaf-editor-core/test/scan_op.test.js index 98f4834d48..80ab69114e 100644 --- a/libraries/overleaf-editor-core/test/scan_op.test.js +++ b/libraries/overleaf-editor-core/test/scan_op.test.js @@ -107,7 +107,7 @@ describe('RetainOp', function () { expect(op1.equals(new RetainOp(3))).to.be.true }) - it('cannot merge with another RetainOp if the tracking user is different', function () { + it('cannot merge with another RetainOp if tracking info is different', function () { const op1 = new RetainOp( 4, new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) @@ -120,14 +120,14 @@ describe('RetainOp', function () { expect(() => op1.mergeWith(op2)).to.throw(Error) }) - it('can merge with another RetainOp if the tracking user is the same', function () { + it('can merge with another RetainOp if tracking info is the same', function () { const op1 = new RetainOp( 4, new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) ) const op2 = new RetainOp( 4, - new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:01.000Z')) + new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) ) op1.mergeWith(op2) expect( @@ -310,7 +310,7 @@ describe('InsertOp', function () { expect(() => op1.mergeWith(op2)).to.throw(Error) }) - it('cannot merge with another InsertOp if tracking user is different', function () { + it('cannot merge with another InsertOp if tracking info is different', function () { const op1 = new InsertOp( 'a', new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) @@ -323,7 +323,7 @@ describe('InsertOp', function 
() { expect(() => op1.mergeWith(op2)).to.throw(Error) }) - it('can merge with another InsertOp if tracking user and comment info is the same', function () { + it('can merge with another InsertOp if tracking and comment info is the same', function () { const op1 = new InsertOp( 'a', new TrackingProps( @@ -338,7 +338,7 @@ describe('InsertOp', function () { new TrackingProps( 'insert', 'user1', - new Date('2024-01-01T00:00:01.000Z') + new Date('2024-01-01T00:00:00.000Z') ), ['1', '2'] ) diff --git a/libraries/overleaf-editor-core/test/text_operation.test.js b/libraries/overleaf-editor-core/test/text_operation.test.js index 43b8c707a6..fa9bc62dc3 100644 --- a/libraries/overleaf-editor-core/test/text_operation.test.js +++ b/libraries/overleaf-editor-core/test/text_operation.test.js @@ -322,47 +322,6 @@ describe('TextOperation', function () { new TextOperation().retain(4).remove(4).retain(3) ) }) - - it('undoing a tracked delete restores the tracked changes', function () { - expectInverseToLeadToInitialState( - new StringFileData( - 'the quick brown fox jumps over the lazy dog', - undefined, - [ - { - range: { pos: 5, length: 5 }, - tracking: { - ts: '2023-01-01T00:00:00.000Z', - type: 'insert', - userId: 'user1', - }, - }, - { - range: { pos: 12, length: 3 }, - tracking: { - ts: '2023-01-01T00:00:00.000Z', - type: 'delete', - userId: 'user1', - }, - }, - { - range: { pos: 18, length: 5 }, - tracking: { - ts: '2023-01-01T00:00:00.000Z', - type: 'insert', - userId: 'user1', - }, - }, - ] - ), - new TextOperation() - .retain(7) - .retain(13, { - tracking: new TrackingProps('delete', 'user1', new Date()), - }) - .retain(23) - ) - }) }) describe('compose', function () { diff --git a/libraries/promise-utils/buildscript.txt b/libraries/promise-utils/buildscript.txt index 32c9fc8793..73dec381c1 100644 --- a/libraries/promise-utils/buildscript.txt +++ b/libraries/promise-utils/buildscript.txt @@ -1,6 +1,6 @@ promise-utils --dependencies=None 
---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--docker-repos=gcr.io/overleaf-ops --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/ranges-tracker/buildscript.txt b/libraries/ranges-tracker/buildscript.txt index be28fc1d80..6276182679 100644 --- a/libraries/ranges-tracker/buildscript.txt +++ b/libraries/ranges-tracker/buildscript.txt @@ -1,6 +1,6 @@ ranges-tracker --dependencies=None ---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--docker-repos=gcr.io/overleaf-ops --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/redis-wrapper/buildscript.txt b/libraries/redis-wrapper/buildscript.txt index 395bc706ac..1e4489a655 100644 --- a/libraries/redis-wrapper/buildscript.txt +++ b/libraries/redis-wrapper/buildscript.txt @@ -1,6 +1,6 @@ redis-wrapper --dependencies=None ---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--docker-repos=gcr.io/overleaf-ops --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/settings/buildscript.txt b/libraries/settings/buildscript.txt index d4daff96d5..925234f561 100644 --- a/libraries/settings/buildscript.txt +++ b/libraries/settings/buildscript.txt @@ -1,6 +1,6 @@ settings --dependencies=None ---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--docker-repos=gcr.io/overleaf-ops --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/stream-utils/buildscript.txt b/libraries/stream-utils/buildscript.txt index 1da6bdade9..a04310e77f 100644 --- a/libraries/stream-utils/buildscript.txt +++ b/libraries/stream-utils/buildscript.txt @@ -1,6 +1,6 @@ stream-utils --dependencies=None ---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--docker-repos=gcr.io/overleaf-ops --env-add= --env-pass-through= --esmock-loader=False diff --git a/package-lock.json b/package-lock.json index 2a3bb7696d..73b722b1f5 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5943,16 +5943,15 @@ "integrity": 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/@grpc/grpc-js": { - "version": "1.13.4", - "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.13.4.tgz", - "integrity": "sha512-GsFaMXCkMqkKIvwCQjCrwH+GHbPKBjhwo/8ZuUkWHqbI73Kky9I+pQltrlT0+MWpedCoosda53lgjYfyEPgxBg==", - "license": "Apache-2.0", + "version": "1.8.22", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.8.22.tgz", + "integrity": "sha512-oAjDdN7fzbUi+4hZjKG96MR6KTEubAeMpQEb+77qy+3r0Ua5xTFuie6JOLr4ZZgl5g+W5/uRTS2M1V8mVAFPuA==", "dependencies": { - "@grpc/proto-loader": "^0.7.13", - "@js-sdsl/ordered-map": "^4.4.2" + "@grpc/proto-loader": "^0.7.0", + "@types/node": ">=12.12.47" }, "engines": { - "node": ">=12.10.0" + "node": "^8.13.0 || >=10.10.0" } }, "node_modules/@grpc/proto-loader": { @@ -6990,18 +6989,6 @@ "dev": true, "optional": true }, - "node_modules/@noble/hashes": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.8.0.tgz", - "integrity": "sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==", - "license": "MIT", - "engines": { - "node": "^14.21.3 || >=16" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - } - }, "node_modules/@node-oauth/formats": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/@node-oauth/formats/-/formats-1.0.0.tgz", @@ -8656,15 +8643,6 @@ "resolved": "services/web", "link": true }, - "node_modules/@paralleldrive/cuid2": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/@paralleldrive/cuid2/-/cuid2-2.2.2.tgz", - "integrity": "sha512-ZOBkgDwEdoYVlSeRbYYXs0S9MejQofiVYoTbKzy/6GQa39/q5tQU2IX46+shYnUkpEl3wc+J6wRlar7r2EK2xA==", - "license": "MIT", - "dependencies": { - "@noble/hashes": "^1.1.5" - } - }, "node_modules/@phosphor-icons/react": { "version": "2.1.7", "resolved": "https://registry.npmjs.org/@phosphor-icons/react/-/react-2.1.7.tgz", @@ -11597,6 
+11575,29 @@ "storybook": "^8.2.0 || ^8.3.0-0 || ^8.4.0-0 || ^8.5.0-0 || ^8.6.0-0" } }, + "node_modules/@stripe/react-stripe-js": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/@stripe/react-stripe-js/-/react-stripe-js-3.5.0.tgz", + "integrity": "sha512-oo5J2SNbuAUjE9XmQv/SOD7vgZCa1Y9OcZyRAfvQPkyrDrru35sg5c64ANdHEmOWUibism3+25rKdARSw3HOfA==", + "license": "MIT", + "dependencies": { + "prop-types": "^15.7.2" + }, + "peerDependencies": { + "@stripe/stripe-js": ">=1.44.1 <7.0.0", + "react": ">=16.8.0 <20.0.0", + "react-dom": ">=16.8.0 <20.0.0" + } + }, + "node_modules/@stripe/stripe-js": { + "version": "5.10.0", + "resolved": "https://registry.npmjs.org/@stripe/stripe-js/-/stripe-js-5.10.0.tgz", + "integrity": "sha512-PTigkxMdMUP6B5ISS7jMqJAKhgrhZwjprDqR1eATtFfh0OpKVNp110xiH+goeVdrJ29/4LeZJR4FaHHWstsu0A==", + "license": "MIT", + "engines": { + "node": ">=12.16" + } + }, "node_modules/@swc/helpers": { "version": "0.5.17", "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.17.tgz", @@ -15251,13 +15252,13 @@ } }, "node_modules/array-buffer-byte-length": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz", - "integrity": "sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", + "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", "license": "MIT", "dependencies": { - "call-bound": "^1.0.3", - "is-array-buffer": "^3.0.5" + "call-bind": "^1.0.5", + "is-array-buffer": "^3.0.4" }, "engines": { "node": ">= 0.4" @@ -15373,18 +15374,19 @@ } }, "node_modules/arraybuffer.prototype.slice": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz", - "integrity": 
"sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", + "integrity": "sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", "license": "MIT", "dependencies": { "array-buffer-byte-length": "^1.0.1", - "call-bind": "^1.0.8", + "call-bind": "^1.0.5", "define-properties": "^1.2.1", - "es-abstract": "^1.23.5", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.6", - "is-array-buffer": "^3.0.4" + "es-abstract": "^1.22.3", + "es-errors": "^1.2.1", + "get-intrinsic": "^1.2.3", + "is-array-buffer": "^3.0.4", + "is-shared-array-buffer": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -15478,15 +15480,6 @@ "resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz", "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==" }, - "node_modules/async-function": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/async-function/-/async-function-1.0.0.tgz", - "integrity": "sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, "node_modules/async-lock": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/async-lock/-/async-lock-1.4.1.tgz", @@ -16056,32 +16049,24 @@ "optional": true }, "node_modules/bare-fs": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-4.1.5.tgz", - "integrity": "sha512-1zccWBMypln0jEE05LzZt+V/8y8AQsQQqxtklqaIyg5nu6OAYFhZxPXinJTSG+kU5qyNmeLgcn9AW7eHiCHVLA==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-4.0.1.tgz", + "integrity": "sha512-ilQs4fm/l9eMfWY2dY0WCIUplSUp7U0CT1vrqMg1MUdeZl4fypu5UP0XcDBK5WBQPJAKP1b7XEodISmekH/CEg==", "license": "Apache-2.0", "optional": true, "dependencies": { - 
"bare-events": "^2.5.4", + "bare-events": "^2.0.0", "bare-path": "^3.0.0", - "bare-stream": "^2.6.4" + "bare-stream": "^2.0.0" }, "engines": { - "bare": ">=1.16.0" - }, - "peerDependencies": { - "bare-buffer": "*" - }, - "peerDependenciesMeta": { - "bare-buffer": { - "optional": true - } + "bare": ">=1.7.0" } }, "node_modules/bare-os": { - "version": "3.6.1", - "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-3.6.1.tgz", - "integrity": "sha512-uaIjxokhFidJP+bmmvKSgiMzj2sV5GPHaZVAIktcxcpCyBFFWO+YlikVAdhmUo2vYFvFhOXIAlldqV29L8126g==", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-3.6.0.tgz", + "integrity": "sha512-BUrFS5TqSBdA0LwHop4OjPJwisqxGy6JsWVqV6qaFoe965qqtaKfDzHY5T2YA1gUL0ZeeQeA+4BBc1FJTcHiPw==", "license": "Apache-2.0", "optional": true, "engines": { @@ -16963,44 +16948,15 @@ } }, "node_modules/call-bind": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", - "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", - "license": "MIT", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", "dependencies": { - "call-bind-apply-helpers": "^1.0.0", "es-define-property": "^1.0.0", - "get-intrinsic": "^1.2.4", - "set-function-length": "^1.2.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/call-bind-apply-helpers": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", - "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", - "license": "MIT", - "dependencies": { "es-errors": "^1.3.0", - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } 
- }, - "node_modules/call-bound": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", - "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.2", - "get-intrinsic": "^1.3.0" + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" }, "engines": { "node": ">= 0.4" @@ -17489,8 +17445,7 @@ "node_modules/chownr": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", - "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", - "license": "ISC" + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" }, "node_modules/chrome-trace-event": { "version": "1.0.3", @@ -17848,10 +17803,12 @@ "license": "MIT" }, "node_modules/commander": { - "version": "2.11.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.11.0.tgz", - "integrity": "sha512-b0553uYA5YAEGgyYIGYROzKQ7X5RAqedkfjiZxwi0kL1g3bOaBNNZfYkzt/CL0umgD5wc9Jec2FbB98CjkMRvQ==", - "license": "MIT" + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "engines": { + "node": ">= 6" + } }, "node_modules/common-path-prefix": { "version": "3.0.0", @@ -17966,20 +17923,46 @@ "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" }, "node_modules/concat-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-2.0.0.tgz", - "integrity": "sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==", + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", + "integrity": 
"sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", "engines": [ - "node >= 6.0" + "node >= 0.8" ], - "license": "MIT", "dependencies": { "buffer-from": "^1.0.0", "inherits": "^2.0.3", - "readable-stream": "^3.0.2", + "readable-stream": "^2.2.2", "typedarray": "^0.0.6" } }, + "node_modules/concat-stream/node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + }, + "node_modules/concat-stream/node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/concat-stream/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, "node_modules/confbox": { "version": "0.1.8", "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.8.tgz", @@ -18425,20 +18408,6 @@ "node": ">=10" } }, - "node_modules/cpu-features": { - "version": "0.0.10", - "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", - "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", - "hasInstallScript": true, - "optional": true, - "dependencies": { - "buildcheck": "~0.0.6", - "nan": "^2.19.0" - }, - "engines": { - "node": 
">=10.0.0" - } - }, "node_modules/crc-32": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", @@ -19484,14 +19453,14 @@ } }, "node_modules/data-view-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz", - "integrity": "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz", + "integrity": "sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==", "license": "MIT", "dependencies": { - "call-bound": "^1.0.3", + "call-bind": "^1.0.6", "es-errors": "^1.3.0", - "is-data-view": "^1.0.2" + "is-data-view": "^1.0.1" }, "engines": { "node": ">= 0.4" @@ -19501,29 +19470,29 @@ } }, "node_modules/data-view-byte-length": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz", - "integrity": "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz", + "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==", "license": "MIT", "dependencies": { - "call-bound": "^1.0.3", + "call-bind": "^1.0.7", "es-errors": "^1.3.0", - "is-data-view": "^1.0.2" + "is-data-view": "^1.0.1" }, "engines": { "node": ">= 0.4" }, "funding": { - "url": "https://github.com/sponsors/inspect-js" + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/data-view-byte-offset": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz", - "integrity": "sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==", + "version": 
"1.0.0", + "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz", + "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==", "license": "MIT", "dependencies": { - "call-bound": "^1.0.2", + "call-bind": "^1.0.6", "es-errors": "^1.3.0", "is-data-view": "^1.0.1" }, @@ -19934,6 +19903,7 @@ "version": "1.0.4", "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", "integrity": "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==", + "dev": true, "dependencies": { "asap": "^2.0.0", "wrappy": "1" @@ -20005,88 +19975,6 @@ "node": ">=6" } }, - "node_modules/docker-modem": { - "version": "5.0.6", - "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.6.tgz", - "integrity": "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", - "license": "Apache-2.0", - "dependencies": { - "debug": "^4.1.1", - "readable-stream": "^3.5.0", - "split-ca": "^1.0.1", - "ssh2": "^1.15.0" - }, - "engines": { - "node": ">= 8.0" - } - }, - "node_modules/dockerode": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.7.tgz", - "integrity": "sha512-R+rgrSRTRdU5mH14PZTCPZtW/zw3HDWNTS/1ZAQpL/5Upe/ye5K9WQkIysu4wBoiMwKynsz0a8qWuGsHgEvSAA==", - "license": "Apache-2.0", - "dependencies": { - "@balena/dockerignore": "^1.0.2", - "@grpc/grpc-js": "^1.11.1", - "@grpc/proto-loader": "^0.7.13", - "docker-modem": "^5.0.6", - "protobufjs": "^7.3.2", - "tar-fs": "~2.1.2", - "uuid": "^10.0.0" - }, - "engines": { - "node": ">= 8.0" - } - }, - "node_modules/dockerode/node_modules/protobufjs": { - "version": "7.5.3", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.5.3.tgz", - "integrity": "sha512-sildjKwVqOI2kmFDiXQ6aEB0fjYTafpEvIBs8tOR8qI4spuL9OPROLVu2qZqi/xgCfsHIwVqlaF8JBjWFHnKbw==", - "hasInstallScript": true, - "license": "BSD-3-Clause", - 
"dependencies": { - "@protobufjs/aspromise": "^1.1.2", - "@protobufjs/base64": "^1.1.2", - "@protobufjs/codegen": "^2.0.4", - "@protobufjs/eventemitter": "^1.1.0", - "@protobufjs/fetch": "^1.1.0", - "@protobufjs/float": "^1.0.2", - "@protobufjs/inquire": "^1.1.0", - "@protobufjs/path": "^1.1.2", - "@protobufjs/pool": "^1.1.0", - "@protobufjs/utf8": "^1.1.0", - "@types/node": ">=13.7.0", - "long": "^5.0.0" - }, - "engines": { - "node": ">=12.0.0" - } - }, - "node_modules/dockerode/node_modules/tar-fs": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.3.tgz", - "integrity": "sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg==", - "license": "MIT", - "dependencies": { - "chownr": "^1.1.1", - "mkdirp-classic": "^0.5.2", - "pump": "^3.0.0", - "tar-stream": "^2.1.4" - } - }, - "node_modules/dockerode/node_modules/uuid": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", - "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" - ], - "license": "MIT", - "bin": { - "uuid": "dist/bin/uuid" - } - }, "node_modules/doctrine": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", @@ -20314,20 +20202,6 @@ "node": ">=0.10" } }, - "node_modules/dunder-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", - "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.1", - "es-errors": "^1.3.0", - "gopd": "^1.2.0" - }, - "engines": { - "node": ">= 0.4" - } - }, "node_modules/duplexify": { "version": "4.1.3", "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz", @@ 
-20659,65 +20533,57 @@ } }, "node_modules/es-abstract": { - "version": "1.24.0", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz", - "integrity": "sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg==", + "version": "1.23.3", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.3.tgz", + "integrity": "sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==", "license": "MIT", "dependencies": { - "array-buffer-byte-length": "^1.0.2", - "arraybuffer.prototype.slice": "^1.0.4", + "array-buffer-byte-length": "^1.0.1", + "arraybuffer.prototype.slice": "^1.0.3", "available-typed-arrays": "^1.0.7", - "call-bind": "^1.0.8", - "call-bound": "^1.0.4", - "data-view-buffer": "^1.0.2", - "data-view-byte-length": "^1.0.2", - "data-view-byte-offset": "^1.0.1", - "es-define-property": "^1.0.1", + "call-bind": "^1.0.7", + "data-view-buffer": "^1.0.1", + "data-view-byte-length": "^1.0.1", + "data-view-byte-offset": "^1.0.0", + "es-define-property": "^1.0.0", "es-errors": "^1.3.0", - "es-object-atoms": "^1.1.1", - "es-set-tostringtag": "^2.1.0", - "es-to-primitive": "^1.3.0", - "function.prototype.name": "^1.1.8", - "get-intrinsic": "^1.3.0", - "get-proto": "^1.0.1", - "get-symbol-description": "^1.1.0", - "globalthis": "^1.0.4", - "gopd": "^1.2.0", + "es-object-atoms": "^1.0.0", + "es-set-tostringtag": "^2.0.3", + "es-to-primitive": "^1.2.1", + "function.prototype.name": "^1.1.6", + "get-intrinsic": "^1.2.4", + "get-symbol-description": "^1.0.2", + "globalthis": "^1.0.3", + "gopd": "^1.0.1", "has-property-descriptors": "^1.0.2", - "has-proto": "^1.2.0", - "has-symbols": "^1.1.0", + "has-proto": "^1.0.3", + "has-symbols": "^1.0.3", "hasown": "^2.0.2", - "internal-slot": "^1.1.0", - "is-array-buffer": "^3.0.5", + "internal-slot": "^1.0.7", + "is-array-buffer": "^3.0.4", "is-callable": "^1.2.7", - "is-data-view": "^1.0.2", + "is-data-view": "^1.0.1", 
"is-negative-zero": "^2.0.3", - "is-regex": "^1.2.1", - "is-set": "^2.0.3", - "is-shared-array-buffer": "^1.0.4", - "is-string": "^1.1.1", - "is-typed-array": "^1.1.15", - "is-weakref": "^1.1.1", - "math-intrinsics": "^1.1.0", - "object-inspect": "^1.13.4", + "is-regex": "^1.1.4", + "is-shared-array-buffer": "^1.0.3", + "is-string": "^1.0.7", + "is-typed-array": "^1.1.13", + "is-weakref": "^1.0.2", + "object-inspect": "^1.13.1", "object-keys": "^1.1.1", - "object.assign": "^4.1.7", - "own-keys": "^1.0.1", - "regexp.prototype.flags": "^1.5.4", - "safe-array-concat": "^1.1.3", - "safe-push-apply": "^1.0.0", - "safe-regex-test": "^1.1.0", - "set-proto": "^1.0.0", - "stop-iteration-iterator": "^1.1.0", - "string.prototype.trim": "^1.2.10", - "string.prototype.trimend": "^1.0.9", + "object.assign": "^4.1.5", + "regexp.prototype.flags": "^1.5.2", + "safe-array-concat": "^1.1.2", + "safe-regex-test": "^1.0.3", + "string.prototype.trim": "^1.2.9", + "string.prototype.trimend": "^1.0.8", "string.prototype.trimstart": "^1.0.8", - "typed-array-buffer": "^1.0.3", - "typed-array-byte-length": "^1.0.3", - "typed-array-byte-offset": "^1.0.4", - "typed-array-length": "^1.0.7", - "unbox-primitive": "^1.1.0", - "which-typed-array": "^1.1.19" + "typed-array-buffer": "^1.0.2", + "typed-array-byte-length": "^1.0.1", + "typed-array-byte-offset": "^1.0.2", + "typed-array-length": "^1.0.6", + "unbox-primitive": "^1.0.2", + "which-typed-array": "^1.1.15" }, "engines": { "node": ">= 0.4" @@ -20727,10 +20593,12 @@ } }, "node_modules/es-define-property": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", - "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", - "license": "MIT", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": 
"sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "dependencies": { + "get-intrinsic": "^1.2.4" + }, "engines": { "node": ">= 0.4" } @@ -20771,9 +20639,9 @@ "license": "MIT" }, "node_modules/es-object-atoms": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", - "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", + "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", "license": "MIT", "dependencies": { "es-errors": "^1.3.0" @@ -20783,15 +20651,14 @@ } }, "node_modules/es-set-tostringtag": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", - "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", + "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", "license": "MIT", "dependencies": { - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.6", + "get-intrinsic": "^1.2.4", "has-tostringtag": "^1.0.2", - "hasown": "^2.0.2" + "hasown": "^2.0.1" }, "engines": { "node": ">= 0.4" @@ -20807,14 +20674,13 @@ } }, "node_modules/es-to-primitive": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.3.0.tgz", - "integrity": "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==", - "license": "MIT", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": 
"sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", "dependencies": { - "is-callable": "^1.2.7", - "is-date-object": "^1.0.5", - "is-symbol": "^1.0.4" + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -22969,7 +22835,8 @@ "node_modules/fast-safe-stringify": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", - "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", + "dev": true }, "node_modules/fast-text-encoding": { "version": "1.0.3", @@ -23464,18 +23331,11 @@ } }, "node_modules/for-each": { - "version": "0.3.5", - "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", - "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", - "license": "MIT", + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", + "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", "dependencies": { - "is-callable": "^1.2.7" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "is-callable": "^1.1.3" } }, "node_modules/for-in": { @@ -23637,7 +23497,6 @@ "resolved": "https://registry.npmjs.org/formidable/-/formidable-1.2.6.tgz", "integrity": "sha512-KcpbcpuLNOwrEjnbpMC0gS+X8ciDoZE1kkqzat4a8vrprf+s9pKNQ/QIwWfbfs4ltgmFl3MD177SNTkve3BwGQ==", "deprecated": "Please upgrade to latest, formidable@v2 or formidable@v3! 
Check these notes: https://bit.ly/2ZEqIau", - "license": "MIT", "funding": { "url": "https://ko-fi.com/tunnckoCore/commissions" } @@ -23813,17 +23672,14 @@ } }, "node_modules/function.prototype.name": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.8.tgz", - "integrity": "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==", - "license": "MIT", + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz", + "integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==", "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.3", - "define-properties": "^1.2.1", - "functions-have-names": "^1.2.3", - "hasown": "^2.0.2", - "is-callable": "^1.2.7" + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "functions-have-names": "^1.2.3" }, "engines": { "node": ">= 0.4" @@ -23935,21 +23791,15 @@ } }, "node_modules/get-intrinsic": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", - "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", - "license": "MIT", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", "dependencies": { - "call-bind-apply-helpers": "^1.0.2", - "es-define-property": "^1.0.1", "es-errors": "^1.3.0", - "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", - "get-proto": "^1.0.1", - "gopd": "^1.2.0", - "has-symbols": "^1.1.0", - "hasown": "^2.0.2", - "math-intrinsics": "^1.1.0" + "has-proto": "^1.0.1", + "has-symbols": "^1.0.3", + "hasown": "^2.0.0" }, "engines": { "node": ">= 0.4" @@ -23977,19 +23827,6 @@ "url": 
"https://github.com/sponsors/sindresorhus" } }, - "node_modules/get-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", - "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", - "license": "MIT", - "dependencies": { - "dunder-proto": "^1.0.1", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - } - }, "node_modules/get-stream": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", @@ -24006,14 +23843,14 @@ } }, "node_modules/get-symbol-description": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.1.0.tgz", - "integrity": "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", + "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", "license": "MIT", "dependencies": { - "call-bound": "^1.0.3", + "call-bind": "^1.0.5", "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.6" + "get-intrinsic": "^1.2.4" }, "engines": { "node": ">= 0.4" @@ -24233,13 +24070,11 @@ } }, "node_modules/globalthis": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", - "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", - "license": "MIT", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", + "integrity": "sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", "dependencies": { - "define-properties": "^1.2.1", - "gopd": "^1.0.1" + "define-properties": "^1.1.3" }, "engines": { "node": ">= 0.4" @@ -24786,12 +24621,11 @@ "integrity": 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/gopd": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", - "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", - "license": "MIT", - "engines": { - "node": ">= 0.4" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", + "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "dependencies": { + "get-intrinsic": "^1.1.3" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -24811,7 +24645,6 @@ "version": "2.1.8", "resolved": "https://registry.npmjs.org/graphlib/-/graphlib-2.1.8.tgz", "integrity": "sha512-jcLLfkpoVGmH7/InMC/1hIvOPSUh38oJtGhvrOFGzioE1DZ+0YW16RgmOJhHiuWTvGiJQ9Z1Ik43JvkRPRvE+A==", - "license": "MIT", "dependencies": { "lodash": "^4.17.15" } @@ -25032,13 +24865,10 @@ } }, "node_modules/has-proto": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.2.0.tgz", - "integrity": "sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", + "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", "license": "MIT", - "dependencies": { - "dunder-proto": "^1.0.0" - }, "engines": { "node": ">= 0.4" }, @@ -25047,10 +24877,9 @@ } }, "node_modules/has-symbols": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", - "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", - "license": "MIT", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": 
"sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", "engines": { "node": ">= 0.4" }, @@ -26008,14 +25837,14 @@ } }, "node_modules/internal-slot": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", - "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", + "integrity": "sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", "license": "MIT", "dependencies": { "es-errors": "^1.3.0", - "hasown": "^2.0.2", - "side-channel": "^1.1.0" + "hasown": "^2.0.0", + "side-channel": "^1.0.4" }, "engines": { "node": ">= 0.4" @@ -26193,14 +26022,13 @@ } }, "node_modules/is-array-buffer": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz", - "integrity": "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", + "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.3", - "get-intrinsic": "^1.2.6" + "call-bind": "^1.0.2", + "get-intrinsic": "^1.2.1" }, "engines": { "node": ">= 0.4" @@ -26215,35 +26043,12 @@ "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", "dev": true }, - "node_modules/is-async-function": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.1.tgz", - "integrity": "sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==", - "license": "MIT", - "dependencies": { - "async-function": "^1.0.0", - "call-bound": "^1.0.3", - 
"get-proto": "^1.0.1", - "has-tostringtag": "^1.0.2", - "safe-regex-test": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/is-bigint": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.1.0.tgz", - "integrity": "sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==", - "license": "MIT", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", + "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", "dependencies": { - "has-bigints": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" + "has-bigints": "^1.0.1" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -26262,13 +26067,12 @@ } }, "node_modules/is-boolean-object": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.2.tgz", - "integrity": "sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==", - "license": "MIT", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", + "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", "dependencies": { - "call-bound": "^1.0.3", - "has-tostringtag": "^1.0.2" + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -26333,13 +26137,11 @@ } }, "node_modules/is-data-view": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.2.tgz", - "integrity": "sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz", + "integrity": 
"sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==", "license": "MIT", "dependencies": { - "call-bound": "^1.0.2", - "get-intrinsic": "^1.2.6", "is-typed-array": "^1.1.13" }, "engines": { @@ -26350,13 +26152,11 @@ } }, "node_modules/is-date-object": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.1.0.tgz", - "integrity": "sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==", - "license": "MIT", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", + "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", "dependencies": { - "call-bound": "^1.0.2", - "has-tostringtag": "^1.0.2" + "has-tostringtag": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -26421,21 +26221,6 @@ "node": ">=0.10.0" } }, - "node_modules/is-finalizationregistry": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz", - "integrity": "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==", - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -26533,13 +26318,10 @@ } }, "node_modules/is-map": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", - "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.2.tgz", + 
"integrity": "sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg==", + "dev": true, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -26601,13 +26383,11 @@ } }, "node_modules/is-number-object": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz", - "integrity": "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==", - "license": "MIT", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", + "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", "dependencies": { - "call-bound": "^1.0.3", - "has-tostringtag": "^1.0.2" + "has-tostringtag": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -26666,15 +26446,12 @@ "integrity": "sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ==" }, "node_modules/is-regex": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", - "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", - "license": "MIT", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", + "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", "dependencies": { - "call-bound": "^1.0.2", - "gopd": "^1.2.0", - "has-tostringtag": "^1.0.2", - "hasown": "^2.0.2" + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -26684,13 +26461,10 @@ } }, "node_modules/is-set": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", - "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, + 
"version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.2.tgz", + "integrity": "sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g==", + "dev": true, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -26703,12 +26477,12 @@ "license": "MIT" }, "node_modules/is-shared-array-buffer": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz", - "integrity": "sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", + "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", "license": "MIT", "dependencies": { - "call-bound": "^1.0.3" + "call-bind": "^1.0.7" }, "engines": { "node": ">= 0.4" @@ -26729,13 +26503,11 @@ } }, "node_modules/is-string": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.1.1.tgz", - "integrity": "sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==", - "license": "MIT", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", + "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", "dependencies": { - "call-bound": "^1.0.3", - "has-tostringtag": "^1.0.2" + "has-tostringtag": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -26745,14 +26517,11 @@ } }, "node_modules/is-symbol": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.1.1.tgz", - "integrity": "sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==", - "license": "MIT", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", + 
"integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", "dependencies": { - "call-bound": "^1.0.2", - "has-symbols": "^1.1.0", - "safe-regex-test": "^1.1.0" + "has-symbols": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -26762,12 +26531,12 @@ } }, "node_modules/is-typed-array": { - "version": "1.1.15", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", - "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", + "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", "license": "MIT", "dependencies": { - "which-typed-array": "^1.1.16" + "which-typed-array": "^1.1.14" }, "engines": { "node": ">= 0.4" @@ -26808,43 +26577,33 @@ "integrity": "sha512-X/kiF3Xndj6WI7l/yLyzR7V1IbQd6L4S4cewSL0fRciemPmHbaXIKR2qtf+zseH+lbMG0vFp4HvCUe7amGZVhw==" }, "node_modules/is-weakmap": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", - "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.1.tgz", + "integrity": "sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA==", + "dev": true, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-weakref": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.1.1.tgz", - "integrity": "sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==", - "license": "MIT", + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", + "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", "dependencies": { - "call-bound": "^1.0.3" - }, - "engines": { - "node": ">= 0.4" + "call-bind": "^1.0.2" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-weakset": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.4.tgz", - "integrity": "sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==", - "license": "MIT", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.2.tgz", + "integrity": "sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg==", + "dev": true, "dependencies": { - "call-bound": "^1.0.3", - "get-intrinsic": "^1.2.6" - }, - "engines": { - "node": ">= 0.4" + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.1" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -27565,7 +27324,6 @@ "version": "3.0.15", "resolved": "https://registry.npmjs.org/json-refs/-/json-refs-3.0.15.tgz", "integrity": "sha512-0vOQd9eLNBL18EGl5yYaO44GhixmImes2wiYn9Z3sag3QnehWrYWlB9AFtMxCL2Bj3fyxgDYkxGFEU/chlYssw==", - "license": "MIT", "dependencies": { "commander": "~4.1.1", "graphlib": "^2.1.8", @@ -27587,25 +27345,14 @@ "version": "1.0.10", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", - "license": "MIT", "dependencies": { "sprintf-js": "~1.0.2" } }, - "node_modules/json-refs/node_modules/commander": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", - "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", - "license": "MIT", - "engines": { - "node": ">= 6" - } 
- }, "node_modules/json-refs/node_modules/js-yaml": { "version": "3.14.1", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", - "license": "MIT", "dependencies": { "argparse": "^1.0.7", "esprima": "^4.0.0" @@ -27618,7 +27365,6 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "license": "MIT", "engines": { "node": ">=8" } @@ -28384,14 +28130,12 @@ "node_modules/lodash._arraypool": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._arraypool/-/lodash._arraypool-2.4.1.tgz", - "integrity": "sha512-tC2aLC7bbkDXKNrjDu9OLiVx9pFIvjinID2eD9PzNdAQGZScWUd/h8faqOw5d6oLsOvFRCRbz1ASoB+deyMVUw==", - "license": "MIT" + "integrity": "sha1-6I7suS4ruEyQZWEv2VigcZzUf5Q=" }, "node_modules/lodash._basebind": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._basebind/-/lodash._basebind-2.4.1.tgz", - "integrity": "sha512-VGHm6DH+1UiuafQdE/DNMqxOcSyhRu0xO9+jPDq7xITRn5YOorGrHVQmavMVXCYmTm80YRTZZCn/jTW7MokwLg==", - "license": "MIT", + "integrity": "sha1-6UC5690nwyfgqNqxtVkWxTQelXU=", "dependencies": { "lodash._basecreate": "~2.4.1", "lodash._setbinddata": "~2.4.1", @@ -28402,8 +28146,7 @@ "node_modules/lodash._baseclone": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._baseclone/-/lodash._baseclone-2.4.1.tgz", - "integrity": "sha512-+zJVXs0VxC/Au+/7foiKzw8UaWvfSfPh20XhqK/6HFQiUeclL5fz05zY7G9yDAFItAKKZwB4cgpzGvxiwuG1wQ==", - "license": "MIT", + "integrity": "sha1-MPgj5X4X43NdODvWK2Czh1Q7QYY=", "dependencies": { "lodash._getarray": "~2.4.1", "lodash._releasearray": "~2.4.1", @@ -28418,8 +28161,7 @@ "node_modules/lodash._basecreate": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._basecreate/-/lodash._basecreate-2.4.1.tgz", - 
"integrity": "sha512-8JJ3FnMPm54t3BwPLk8q8mPyQKQXm/rt9df+awr4NGtyJrtcCXM3Of1I86S6jVy1b4yAyFBb8wbKPEauuqzRmQ==", - "license": "MIT", + "integrity": "sha1-+Ob1tXip405UEXm1a47uv0oofgg=", "dependencies": { "lodash._isnative": "~2.4.1", "lodash.isobject": "~2.4.1", @@ -28429,8 +28171,7 @@ "node_modules/lodash._basecreatecallback": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._basecreatecallback/-/lodash._basecreatecallback-2.4.1.tgz", - "integrity": "sha512-SLczhg860fGW7AKlYcuOFstDtJuQhaANlJ4Y/jrOoRxhmVtK41vbJDH3OefVRSRkSCQo4HI82QVkAVsoGa5gSw==", - "license": "MIT", + "integrity": "sha1-fQsmdknLKeehOdAQO3wR+uhOSFE=", "dependencies": { "lodash._setbinddata": "~2.4.1", "lodash.bind": "~2.4.1", @@ -28441,8 +28182,7 @@ "node_modules/lodash._basecreatewrapper": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._basecreatewrapper/-/lodash._basecreatewrapper-2.4.1.tgz", - "integrity": "sha512-x2ja1fa/qmzbizuXgVM4QAP9svtMbdxjG8Anl9bCeDAwLOVQ1vLrA0hLb/NkpbGi9evjtkl0aWLTEoOlUdBPQA==", - "license": "MIT", + "integrity": "sha1-TTHy595+E0+/KAN2K4FQsyUZZm8=", "dependencies": { "lodash._basecreate": "~2.4.1", "lodash._setbinddata": "~2.4.1", @@ -28453,8 +28193,7 @@ "node_modules/lodash._createwrapper": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._createwrapper/-/lodash._createwrapper-2.4.1.tgz", - "integrity": "sha512-5TCfLt1haQpsa7bgLYRKNNE4yqhO4ZxIayN1btQmazMchO6Q8JYFRMqbJ3W+uNmMm4R0Jw7KGkZX5YfDDnywuw==", - "license": "MIT", + "integrity": "sha1-UdaVeXPaTtVW43KQ2MGhjFPeFgc=", "dependencies": { "lodash._basebind": "~2.4.1", "lodash._basecreatewrapper": "~2.4.1", @@ -28465,8 +28204,7 @@ "node_modules/lodash._getarray": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._getarray/-/lodash._getarray-2.4.1.tgz", - "integrity": "sha512-iIrScwY3atGvLVbQL/+CNUznaPwBJg78S/JO4cTUFXRkRsZgEBhscB27cVoT4tsIOUyFu/5M/0umfHNGJ6wYwg==", - "license": "MIT", + "integrity": "sha1-+vH3+BD6mFolHCGHQESBCUg55e4=", 
"dependencies": { "lodash._arraypool": "~2.4.1" } @@ -28474,26 +28212,22 @@ "node_modules/lodash._isnative": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._isnative/-/lodash._isnative-2.4.1.tgz", - "integrity": "sha512-BOlKGKNHhCHswGOWtmVb5zBygyxN7EmTuzVOSQI6QSoGhG+kvv71gICFS1TBpnqvT1n53txK8CDK3u5D2/GZxQ==", - "license": "MIT" + "integrity": "sha1-PqZAS3hKe+g2x7V1gOHN95sUgyw=" }, "node_modules/lodash._maxpoolsize": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._maxpoolsize/-/lodash._maxpoolsize-2.4.1.tgz", - "integrity": "sha512-xKDem1BxoIfcCtaJHotjtyfdIvZO9qrF+mv3G1+ngQmaI3MJt3Qm46i9HLk/CbzABbavUrr1/EomQT8KxtsrYA==", - "license": "MIT" + "integrity": "sha1-nUgvRjuOZq++WcLBTtsRcGAXIzQ=" }, "node_modules/lodash._objecttypes": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._objecttypes/-/lodash._objecttypes-2.4.1.tgz", - "integrity": "sha512-XpqGh1e7hhkOzftBfWE7zt+Yn9mVHFkDhicVttvKLsoCMLVVL+xTQjfjB4X4vtznauxv0QZ5ZAeqjvat0dh62Q==", - "license": "MIT" + "integrity": "sha1-fAt/admKH3ZSn4kLDNsbTf7BHBE=" }, "node_modules/lodash._releasearray": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._releasearray/-/lodash._releasearray-2.4.1.tgz", - "integrity": "sha512-wwCwWX8PK/mYR5VZjcU5JFl6py/qrfLGMxzpKOfSqgA1PaZ6Z625CZLCxH1KsqyxSkOFmNm+mEYjeDpXlM4hrg==", - "license": "MIT", + "integrity": "sha1-phOWMNdtFTawfdyAliiJsIL2pkE=", "dependencies": { "lodash._arraypool": "~2.4.1", "lodash._maxpoolsize": "~2.4.1" @@ -28502,8 +28236,7 @@ "node_modules/lodash._setbinddata": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._setbinddata/-/lodash._setbinddata-2.4.1.tgz", - "integrity": "sha512-Vx0XKzpg2DFbQw4wrp1xSWd2sfl3W/BG6bucSRZmftS1AzbWRemCmBQDxyQTNhlLNec428PXkuuja+VNBZgu2A==", - "license": "MIT", + "integrity": "sha1-98IAzRuS7yNrOZ7s9zxkjReqlNI=", "dependencies": { "lodash._isnative": "~2.4.1", "lodash.noop": "~2.4.1" @@ -28512,8 +28245,7 @@ "node_modules/lodash._shimkeys": { 
"version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._shimkeys/-/lodash._shimkeys-2.4.1.tgz", - "integrity": "sha512-lBrglYxLD/6KAJ8IEa5Lg+YHgNAL7FyKqXg4XOUI+Du/vtniLs1ZqS+yHNKPkK54waAgkdUnDOYaWf+rv4B+AA==", - "license": "MIT", + "integrity": "sha1-bpzJZm/wgfC1psl4uD4kLmlJ0gM=", "dependencies": { "lodash._objecttypes": "~2.4.1" } @@ -28521,14 +28253,12 @@ "node_modules/lodash._slice": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._slice/-/lodash._slice-2.4.1.tgz", - "integrity": "sha512-+odPJa4PE2UgYnQgJgkLs0UD03QU78R2ivhrFnG9GdtYOZdE6ObxOj7KiUEUlqOOgatFT+ZqSypFjDSduTigKg==", - "license": "MIT" + "integrity": "sha1-dFz0GlNZexj2iImFREBe+isG2Q8=" }, "node_modules/lodash.assign": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.assign/-/lodash.assign-2.4.1.tgz", - "integrity": "sha512-AqQ4AJz5buSx9ELXWt5dONwJyVPd4NTADMKhoVYWCugjoVf172/LpvVhwmSJn4g8/Dc0S8hxTe8rt5Dob3X9KQ==", - "license": "MIT", + "integrity": "sha1-hMOVlt1xGBqXsGUpE6fJZ15Jsao=", "dependencies": { "lodash._basecreatecallback": "~2.4.1", "lodash._objecttypes": "~2.4.1", @@ -28538,8 +28268,7 @@ "node_modules/lodash.bind": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.bind/-/lodash.bind-2.4.1.tgz", - "integrity": "sha512-hn2VWYZ+N9aYncRad4jORvlGgpFrn+axnPIWRvFxjk6CWcZH5b5alI8EymYsHITI23Z9wrW/+ORq+azrVFpOfw==", - "license": "MIT", + "integrity": "sha1-XRn6AFyMTSNvr0dCx7eh/Kvikmc=", "dependencies": { "lodash._createwrapper": "~2.4.1", "lodash._slice": "~2.4.1" @@ -28553,8 +28282,7 @@ "node_modules/lodash.clonedeep": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-2.4.1.tgz", - "integrity": "sha512-zj5vReFLkR+lJOBKP1wyteZ13zut/KSmXtdCBgxcy/m4UTitcBxpeVZT7gwk8BQrztPI5dIgO4bhBppXV4rpTQ==", - "license": "MIT", + "integrity": "sha1-8pIDtAsS/uCkXTYxZIJZvrq8eGg=", "dependencies": { "lodash._baseclone": "~2.4.1", "lodash._basecreatecallback": "~2.4.1" @@ -28584,8 +28312,7 @@ 
"node_modules/lodash.foreach": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.foreach/-/lodash.foreach-2.4.1.tgz", - "integrity": "sha512-AvOobAkE7qBtIiHU5QHQIfveWH5Usr9pIcFIzBv7u4S6bvb3FWpFrh9ltqBY7UeL5lw6e8d+SggiUXQVyh+FpA==", - "license": "MIT", + "integrity": "sha1-/j/Do0yGyUyrb5UiVgKCdB4BYwk=", "dependencies": { "lodash._basecreatecallback": "~2.4.1", "lodash.forown": "~2.4.1" @@ -28594,8 +28321,7 @@ "node_modules/lodash.forown": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.forown/-/lodash.forown-2.4.1.tgz", - "integrity": "sha512-VC+CKm/zSs5t3i/MHv71HZoQphuqOvez1xhjWBwHU5zAbsCYrqwHr+MyQyMk14HzA3hSRNA5lCqDMSw5G2Qscg==", - "license": "MIT", + "integrity": "sha1-eLQer+FAX6lmRZ6kGT/VAtCEUks=", "dependencies": { "lodash._basecreatecallback": "~2.4.1", "lodash._objecttypes": "~2.4.1", @@ -28616,8 +28342,7 @@ "node_modules/lodash.identity": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.identity/-/lodash.identity-2.4.1.tgz", - "integrity": "sha512-VRYX+8XipeLjorag5bz3YBBRJ+5kj8hVBzfnaHgXPZAVTYowBdY5l0M5ZnOmlAMCOXBFabQtm7f5VqjMKEji0w==", - "license": "MIT" + "integrity": "sha1-ZpTP+mX++TH3wxzobHRZfPVg9PE=" }, "node_modules/lodash.includes": { "version": "4.3.0", @@ -28632,8 +28357,7 @@ "node_modules/lodash.isarray": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.isarray/-/lodash.isarray-2.4.1.tgz", - "integrity": "sha512-yRDd0z+APziDqbk0MqR6Qfwj/Qn3jLxFJbI9U8MuvdTnqIXdZ5YXyGLnwuzCpZmjr26F1GNOjKLMMZ10i/wy6A==", - "license": "MIT", + "integrity": "sha1-tSoybB9i9tfac6MdVAHfbvRPD6E=", "dependencies": { "lodash._isnative": "~2.4.1" } @@ -28646,15 +28370,12 @@ "node_modules/lodash.isequal": { "version": "4.5.0", "resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", - "integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==", - "deprecated": "This package is deprecated. 
Use require('node:util').isDeepStrictEqual instead.", - "license": "MIT" + "integrity": "sha1-QVxEePK8wwEgwizhDtMib30+GOA=" }, "node_modules/lodash.isfunction": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.isfunction/-/lodash.isfunction-2.4.1.tgz", - "integrity": "sha512-6XcAB3izeQxPOQQNAJbbdjXbvWEt2Pn9ezPrjr4CwoLwmqsLVbsiEXD19cmmt4mbzOCOCdHzOQiUivUOJLra7w==", - "license": "MIT" + "integrity": "sha1-LP1XXHPkmKtX4xm3f6Aq3vE6lNE=" }, "node_modules/lodash.isinteger": { "version": "4.0.4", @@ -28669,8 +28390,7 @@ "node_modules/lodash.isobject": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.isobject/-/lodash.isobject-2.4.1.tgz", - "integrity": "sha512-sTebg2a1PoicYEZXD5PBdQcTlIJ6hUslrlWr7iV0O7n+i4596s2NQ9I5CaZ5FbXSfya/9WQsrYLANUJv9paYVA==", - "license": "MIT", + "integrity": "sha1-Wi5H/mmVPx7mMafrof5k0tBlWPU=", "dependencies": { "lodash._objecttypes": "~2.4.1" } @@ -28688,8 +28408,7 @@ "node_modules/lodash.keys": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.keys/-/lodash.keys-2.4.1.tgz", - "integrity": "sha512-ZpJhwvUXHSNL5wYd1RM6CUa2ZuqorG9ngoJ9Ix5Cce+uX7I5O/E06FCJdhSZ33b5dVyeQDnIlWH7B2s5uByZ7g==", - "license": "MIT", + "integrity": "sha1-SN6kbfj/djKxDXBrissmWR4rNyc=", "dependencies": { "lodash._isnative": "~2.4.1", "lodash._shimkeys": "~2.4.1", @@ -28710,8 +28429,7 @@ "node_modules/lodash.noop": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.noop/-/lodash.noop-2.4.1.tgz", - "integrity": "sha512-uNcV98/blRhInPUGQEnj9ekXXfG+q+rfoNSFZgl/eBfog9yBDW9gfUv2AHX/rAF7zZRlzWhbslGhbGQFZlCkZA==", - "license": "MIT" + "integrity": "sha1-T7VPgWZS5a4Q6PcvcXo4jHMmU4o=" }, "node_modules/lodash.once": { "version": "4.1.1", @@ -28727,8 +28445,7 @@ "node_modules/lodash.support": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.support/-/lodash.support-2.4.1.tgz", - "integrity": "sha512-6SwqWwGFHhTXEiqB/yQgu8FYd//tm786d49y7kizHVCJH7zdzs191UQn3ES3tkkDbUddNRfkCRYqJFHtbLnbCw==", - 
"license": "MIT", + "integrity": "sha1-Mg4LZwMWc8KNeiu12eAzGkUkBRU=", "dependencies": { "lodash._isnative": "~2.4.1" } @@ -29118,15 +28835,6 @@ "dev": true, "license": "ISC" }, - "node_modules/math-intrinsics": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", - "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, "node_modules/mathjax": { "version": "3.2.2", "resolved": "https://registry.npmjs.org/mathjax/-/mathjax-3.2.2.tgz", @@ -29740,6 +29448,7 @@ "version": "2.6.0", "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", + "dev": true, "bin": { "mime": "cli.js" }, @@ -29748,10 +29457,9 @@ } }, "node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "license": "MIT", + "version": "1.51.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", + "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", "engines": { "node": ">= 0.6" } @@ -29767,12 +29475,11 @@ } }, "node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "license": "MIT", + "version": "2.1.34", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", + "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", "dependencies": { - "mime-db": "1.52.0" + "mime-db": "1.51.0" }, "engines": { 
"node": ">= 0.6" @@ -30009,8 +29716,7 @@ "node_modules/mkdirp-classic": { "version": "0.5.3", "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", - "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", - "license": "MIT" + "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" }, "node_modules/mlly": { "version": "1.7.4", @@ -30475,18 +30181,18 @@ } }, "node_modules/multer": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/multer/-/multer-2.0.1.tgz", - "integrity": "sha512-Ug8bXeTIUlxurg8xLTEskKShvcKDZALo1THEX5E41pYCD2sCVub5/kIRIGqWNoqV6szyLyQKV6mD4QUrWE5GCQ==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/multer/-/multer-2.0.0.tgz", + "integrity": "sha512-bS8rPZurbAuHGAnApbM9d4h1wSoYqrOqkE+6a64KLMK9yWU7gJXBDDVklKQ3TPi9DRb85cRs6yXaC0+cjxRtRg==", "license": "MIT", "dependencies": { "append-field": "^1.0.0", - "busboy": "^1.6.0", - "concat-stream": "^2.0.0", - "mkdirp": "^0.5.6", + "busboy": "^1.0.0", + "concat-stream": "^1.5.2", + "mkdirp": "^0.5.4", "object-assign": "^4.1.1", - "type-is": "^1.6.18", - "xtend": "^4.0.2" + "type-is": "^1.6.4", + "xtend": "^4.0.0" }, "engines": { "node": ">= 10.16.0" @@ -30616,8 +30322,7 @@ "node_modules/native-promise-only": { "version": "0.8.1", "resolved": "https://registry.npmjs.org/native-promise-only/-/native-promise-only-0.8.1.tgz", - "integrity": "sha512-zkVhZUA3y8mbz652WrL5x0fB0ehrBkulWT3TomAQ9iDtyXZvzKeEA6GPxAItBYeNYl5yngKRX612qHOhvMkDeg==", - "license": "MIT" + "integrity": "sha1-IKMYwwy0X3H+et+/eyHJnBRy7xE=" }, "node_modules/native-request": { "version": "1.1.0", @@ -31171,13 +30876,9 @@ } }, "node_modules/object-inspect": { - "version": "1.13.4", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", - "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", 
- "license": "MIT", - "engines": { - "node": ">= 0.4" - }, + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", + "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -31219,16 +30920,14 @@ } }, "node_modules/object.assign": { - "version": "4.1.7", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz", - "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==", + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", + "integrity": "sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.3", + "call-bind": "^1.0.5", "define-properties": "^1.2.1", - "es-object-atoms": "^1.0.0", - "has-symbols": "^1.1.0", + "has-symbols": "^1.0.3", "object-keys": "^1.1.1" }, "engines": { @@ -31479,23 +31178,6 @@ "resolved": "libraries/overleaf-editor-core", "link": true }, - "node_modules/own-keys": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/own-keys/-/own-keys-1.0.1.tgz", - "integrity": "sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==", - "license": "MIT", - "dependencies": { - "get-intrinsic": "^1.2.6", - "object-keys": "^1.1.1", - "safe-push-apply": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/p-event": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/p-event/-/p-event-4.2.0.tgz", @@ -32055,80 +31737,12 @@ } }, "node_modules/path-loader": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/path-loader/-/path-loader-1.0.12.tgz", - "integrity": 
"sha512-n7oDG8B+k/p818uweWrOixY9/Dsr89o2TkCm6tOTex3fpdo2+BFDgR+KpB37mGKBRsBAlR8CIJMFN0OEy/7hIQ==", - "license": "MIT", + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/path-loader/-/path-loader-1.0.10.tgz", + "integrity": "sha512-CMP0v6S6z8PHeJ6NFVyVJm6WyJjIwFvyz2b0n2/4bKdS/0uZa/9sKUlYZzubrn3zuDRU0zIuEDX9DZYQ2ZI8TA==", "dependencies": { "native-promise-only": "^0.8.1", - "superagent": "^7.1.6" - } - }, - "node_modules/path-loader/node_modules/debug": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", - "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/path-loader/node_modules/formidable": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.1.5.tgz", - "integrity": "sha512-Oz5Hwvwak/DCaXVVUtPn4oLMLLy1CdclLKO1LFgU7XzDpVMUU5UjlSLpGMocyQNNk8F6IJW9M/YdooSn2MRI+Q==", - "license": "MIT", - "dependencies": { - "@paralleldrive/cuid2": "^2.2.2", - "dezalgo": "^1.0.4", - "once": "^1.4.0", - "qs": "^6.11.0" - }, - "funding": { - "url": "https://ko-fi.com/tunnckoCore/commissions" - } - }, - "node_modules/path-loader/node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/path-loader/node_modules/superagent": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/superagent/-/superagent-7.1.6.tgz", - "integrity": "sha512-gZkVCQR1gy/oUXr+kxJMLDjla434KmSOKbx5iGD30Ql+AkJQ/YlPKECJy2nhqOsHLjGHzoDTXNSjhnvWhzKk7g==", - "deprecated": 
"Please upgrade to v9.0.0+ as we have fixed a public vulnerability with formidable dependency. Note that v9.0.0+ requires Node.js v14.18.0+. See https://github.com/ladjs/superagent/pull/1800 for insight. This project is supported and maintained by the team at Forward Email @ https://forwardemail.net", - "license": "MIT", - "dependencies": { - "component-emitter": "^1.3.0", - "cookiejar": "^2.1.3", - "debug": "^4.3.4", - "fast-safe-stringify": "^2.1.1", - "form-data": "^4.0.0", - "formidable": "^2.0.1", - "methods": "^1.1.2", - "mime": "2.6.0", - "qs": "^6.10.3", - "readable-stream": "^3.6.0", - "semver": "^7.3.7" - }, - "engines": { - "node": ">=6.4.0 <13 || >=14" + "superagent": "^3.8.3" } }, "node_modules/path-parse": { @@ -35465,28 +35079,6 @@ "node": ">=4.0.0" } }, - "node_modules/reflect.getprototypeof": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", - "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.8", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.9", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.0.0", - "get-intrinsic": "^1.2.7", - "get-proto": "^1.0.1", - "which-builtin-type": "^1.2.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/regenerate": { "version": "1.4.2", "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz", @@ -35551,17 +35143,15 @@ } }, "node_modules/regexp.prototype.flags": { - "version": "1.5.4", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz", - "integrity": "sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==", + "version": "1.5.2", + "resolved": 
"https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", + "integrity": "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.8", + "call-bind": "^1.0.6", "define-properties": "^1.2.1", "es-errors": "^1.3.0", - "get-proto": "^1.0.1", - "gopd": "^1.2.0", - "set-function-name": "^2.0.2" + "set-function-name": "^2.0.1" }, "engines": { "node": ">= 0.4" @@ -36076,15 +35666,14 @@ } }, "node_modules/safe-array-concat": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.3.tgz", - "integrity": "sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz", + "integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.2", - "get-intrinsic": "^1.2.6", - "has-symbols": "^1.1.0", + "call-bind": "^1.0.7", + "get-intrinsic": "^1.2.4", + "has-symbols": "^1.0.3", "isarray": "^2.0.5" }, "engines": { @@ -36105,22 +35694,6 @@ "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==", "optional": true }, - "node_modules/safe-push-apply": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz", - "integrity": "sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "isarray": "^2.0.5" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/safe-regex": { "version": "1.1.0", "resolved": 
"https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", @@ -36131,14 +35704,14 @@ } }, "node_modules/safe-regex-test": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz", - "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", + "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", "license": "MIT", "dependencies": { - "call-bound": "^1.0.2", + "call-bind": "^1.0.6", "es-errors": "^1.3.0", - "is-regex": "^1.2.1" + "is-regex": "^1.1.4" }, "engines": { "node": ">= 0.4" @@ -36856,29 +36429,13 @@ } }, "node_modules/set-function-name": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", - "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", - "license": "MIT", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", + "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==", "dependencies": { - "define-data-property": "^1.1.4", - "es-errors": "^1.3.0", + "define-data-property": "^1.0.1", "functions-have-names": "^1.2.3", - "has-property-descriptors": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/set-proto": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/set-proto/-/set-proto-1.0.0.tgz", - "integrity": "sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==", - "license": "MIT", - "dependencies": { - "dunder-proto": "^1.0.1", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.0.0" + "has-property-descriptors": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -36989,69 
+36546,14 @@ "integrity": "sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==" }, "node_modules/side-channel": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", - "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", - "license": "MIT", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", "dependencies": { + "call-bind": "^1.0.7", "es-errors": "^1.3.0", - "object-inspect": "^1.13.3", - "side-channel-list": "^1.0.0", - "side-channel-map": "^1.0.1", - "side-channel-weakmap": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-list": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", - "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "object-inspect": "^1.13.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-map": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", - "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.5", - "object-inspect": "^1.13.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-weakmap": { - "version": "1.0.2", - 
"resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", - "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.5", - "object-inspect": "^1.13.3", - "side-channel-map": "^1.0.1" + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" }, "engines": { "node": ">= 0.4" @@ -37630,8 +37132,7 @@ "node_modules/spark-md5": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/spark-md5/-/spark-md5-3.0.2.tgz", - "integrity": "sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw==", - "license": "(WTFPL OR MIT)" + "integrity": "sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw==" }, "node_modules/sparse-bitfield": { "version": "3.0.3", @@ -37720,8 +37221,7 @@ "node_modules/split-ca": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/split-ca/-/split-ca-1.0.1.tgz", - "integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==", - "license": "ISC" + "integrity": "sha1-bIOv82kvphJW4M0ZfgXp3hV2kaY=" }, "node_modules/split-string": { "version": "3.1.0", @@ -37756,23 +37256,6 @@ "es5-ext": "^0.10.53" } }, - "node_modules/ssh2": { - "version": "1.16.0", - "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz", - "integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==", - "hasInstallScript": true, - "dependencies": { - "asn1": "^0.2.6", - "bcrypt-pbkdf": "^1.0.2" - }, - "engines": { - "node": ">=10.16.0" - }, - "optionalDependencies": { - "cpu-features": "~0.0.10", - "nan": "^2.20.0" - } - }, "node_modules/sshpk": { "version": "1.17.0", "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz", @@ -37887,13 +37370,12 @@ "license": "MIT" }, 
"node_modules/stop-iteration-iterator": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz", - "integrity": "sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==", - "license": "MIT", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz", + "integrity": "sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==", + "dev": true, "dependencies": { - "es-errors": "^1.3.0", - "internal-slot": "^1.1.0" + "internal-slot": "^1.0.4" }, "engines": { "node": ">= 0.4" @@ -38066,18 +37548,15 @@ } }, "node_modules/string.prototype.trim": { - "version": "1.2.10", - "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.10.tgz", - "integrity": "sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==", + "version": "1.2.9", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", + "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.2", - "define-data-property": "^1.1.4", + "call-bind": "^1.0.7", "define-properties": "^1.2.1", - "es-abstract": "^1.23.5", - "es-object-atoms": "^1.0.0", - "has-property-descriptors": "^1.0.2" + "es-abstract": "^1.23.0", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -38087,19 +37566,15 @@ } }, "node_modules/string.prototype.trimend": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.9.tgz", - "integrity": "sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ==", + "version": "1.0.8", + "resolved": 
"https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz", + "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.2", + "call-bind": "^1.0.7", "define-properties": "^1.2.1", "es-object-atoms": "^1.0.0" }, - "engines": { - "node": ">= 0.4" - }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -38588,8 +38063,7 @@ "version": "3.8.3", "resolved": "https://registry.npmjs.org/superagent/-/superagent-3.8.3.tgz", "integrity": "sha512-GLQtLMCoEIK4eDv6OGtkOoSMt3D+oq0y3dsxMuYuDvaNUvuT8eFBuLmfR0iYYzHC1e8hpzC6ZsxbuP6DIalMFA==", - "deprecated": "Please upgrade to v9.0.0+ as we have fixed a public vulnerability with formidable dependency. Note that v9.0.0+ requires Node.js v14.18.0+. See https://github.com/ladjs/superagent/pull/1800 for insight. This project is supported and maintained by the team at Forward Email @ https://forwardemail.net", - "license": "MIT", + "deprecated": "Please upgrade to v7.0.2+ of superagent. We have fixed numerous issues with streams, form-data, attach(), filesystem errors not bubbling up (ENOENT on attach()), and all tests are now passing. See the releases tab for more information at . Thanks to @shadowgate15, @spence-s, and @niftylettuce. 
Superagent is sponsored by Forward Email at .", "dependencies": { "component-emitter": "^1.2.0", "cookiejar": "^2.1.0", @@ -38610,58 +38084,32 @@ "version": "3.2.7", "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "license": "MIT", "dependencies": { "ms": "^2.1.1" } }, "node_modules/superagent/node_modules/form-data": { - "version": "2.5.3", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.3.tgz", - "integrity": "sha512-XHIrMD0NpDrNM/Ckf7XJiBbLl57KEhT3+i3yY+eWm+cqYZJQTZrKo8Y8AWKnuV5GT4scfuUGt9LzNoIx3dU1nQ==", - "license": "MIT", + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", "dependencies": { "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "es-set-tostringtag": "^2.1.0", - "mime-types": "^2.1.35", - "safe-buffer": "^5.2.1" + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" }, "engines": { "node": ">= 0.12" } }, - "node_modules/superagent/node_modules/form-data/node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, "node_modules/superagent/node_modules/isarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", - "license": "MIT" + "integrity": 
"sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" }, "node_modules/superagent/node_modules/mime": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", - "license": "MIT", "bin": { "mime": "cli.js" }, @@ -38670,10 +38118,9 @@ } }, "node_modules/superagent/node_modules/readable-stream": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", - "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", - "license": "MIT", + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", @@ -38688,7 +38135,6 @@ "version": "1.1.1", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "license": "MIT", "dependencies": { "safe-buffer": "~5.1.0" } @@ -38929,8 +38375,7 @@ "node_modules/swagger-converter": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/swagger-converter/-/swagger-converter-0.1.7.tgz", - "integrity": "sha512-O2hZbWqq8x6j0uZ4qWj5dw45WPoAxKsJLJZqOgTqRtPNi8IqA+rDkDV/48S8qanS3KGv1QcVoPNLivMbyHHdAQ==", - "license": "MIT", + "integrity": "sha1-oJdRnG8e5N1n4wjZtT3cnCslf5c=", "dependencies": { "lodash.clonedeep": "^2.4.1" } @@ -38981,6 +38426,12 @@ "lodash": "^4.17.14" } }, + "node_modules/swagger-tools/node_modules/commander": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.11.0.tgz", + "integrity": "sha512-b0553uYA5YAEGgyYIGYROzKQ7X5RAqedkfjiZxwi0kL1g3bOaBNNZfYkzt/CL0umgD5wc9Jec2FbB98CjkMRvQ==", + "license": 
"MIT" + }, "node_modules/swagger-tools/node_modules/debug": { "version": "3.2.7", "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", @@ -39079,9 +38530,9 @@ } }, "node_modules/tar-fs": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.9.tgz", - "integrity": "sha512-XF4w9Xp+ZQgifKakjZYmFdkLoSWd34VGKcsTCwlNWM7QG3ZbaxnTsaBwnjFZqHRf/rROxaR8rXnbtwdvaDI+lA==", + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.8.tgz", + "integrity": "sha512-ZoROL70jptorGAlgAYiLoBLItEKw/fUxg9BSYK/dF/GAGYFJOJJJMvjPAKDJraCXFwadD456FCuvLWgfhMsPwg==", "license": "MIT", "dependencies": { "pump": "^3.0.0", @@ -39905,14 +39356,14 @@ } }, "node_modules/traverse": { - "version": "0.6.11", - "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.11.tgz", - "integrity": "sha512-vxXDZg8/+p3gblxB6BhhG5yWVn1kGRlaL8O78UDXc3wRnPizB5g83dcvWV1jpDMIPnjZjOFuxlMmE82XJ4407w==", + "version": "0.6.9", + "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.9.tgz", + "integrity": "sha512-7bBrcF+/LQzSgFmT0X5YclVqQxtv7TDJ1f8Wj7ibBu/U6BMLeOpUxuZjV7rMc44UtKxlnMFigdhFAIszSX1DMg==", "license": "MIT", "dependencies": { - "gopd": "^1.2.0", - "typedarray.prototype.slice": "^1.0.5", - "which-typed-array": "^1.1.18" + "gopd": "^1.0.1", + "typedarray.prototype.slice": "^1.0.3", + "which-typed-array": "^1.1.15" }, "engines": { "node": ">= 0.4" @@ -40069,30 +39520,30 @@ } }, "node_modules/typed-array-buffer": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", - "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz", + "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==", "license": "MIT", "dependencies": { - "call-bound": "^1.0.3", + 
"call-bind": "^1.0.7", "es-errors": "^1.3.0", - "is-typed-array": "^1.1.14" + "is-typed-array": "^1.1.13" }, "engines": { "node": ">= 0.4" } }, "node_modules/typed-array-byte-length": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.3.tgz", - "integrity": "sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", + "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.8", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "gopd": "^1.2.0", - "has-proto": "^1.2.0", - "is-typed-array": "^1.1.14" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" }, "engines": { "node": ">= 0.4" @@ -40102,18 +39553,17 @@ } }, "node_modules/typed-array-byte-offset": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.4.tgz", - "integrity": "sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz", + "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==", "license": "MIT", "dependencies": { "available-typed-arrays": "^1.0.7", - "call-bind": "^1.0.8", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "gopd": "^1.2.0", - "has-proto": "^1.2.0", - "is-typed-array": "^1.1.15", - "reflect.getprototypeof": "^1.0.9" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" }, "engines": { "node": ">= 0.4" @@ -40123,17 +39573,17 @@ } }, "node_modules/typed-array-length": { - "version": "1.0.7", - "resolved": 
"https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.7.tgz", - "integrity": "sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz", + "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==", "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "for-each": "^0.3.3", "gopd": "^1.0.1", + "has-proto": "^1.0.3", "is-typed-array": "^1.1.13", - "possible-typed-array-names": "^1.0.0", - "reflect.getprototypeof": "^1.0.6" + "possible-typed-array-names": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -40148,19 +39598,17 @@ "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=" }, "node_modules/typedarray.prototype.slice": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/typedarray.prototype.slice/-/typedarray.prototype.slice-1.0.5.tgz", - "integrity": "sha512-q7QNVDGTdl702bVFiI5eY4l/HkgCM6at9KhcFbgUAzezHFbOVy4+0O/lCjsABEQwbZPravVfBIiBVGo89yzHFg==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typedarray.prototype.slice/-/typedarray.prototype.slice-1.0.3.tgz", + "integrity": "sha512-8WbVAQAUlENo1q3c3zZYuy5k9VzBQvp8AX9WOtbvyWlLM1v5JaSRmjubLjzHF4JFtptjH/5c/i95yaElvcjC0A==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.8", + "call-bind": "^1.0.7", "define-properties": "^1.2.1", - "es-abstract": "^1.23.9", + "es-abstract": "^1.23.0", "es-errors": "^1.3.0", - "get-proto": "^1.0.1", - "math-intrinsics": "^1.1.0", - "typed-array-buffer": "^1.0.3", - "typed-array-byte-offset": "^1.0.4" + "typed-array-buffer": "^1.0.2", + "typed-array-byte-offset": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -40248,18 +39696,14 @@ } }, "node_modules/unbox-primitive": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz", - "integrity": 
"sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==", - "license": "MIT", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", + "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", "dependencies": { - "call-bound": "^1.0.3", + "call-bind": "^1.0.2", "has-bigints": "^1.0.2", - "has-symbols": "^1.1.0", - "which-boxed-primitive": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" + "has-symbols": "^1.0.3", + "which-boxed-primitive": "^1.0.2" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -41944,64 +41388,30 @@ } }, "node_modules/which-boxed-primitive": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.1.1.tgz", - "integrity": "sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==", - "license": "MIT", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", + "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", "dependencies": { - "is-bigint": "^1.1.0", - "is-boolean-object": "^1.2.1", - "is-number-object": "^1.1.1", - "is-string": "^1.1.1", - "is-symbol": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/which-builtin-type": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.2.1.tgz", - "integrity": "sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q==", - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "function.prototype.name": "^1.1.6", - "has-tostringtag": "^1.0.2", - "is-async-function": "^2.0.0", - "is-date-object": "^1.1.0", - "is-finalizationregistry": 
"^1.1.0", - "is-generator-function": "^1.0.10", - "is-regex": "^1.2.1", - "is-weakref": "^1.0.2", - "isarray": "^2.0.5", - "which-boxed-primitive": "^1.1.0", - "which-collection": "^1.0.2", - "which-typed-array": "^1.1.16" - }, - "engines": { - "node": ">= 0.4" + "is-bigint": "^1.0.1", + "is-boolean-object": "^1.1.0", + "is-number-object": "^1.0.4", + "is-string": "^1.0.5", + "is-symbol": "^1.0.3" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/which-collection": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz", - "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==", - "license": "MIT", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.1.tgz", + "integrity": "sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A==", + "dev": true, "dependencies": { - "is-map": "^2.0.3", - "is-set": "^2.0.3", - "is-weakmap": "^2.0.2", - "is-weakset": "^2.0.3" - }, - "engines": { - "node": ">= 0.4" + "is-map": "^2.0.1", + "is-set": "^2.0.1", + "is-weakmap": "^2.0.1", + "is-weakset": "^2.0.1" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -42014,17 +41424,15 @@ "dev": true }, "node_modules/which-typed-array": { - "version": "1.1.19", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", - "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", + "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", "license": "MIT", "dependencies": { "available-typed-arrays": "^1.0.7", - "call-bind": "^1.0.8", - "call-bound": "^1.0.4", - "for-each": "^0.3.5", - "get-proto": 
"^1.0.1", - "gopd": "^1.2.0", + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", "has-tostringtag": "^1.0.2" }, "engines": { @@ -42541,7 +41949,6 @@ "version": "3.25.1", "resolved": "https://registry.npmjs.org/z-schema/-/z-schema-3.25.1.tgz", "integrity": "sha512-7tDlwhrBG+oYFdXNOjILSurpfQyuVgkRe3hB2q8TEssamDHB7BbLWYkYO98nTn0FibfdFroFKDjndbgufAgS/Q==", - "license": "MIT", "dependencies": { "core-js": "^2.5.7", "lodash.get": "^4.0.0", @@ -42555,19 +41962,23 @@ "commander": "^2.7.1" } }, + "node_modules/z-schema/node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "optional": true + }, "node_modules/z-schema/node_modules/core-js": { "version": "2.6.12", "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.12.tgz", "integrity": "sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ==", - "deprecated": "core-js@<3.23.3 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Some versions have web compatibility issues. Please, upgrade your dependencies to the actual version of core-js.", - "hasInstallScript": true, - "license": "MIT" + "deprecated": "core-js@<3.4 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. 
Please, upgrade your dependencies to the actual version of core-js.", + "hasInstallScript": true }, "node_modules/z-schema/node_modules/validator": { "version": "10.11.0", "resolved": "https://registry.npmjs.org/validator/-/validator-10.11.0.tgz", "integrity": "sha512-X/p3UZerAIsbBfN/IwahhYaBbY68EN/UQBWHtsbXGT5bfrH/p4NQzUCG1kF/rtKaNpnJ7jAu6NGTdSNtyNIXMw==", - "license": "MIT", "engines": { "node": ">= 0.10" } @@ -42691,13 +42102,13 @@ "async": "^3.2.5", "body-parser": "^1.20.3", "bunyan": "^1.8.15", - "dockerode": "^4.0.7", + "dockerode": "^4.0.5", "express": "^4.21.2", "lodash": "^4.17.21", "p-limit": "^3.1.0", "request": "^2.88.2", "send": "^0.19.0", - "tar-fs": "^3.0.9", + "tar-fs": "^3.0.4", "workerpool": "^6.1.5" }, "devDependencies": { @@ -42764,6 +42175,33 @@ "node": ">= 0.6" } }, + "services/clsi/node_modules/@grpc/grpc-js": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.13.2.tgz", + "integrity": "sha512-nnR5nmL6lxF8YBqb6gWvEgLdLh/Fn+kvAdX5hUOnt48sNSb0riz/93ASd2E5gvanPA41X6Yp25bIfGRp1SMb2g==", + "license": "Apache-2.0", + "dependencies": { + "@grpc/proto-loader": "^0.7.13", + "@js-sdsl/ordered-map": "^4.4.2" + }, + "engines": { + "node": ">=12.10.0" + } + }, + "services/clsi/node_modules/cpu-features": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", + "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", + "hasInstallScript": true, + "optional": true, + "dependencies": { + "buildcheck": "~0.0.6", + "nan": "^2.19.0" + }, + "engines": { + "node": ">=10.0.0" + } + }, "services/clsi/node_modules/diff": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", @@ -42773,6 +42211,75 @@ "node": ">=0.3.1" } }, + "services/clsi/node_modules/docker-modem": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.6.tgz", + "integrity": 
"sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", + "license": "Apache-2.0", + "dependencies": { + "debug": "^4.1.1", + "readable-stream": "^3.5.0", + "split-ca": "^1.0.1", + "ssh2": "^1.15.0" + }, + "engines": { + "node": ">= 8.0" + } + }, + "services/clsi/node_modules/dockerode": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.5.tgz", + "integrity": "sha512-ZPmKSr1k1571Mrh7oIBS/j0AqAccoecY2yH420ni5j1KyNMgnoTh4Nu4FWunh0HZIJmRSmSysJjBIpa/zyWUEA==", + "license": "Apache-2.0", + "dependencies": { + "@balena/dockerignore": "^1.0.2", + "@grpc/grpc-js": "^1.11.1", + "@grpc/proto-loader": "^0.7.13", + "docker-modem": "^5.0.6", + "protobufjs": "^7.3.2", + "tar-fs": "~2.1.2", + "uuid": "^10.0.0" + }, + "engines": { + "node": ">= 8.0" + } + }, + "services/clsi/node_modules/dockerode/node_modules/tar-fs": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.2.tgz", + "integrity": "sha512-EsaAXwxmx8UB7FRKqeozqEPop69DXcmYwTQwXvyAPF352HJsPdkVhvTaDPYqfNgruveJIJy3TA2l+2zj8LJIJA==", + "license": "MIT", + "dependencies": { + "chownr": "^1.1.1", + "mkdirp-classic": "^0.5.2", + "pump": "^3.0.0", + "tar-stream": "^2.1.4" + } + }, + "services/clsi/node_modules/protobufjs": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", + "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==", + "hasInstallScript": true, + "license": "BSD-3-Clause", + "dependencies": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/node": ">=13.7.0", + "long": "^5.0.0" + }, + "engines": 
{ + "node": ">=12.0.0" + } + }, "services/clsi/node_modules/sinon": { "version": "9.0.3", "resolved": "https://registry.npmjs.org/sinon/-/sinon-9.0.3.tgz", @@ -42792,6 +42299,23 @@ "url": "https://opencollective.com/sinon" } }, + "services/clsi/node_modules/ssh2": { + "version": "1.16.0", + "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz", + "integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==", + "hasInstallScript": true, + "dependencies": { + "asn1": "^0.2.6", + "bcrypt-pbkdf": "^1.0.2" + }, + "engines": { + "node": ">=10.16.0" + }, + "optionalDependencies": { + "cpu-features": "~0.0.10", + "nan": "^2.20.0" + } + }, "services/clsi/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -42804,6 +42328,19 @@ "node": ">=8" } }, + "services/clsi/node_modules/uuid": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, "services/contacts": { "name": "@overleaf/contacts", "dependencies": { @@ -42871,7 +42408,6 @@ "services/docstore": { "name": "@overleaf/docstore", "dependencies": { - "@overleaf/fetch-utils": "*", "@overleaf/logger": "*", "@overleaf/metrics": "*", "@overleaf/o-error": "*", @@ -43182,7 +42718,6 @@ "license": "Proprietary", "dependencies": { "@google-cloud/secret-manager": "^5.6.0", - "@overleaf/fetch-utils": "*", "@overleaf/logger": "*", "@overleaf/metrics": "*", "@overleaf/mongo-utils": "*", @@ -43212,7 +42747,6 @@ "mongodb": "6.12.0", "overleaf-editor-core": "*", "p-limit": "^6.2.0", - "p-queue": "^8.1.0", "pg": "^8.7.1", "pg-query-stream": "^4.2.4", "swagger-tools": "^0.10.4", @@ 
-44694,7 +44228,6 @@ "@overleaf/promise-utils": "*", "@overleaf/redis-wrapper": "*", "@overleaf/settings": "*", - "@overleaf/stream-utils": "*", "async": "^3.2.5", "aws-sdk": "^2.650.0", "body-parser": "^1.20.3", @@ -45154,7 +44687,8 @@ "@overleaf/settings": "*", "@phosphor-icons/react": "^2.1.7", "@slack/webhook": "^7.0.2", - "@stripe/stripe-js": "^7.3.0", + "@stripe/react-stripe-js": "^3.1.1", + "@stripe/stripe-js": "^5.6.0", "@xmldom/xmldom": "^0.7.13", "accepts": "^1.3.7", "ajv": "^8.12.0", @@ -45206,7 +44740,7 @@ "moment": "^2.29.4", "mongodb-legacy": "6.1.3", "mongoose": "8.9.5", - "multer": "overleaf/multer#4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23", + "multer": "overleaf/multer#199c5ff05bd375c508f4074498237baead7f5148", "nocache": "^2.1.0", "node-fetch": "^2.7.0", "nodemailer": "^6.7.0", @@ -45641,15 +45175,6 @@ "lodash": "^4.17.15" } }, - "services/web/node_modules/@stripe/stripe-js": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/@stripe/stripe-js/-/stripe-js-7.3.0.tgz", - "integrity": "sha512-xnCyFIEI5SQnQrKkCxVj7nS5fWTZap+zuIGzmmxLMdlmgahFJaihK4zogqE8YyKKTLtrp/EldkEijSgtXsRVDg==", - "license": "MIT", - "engines": { - "node": ">=12.16" - } - }, "services/web/node_modules/@transloadit/prettier-bytes": { "version": "0.0.9", "resolved": "https://registry.npmjs.org/@transloadit/prettier-bytes/-/prettier-bytes-0.0.9.tgz", @@ -46497,18 +46022,18 @@ } }, "services/web/node_modules/multer": { - "version": "2.0.1", - "resolved": "git+ssh://git@github.com/overleaf/multer.git#4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23", - "integrity": "sha512-kkvPK48OQibR5vIoTQBbZp1uWVCvT9MrW3Y0mqdhFYJP/HVJujb4eSCEU0yj+hyf0Y+H/BKCmPdM4fJnzqAO4w==", + "version": "2.0.0", + "resolved": "git+ssh://git@github.com/overleaf/multer.git#199c5ff05bd375c508f4074498237baead7f5148", + "integrity": "sha512-S5MlIoOgrDr+a2jLS8z7jQlbzvZ0m30U2tRwdyLrxhnnMUQZYEzkVysEv10Dw41RTpM5bQQDs563Vzl1LLhxhQ==", "license": "MIT", "dependencies": { "append-field": "^1.0.0", - "busboy": "^1.6.0", 
- "concat-stream": "^2.0.0", - "mkdirp": "^0.5.6", + "busboy": "^1.0.0", + "concat-stream": "^1.5.2", + "mkdirp": "^0.5.4", "object-assign": "^4.1.1", - "type-is": "^1.6.18", - "xtend": "^4.0.2" + "type-is": "^1.6.4", + "xtend": "^4.0.0" }, "engines": { "node": ">= 10.16.0" diff --git a/package.json b/package.json index a51bbcd743..64fbd258ed 100644 --- a/package.json +++ b/package.json @@ -37,7 +37,7 @@ }, "swagger-tools": { "body-parser": "1.20.3", - "multer": "2.0.1", + "multer": "2.0.0", "path-to-regexp": "3.3.0", "qs": "6.13.0" } diff --git a/server-ce/hotfix/5.5.1/Dockerfile b/server-ce/hotfix/5.5.1/Dockerfile deleted file mode 100644 index 9572d29740..0000000000 --- a/server-ce/hotfix/5.5.1/Dockerfile +++ /dev/null @@ -1,28 +0,0 @@ -FROM sharelatex/sharelatex:5.5.0 - - -# fix tls configuration in redis for history-v1 -COPY pr_25168.patch . -RUN patch -p1 < pr_25168.patch && rm pr_25168.patch - -# improve logging in history system -COPY pr_26086.patch . -RUN patch -p1 < pr_26086.patch && rm pr_26086.patch - -# fix create-user.mjs script -COPY pr_26152.patch . -RUN patch -p1 < pr_26152.patch && rm pr_26152.patch - -# check mongo featureCompatibilityVersion -COPY pr_26091.patch . -RUN patch -p1 < pr_26091.patch && rm pr_26091.patch - -# update multer and tar-fs -RUN sed -i 's/"multer": "2.0.0"/"multer": "2.0.1"/g' package.json -RUN sed -i 's/"dockerode": "^4.0.5"/"dockerode": "^4.0.7"/g' services/clsi/package.json -RUN sed -i 's/"tar-fs": "^3.0.4"/"tar-fs": "^3.0.9"/g' services/clsi/package.json -RUN sed -i 's/199c5ff05bd375c508f4074498237baead7f5148/4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23/g' services/web/package.json -COPY package-lock.json.diff . 
-RUN patch package-lock.json < package-lock.json.diff -RUN npm install --omit=dev -RUN npm install @paralleldrive/cuid2@2.2.2 -w services/history-v1 diff --git a/server-ce/hotfix/5.5.1/package-lock.json.diff b/server-ce/hotfix/5.5.1/package-lock.json.diff deleted file mode 100644 index ecbf851bc8..0000000000 --- a/server-ce/hotfix/5.5.1/package-lock.json.diff +++ /dev/null @@ -1,2202 +0,0 @@ -4954,4956c4954,4957 -< "version": "1.8.22", -< "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.8.22.tgz", -< "integrity": "sha512-oAjDdN7fzbUi+4hZjKG96MR6KTEubAeMpQEb+77qy+3r0Ua5xTFuie6JOLr4ZZgl5g+W5/uRTS2M1V8mVAFPuA==", ---- -> "version": "1.13.4", -> "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.13.4.tgz", -> "integrity": "sha512-GsFaMXCkMqkKIvwCQjCrwH+GHbPKBjhwo/8ZuUkWHqbI73Kky9I+pQltrlT0+MWpedCoosda53lgjYfyEPgxBg==", -> "license": "Apache-2.0", -4958,4959c4959,4960 -< "@grpc/proto-loader": "^0.7.0", -< "@types/node": ">=12.12.47" ---- -> "@grpc/proto-loader": "^0.7.13", -> "@js-sdsl/ordered-map": "^4.4.2" -4962c4963 -< "node": "^8.13.0 || >=10.10.0" ---- -> "node": ">=12.10.0" -5915a5917,5928 -> "node_modules/@noble/hashes": { -> "version": "1.8.0", -> "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.8.0.tgz", -> "integrity": "sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==", -> "license": "MIT", -> "engines": { -> "node": "^14.21.3 || >=16" -> }, -> "funding": { -> "url": "https://paulmillr.com/funding/" -> } -> }, -7528a7542,7550 -> "node_modules/@paralleldrive/cuid2": { -> "version": "2.2.2", -> "resolved": "https://registry.npmjs.org/@paralleldrive/cuid2/-/cuid2-2.2.2.tgz", -> "integrity": "sha512-ZOBkgDwEdoYVlSeRbYYXs0S9MejQofiVYoTbKzy/6GQa39/q5tQU2IX46+shYnUkpEl3wc+J6wRlar7r2EK2xA==", -> "license": "MIT", -> "dependencies": { -> "@noble/hashes": "^1.1.5" -> } -> }, -13479,13481c13501,13503 -< "version": "1.0.1", -< "resolved": 
"https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", -< "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", ---- -> "version": "1.0.2", -> "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz", -> "integrity": "sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==", -13484,13485c13506,13507 -< "call-bind": "^1.0.5", -< "is-array-buffer": "^3.0.4" ---- -> "call-bound": "^1.0.3", -> "is-array-buffer": "^3.0.5" -13601,13603c13623,13625 -< "version": "1.0.3", -< "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", -< "integrity": "sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", ---- -> "version": "1.0.4", -> "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz", -> "integrity": "sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==", -13607c13629 -< "call-bind": "^1.0.5", ---- -> "call-bind": "^1.0.8", -13609,13613c13631,13634 -< "es-abstract": "^1.22.3", -< "es-errors": "^1.2.1", -< "get-intrinsic": "^1.2.3", -< "is-array-buffer": "^3.0.4", -< "is-shared-array-buffer": "^1.0.2" ---- -> "es-abstract": "^1.23.5", -> "es-errors": "^1.3.0", -> "get-intrinsic": "^1.2.6", -> "is-array-buffer": "^3.0.4" -13706a13728,13736 -> "node_modules/async-function": { -> "version": "1.0.0", -> "resolved": "https://registry.npmjs.org/async-function/-/async-function-1.0.0.tgz", -> "integrity": "sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==", -> "license": "MIT", -> "engines": { -> "node": ">= 0.4" -> } -> }, -14255,14257c14285,14287 -< "version": "4.0.1", -< "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-4.0.1.tgz", -< "integrity": 
"sha512-ilQs4fm/l9eMfWY2dY0WCIUplSUp7U0CT1vrqMg1MUdeZl4fypu5UP0XcDBK5WBQPJAKP1b7XEodISmekH/CEg==", ---- -> "version": "4.1.5", -> "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-4.1.5.tgz", -> "integrity": "sha512-1zccWBMypln0jEE05LzZt+V/8y8AQsQQqxtklqaIyg5nu6OAYFhZxPXinJTSG+kU5qyNmeLgcn9AW7eHiCHVLA==", -14261c14291 -< "bare-events": "^2.0.0", ---- -> "bare-events": "^2.5.4", -14263c14293 -< "bare-stream": "^2.0.0" ---- -> "bare-stream": "^2.6.4" -14266c14296,14304 -< "bare": ">=1.7.0" ---- -> "bare": ">=1.16.0" -> }, -> "peerDependencies": { -> "bare-buffer": "*" -> }, -> "peerDependenciesMeta": { -> "bare-buffer": { -> "optional": true -> } -14270,14272c14308,14310 -< "version": "3.6.0", -< "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-3.6.0.tgz", -< "integrity": "sha512-BUrFS5TqSBdA0LwHop4OjPJwisqxGy6JsWVqV6qaFoe965qqtaKfDzHY5T2YA1gUL0ZeeQeA+4BBc1FJTcHiPw==", ---- -> "version": "3.6.1", -> "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-3.6.1.tgz", -> "integrity": "sha512-uaIjxokhFidJP+bmmvKSgiMzj2sV5GPHaZVAIktcxcpCyBFFWO+YlikVAdhmUo2vYFvFhOXIAlldqV29L8126g==", -15110,15112c15148,15151 -< "version": "1.0.7", -< "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", -< "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", ---- -> "version": "1.0.8", -> "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", -> "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", -> "license": "MIT", -15113a15153 -> "call-bind-apply-helpers": "^1.0.0", -15115,15116d15154 -< "es-errors": "^1.3.0", -< "function-bind": "^1.1.2", -15118c15156,15185 -< "set-function-length": "^1.2.1" ---- -> "set-function-length": "^1.2.2" -> }, -> "engines": { -> "node": ">= 0.4" -> }, -> "funding": { -> "url": "https://github.com/sponsors/ljharb" -> } -> }, -> "node_modules/call-bind-apply-helpers": { -> "version": 
"1.0.2", -> "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", -> "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", -> "license": "MIT", -> "dependencies": { -> "es-errors": "^1.3.0", -> "function-bind": "^1.1.2" -> }, -> "engines": { -> "node": ">= 0.4" -> } -> }, -> "node_modules/call-bound": { -> "version": "1.0.4", -> "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", -> "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", -> "license": "MIT", -> "dependencies": { -> "call-bind-apply-helpers": "^1.0.2", -> "get-intrinsic": "^1.3.0" -15423c15490,15491 -< "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" ---- -> "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", -> "license": "ISC" -15751,15756c15819,15822 -< "version": "4.1.1", -< "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", -< "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", -< "engines": { -< "node": ">= 6" -< } ---- -> "version": "2.11.0", -> "resolved": "https://registry.npmjs.org/commander/-/commander-2.11.0.tgz", -> "integrity": "sha512-b0553uYA5YAEGgyYIGYROzKQ7X5RAqedkfjiZxwi0kL1g3bOaBNNZfYkzt/CL0umgD5wc9Jec2FbB98CjkMRvQ==", -> "license": "MIT" -15871,15873c15937,15939 -< "version": "1.6.2", -< "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", -< "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", ---- -> "version": "2.0.0", -> "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-2.0.0.tgz", -> "integrity": 
"sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==", -15875c15941 -< "node >= 0.8" ---- -> "node >= 6.0" -15876a15943 -> "license": "MIT", -15880c15947 -< "readable-stream": "^2.2.2", ---- -> "readable-stream": "^3.0.2", -15884,15910d15950 -< "node_modules/concat-stream/node_modules/isarray": { -< "version": "1.0.0", -< "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", -< "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" -< }, -< "node_modules/concat-stream/node_modules/readable-stream": { -< "version": "2.3.8", -< "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", -< "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", -< "dependencies": { -< "core-util-is": "~1.0.0", -< "inherits": "~2.0.3", -< "isarray": "~1.0.0", -< "process-nextick-args": "~2.0.0", -< "safe-buffer": "~5.1.1", -< "string_decoder": "~1.1.1", -< "util-deprecate": "~1.0.1" -< } -< }, -< "node_modules/concat-stream/node_modules/string_decoder": { -< "version": "1.1.1", -< "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", -< "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", -< "dependencies": { -< "safe-buffer": "~5.1.0" -< } -< }, -16125c16165,16166 -< "integrity": "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==" ---- -> "integrity": "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==", -> "license": "MIT" -16337a16379,16392 -> "node_modules/cpu-features": { -> "version": "0.0.10", -> "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", -> "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", -> 
"hasInstallScript": true, -> "optional": true, -> "dependencies": { -> "buildcheck": "~0.0.6", -> "nan": "^2.19.0" -> }, -> "engines": { -> "node": ">=10.0.0" -> } -> }, -17268,17270c17323,17325 -< "version": "1.0.1", -< "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz", -< "integrity": "sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==", ---- -> "version": "1.0.2", -> "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz", -> "integrity": "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==", -17273c17328 -< "call-bind": "^1.0.6", ---- -> "call-bound": "^1.0.3", -17275c17330 -< "is-data-view": "^1.0.1" ---- -> "is-data-view": "^1.0.2" -17285,17287c17340,17342 -< "version": "1.0.1", -< "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz", -< "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==", ---- -> "version": "1.0.2", -> "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz", -> "integrity": "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==", -17290c17345 -< "call-bind": "^1.0.7", ---- -> "call-bound": "^1.0.3", -17292c17347 -< "is-data-view": "^1.0.1" ---- -> "is-data-view": "^1.0.2" -17298c17353 -< "url": "https://github.com/sponsors/ljharb" ---- -> "url": "https://github.com/sponsors/inspect-js" -17302,17304c17357,17359 -< "version": "1.0.0", -< "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz", -< "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==", ---- -> "version": "1.0.1", -> "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz", -> "integrity": 
"sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==", -17307c17362 -< "call-bind": "^1.0.6", ---- -> "call-bound": "^1.0.2", -17666a17722,17731 -> "node_modules/dezalgo": { -> "version": "1.0.4", -> "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", -> "integrity": "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==", -> "license": "ISC", -> "dependencies": { -> "asap": "^2.0.0", -> "wrappy": "1" -> } -> }, -17725a17791,17872 -> "node_modules/docker-modem": { -> "version": "5.0.6", -> "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.6.tgz", -> "integrity": "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", -> "license": "Apache-2.0", -> "dependencies": { -> "debug": "^4.1.1", -> "readable-stream": "^3.5.0", -> "split-ca": "^1.0.1", -> "ssh2": "^1.15.0" -> }, -> "engines": { -> "node": ">= 8.0" -> } -> }, -> "node_modules/dockerode": { -> "version": "4.0.7", -> "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.7.tgz", -> "integrity": "sha512-R+rgrSRTRdU5mH14PZTCPZtW/zw3HDWNTS/1ZAQpL/5Upe/ye5K9WQkIysu4wBoiMwKynsz0a8qWuGsHgEvSAA==", -> "license": "Apache-2.0", -> "dependencies": { -> "@balena/dockerignore": "^1.0.2", -> "@grpc/grpc-js": "^1.11.1", -> "@grpc/proto-loader": "^0.7.13", -> "docker-modem": "^5.0.6", -> "protobufjs": "^7.3.2", -> "tar-fs": "~2.1.2", -> "uuid": "^10.0.0" -> }, -> "engines": { -> "node": ">= 8.0" -> } -> }, -> "node_modules/dockerode/node_modules/protobufjs": { -> "version": "7.5.3", -> "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.5.3.tgz", -> "integrity": "sha512-sildjKwVqOI2kmFDiXQ6aEB0fjYTafpEvIBs8tOR8qI4spuL9OPROLVu2qZqi/xgCfsHIwVqlaF8JBjWFHnKbw==", -> "hasInstallScript": true, -> "license": "BSD-3-Clause", -> "dependencies": { -> "@protobufjs/aspromise": "^1.1.2", -> "@protobufjs/base64": "^1.1.2", -> "@protobufjs/codegen": 
"^2.0.4", -> "@protobufjs/eventemitter": "^1.1.0", -> "@protobufjs/fetch": "^1.1.0", -> "@protobufjs/float": "^1.0.2", -> "@protobufjs/inquire": "^1.1.0", -> "@protobufjs/path": "^1.1.2", -> "@protobufjs/pool": "^1.1.0", -> "@protobufjs/utf8": "^1.1.0", -> "@types/node": ">=13.7.0", -> "long": "^5.0.0" -> }, -> "engines": { -> "node": ">=12.0.0" -> } -> }, -> "node_modules/dockerode/node_modules/tar-fs": { -> "version": "2.1.3", -> "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.3.tgz", -> "integrity": "sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg==", -> "license": "MIT", -> "dependencies": { -> "chownr": "^1.1.1", -> "mkdirp-classic": "^0.5.2", -> "pump": "^3.0.0", -> "tar-stream": "^2.1.4" -> } -> }, -> "node_modules/dockerode/node_modules/uuid": { -> "version": "10.0.0", -> "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", -> "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", -> "funding": [ -> "https://github.com/sponsors/broofa", -> "https://github.com/sponsors/ctavan" -> ], -> "license": "MIT", -> "bin": { -> "uuid": "dist/bin/uuid" -> } -> }, -17926a18074,18087 -> "node_modules/dunder-proto": { -> "version": "1.0.1", -> "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", -> "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", -> "license": "MIT", -> "dependencies": { -> "call-bind-apply-helpers": "^1.0.1", -> "es-errors": "^1.3.0", -> "gopd": "^1.2.0" -> }, -> "engines": { -> "node": ">= 0.4" -> } -> }, -18212,18214c18373,18375 -< "version": "1.23.3", -< "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.3.tgz", -< "integrity": "sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==", ---- -> "version": "1.24.0", -> "resolved": 
"https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz", -> "integrity": "sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg==", -18217,18218c18378,18379 -< "array-buffer-byte-length": "^1.0.1", -< "arraybuffer.prototype.slice": "^1.0.3", ---- -> "array-buffer-byte-length": "^1.0.2", -> "arraybuffer.prototype.slice": "^1.0.4", -18220,18224c18381,18386 -< "call-bind": "^1.0.7", -< "data-view-buffer": "^1.0.1", -< "data-view-byte-length": "^1.0.1", -< "data-view-byte-offset": "^1.0.0", -< "es-define-property": "^1.0.0", ---- -> "call-bind": "^1.0.8", -> "call-bound": "^1.0.4", -> "data-view-buffer": "^1.0.2", -> "data-view-byte-length": "^1.0.2", -> "data-view-byte-offset": "^1.0.1", -> "es-define-property": "^1.0.1", -18226,18233c18388,18396 -< "es-object-atoms": "^1.0.0", -< "es-set-tostringtag": "^2.0.3", -< "es-to-primitive": "^1.2.1", -< "function.prototype.name": "^1.1.6", -< "get-intrinsic": "^1.2.4", -< "get-symbol-description": "^1.0.2", -< "globalthis": "^1.0.3", -< "gopd": "^1.0.1", ---- -> "es-object-atoms": "^1.1.1", -> "es-set-tostringtag": "^2.1.0", -> "es-to-primitive": "^1.3.0", -> "function.prototype.name": "^1.1.8", -> "get-intrinsic": "^1.3.0", -> "get-proto": "^1.0.1", -> "get-symbol-description": "^1.1.0", -> "globalthis": "^1.0.4", -> "gopd": "^1.2.0", -18235,18236c18398,18399 -< "has-proto": "^1.0.3", -< "has-symbols": "^1.0.3", ---- -> "has-proto": "^1.2.0", -> "has-symbols": "^1.1.0", -18238,18239c18401,18402 -< "internal-slot": "^1.0.7", -< "is-array-buffer": "^3.0.4", ---- -> "internal-slot": "^1.1.0", -> "is-array-buffer": "^3.0.5", -18241c18404 -< "is-data-view": "^1.0.1", ---- -> "is-data-view": "^1.0.2", -18243,18248c18406,18413 -< "is-regex": "^1.1.4", -< "is-shared-array-buffer": "^1.0.3", -< "is-string": "^1.0.7", -< "is-typed-array": "^1.1.13", -< "is-weakref": "^1.0.2", -< "object-inspect": "^1.13.1", ---- -> "is-regex": "^1.2.1", -> "is-set": "^2.0.3", -> 
"is-shared-array-buffer": "^1.0.4", -> "is-string": "^1.1.1", -> "is-typed-array": "^1.1.15", -> "is-weakref": "^1.1.1", -> "math-intrinsics": "^1.1.0", -> "object-inspect": "^1.13.4", -18250,18255c18415,18424 -< "object.assign": "^4.1.5", -< "regexp.prototype.flags": "^1.5.2", -< "safe-array-concat": "^1.1.2", -< "safe-regex-test": "^1.0.3", -< "string.prototype.trim": "^1.2.9", -< "string.prototype.trimend": "^1.0.8", ---- -> "object.assign": "^4.1.7", -> "own-keys": "^1.0.1", -> "regexp.prototype.flags": "^1.5.4", -> "safe-array-concat": "^1.1.3", -> "safe-push-apply": "^1.0.0", -> "safe-regex-test": "^1.1.0", -> "set-proto": "^1.0.0", -> "stop-iteration-iterator": "^1.1.0", -> "string.prototype.trim": "^1.2.10", -> "string.prototype.trimend": "^1.0.9", -18257,18262c18426,18431 -< "typed-array-buffer": "^1.0.2", -< "typed-array-byte-length": "^1.0.1", -< "typed-array-byte-offset": "^1.0.2", -< "typed-array-length": "^1.0.6", -< "unbox-primitive": "^1.0.2", -< "which-typed-array": "^1.1.15" ---- -> "typed-array-buffer": "^1.0.3", -> "typed-array-byte-length": "^1.0.3", -> "typed-array-byte-offset": "^1.0.4", -> "typed-array-length": "^1.0.7", -> "unbox-primitive": "^1.1.0", -> "which-typed-array": "^1.1.19" -18272,18277c18441,18444 -< "version": "1.0.0", -< "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", -< "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", -< "dependencies": { -< "get-intrinsic": "^1.2.4" -< }, ---- -> "version": "1.0.1", -> "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", -> "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", -> "license": "MIT", -18318,18320c18485,18487 -< "version": "1.0.0", -< "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", -< "integrity": 
"sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", ---- -> "version": "1.1.1", -> "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", -> "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", -18330,18332c18497,18499 -< "version": "2.0.3", -< "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", -< "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", ---- -> "version": "2.1.0", -> "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", -> "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", -18335c18502,18503 -< "get-intrinsic": "^1.2.4", ---- -> "es-errors": "^1.3.0", -> "get-intrinsic": "^1.2.6", -18337c18505 -< "hasown": "^2.0.1" ---- -> "hasown": "^2.0.2" -18353,18355c18521,18524 -< "version": "1.2.1", -< "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", -< "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", ---- -> "version": "1.3.0", -> "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.3.0.tgz", -> "integrity": "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==", -> "license": "MIT", -18357,18359c18526,18528 -< "is-callable": "^1.1.4", -< "is-date-object": "^1.0.1", -< "is-symbol": "^1.0.2" ---- -> "is-callable": "^1.2.7", -> "is-date-object": "^1.0.5", -> "is-symbol": "^1.0.4" -20463a20633,20638 -> "node_modules/fast-safe-stringify": { -> "version": "2.1.1", -> "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", -> "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", -> 
"license": "MIT" -> }, -20933,20935c21108,21111 -< "version": "0.3.3", -< "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", -< "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", ---- -> "version": "0.3.5", -> "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", -> "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", -> "license": "MIT", -20937c21113,21119 -< "is-callable": "^1.1.3" ---- -> "is-callable": "^1.2.7" -> }, -> "engines": { -> "node": ">= 0.4" -> }, -> "funding": { -> "url": "https://github.com/sponsors/ljharb" -21100a21283 -> "license": "MIT", -21272,21274c21455,21458 -< "version": "1.1.6", -< "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz", -< "integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==", ---- -> "version": "1.1.8", -> "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.8.tgz", -> "integrity": "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==", -> "license": "MIT", -21276,21279c21460,21465 -< "call-bind": "^1.0.2", -< "define-properties": "^1.2.0", -< "es-abstract": "^1.22.1", -< "functions-have-names": "^1.2.3" ---- -> "call-bind": "^1.0.8", -> "call-bound": "^1.0.3", -> "define-properties": "^1.2.1", -> "functions-have-names": "^1.2.3", -> "hasown": "^2.0.2", -> "is-callable": "^1.2.7" -21385,21387c21571,21574 -< "version": "1.2.4", -< "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", -< "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", ---- -> "version": "1.3.0", -> "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", -> "integrity": 
"sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", -> "license": "MIT", -21388a21576,21577 -> "call-bind-apply-helpers": "^1.0.2", -> "es-define-property": "^1.0.1", -21389a21579 -> "es-object-atoms": "^1.1.1", -21391,21393c21581,21585 -< "has-proto": "^1.0.1", -< "has-symbols": "^1.0.3", -< "hasown": "^2.0.0" ---- -> "get-proto": "^1.0.1", -> "gopd": "^1.2.0", -> "has-symbols": "^1.1.0", -> "hasown": "^2.0.2", -> "math-intrinsics": "^1.1.0" -21420a21613,21625 -> "node_modules/get-proto": { -> "version": "1.0.1", -> "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", -> "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", -> "license": "MIT", -> "dependencies": { -> "dunder-proto": "^1.0.1", -> "es-object-atoms": "^1.0.0" -> }, -> "engines": { -> "node": ">= 0.4" -> } -> }, -21437,21439c21642,21644 -< "version": "1.0.2", -< "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", -< "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", ---- -> "version": "1.1.0", -> "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.1.0.tgz", -> "integrity": "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==", -21442c21647 -< "call-bind": "^1.0.5", ---- -> "call-bound": "^1.0.3", -21444c21649 -< "get-intrinsic": "^1.2.4" ---- -> "get-intrinsic": "^1.2.6" -21664,21666c21869,21872 -< "version": "1.0.3", -< "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", -< "integrity": "sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", ---- -> "version": "1.0.4", -> "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", -> "integrity": 
"sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", -> "license": "MIT", -21668c21874,21875 -< "define-properties": "^1.1.3" ---- -> "define-properties": "^1.2.1", -> "gopd": "^1.0.1" -22055,22059c22262,22267 -< "version": "1.0.1", -< "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", -< "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", -< "dependencies": { -< "get-intrinsic": "^1.1.3" ---- -> "version": "1.2.0", -> "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", -> "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", -> "license": "MIT", -> "engines": { -> "node": ">= 0.4" -22079a22288 -> "license": "MIT", -22300,22302c22509,22511 -< "version": "1.0.3", -< "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", -< "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", ---- -> "version": "1.2.0", -> "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.2.0.tgz", -> "integrity": "sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==", -22303a22513,22515 -> "dependencies": { -> "dunder-proto": "^1.0.0" -> }, -22312,22314c22524,22527 -< "version": "1.0.3", -< "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", -< "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", ---- -> "version": "1.1.0", -> "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", -> "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", -> "license": "MIT", -23257,23259c23470,23472 -< "version": "1.0.7", -< "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", -< "integrity": 
"sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", ---- -> "version": "1.1.0", -> "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", -> "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==", -23263,23264c23476,23477 -< "hasown": "^2.0.0", -< "side-channel": "^1.0.4" ---- -> "hasown": "^2.0.2", -> "side-channel": "^1.1.0" -23402,23404c23615,23617 -< "version": "3.0.4", -< "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", -< "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", ---- -> "version": "3.0.5", -> "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz", -> "integrity": "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==", -23407,23408c23620,23622 -< "call-bind": "^1.0.2", -< "get-intrinsic": "^1.2.1" ---- -> "call-bind": "^1.0.8", -> "call-bound": "^1.0.3", -> "get-intrinsic": "^1.2.6" -23422a23637,23655 -> "node_modules/is-async-function": { -> "version": "2.1.1", -> "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.1.tgz", -> "integrity": "sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==", -> "license": "MIT", -> "dependencies": { -> "async-function": "^1.0.0", -> "call-bound": "^1.0.3", -> "get-proto": "^1.0.1", -> "has-tostringtag": "^1.0.2", -> "safe-regex-test": "^1.1.0" -> }, -> "engines": { -> "node": ">= 0.4" -> }, -> "funding": { -> "url": "https://github.com/sponsors/ljharb" -> } -> }, -23424,23426c23657,23660 -< "version": "1.0.4", -< "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", -< "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", ---- -> "version": "1.1.0", -> "resolved": 
"https://registry.npmjs.org/is-bigint/-/is-bigint-1.1.0.tgz", -> "integrity": "sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==", -> "license": "MIT", -23428c23662,23665 -< "has-bigints": "^1.0.1" ---- -> "has-bigints": "^1.0.2" -> }, -> "engines": { -> "node": ">= 0.4" -23447,23449c23684,23687 -< "version": "1.1.2", -< "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", -< "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", ---- -> "version": "1.2.2", -> "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.2.tgz", -> "integrity": "sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==", -> "license": "MIT", -23451,23452c23689,23690 -< "call-bind": "^1.0.2", -< "has-tostringtag": "^1.0.0" ---- -> "call-bound": "^1.0.3", -> "has-tostringtag": "^1.0.2" -23517,23519c23755,23757 -< "version": "1.0.1", -< "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz", -< "integrity": "sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==", ---- -> "version": "1.0.2", -> "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.2.tgz", -> "integrity": "sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==", -23521a23760,23761 -> "call-bound": "^1.0.2", -> "get-intrinsic": "^1.2.6", -23532,23534c23772,23775 -< "version": "1.0.5", -< "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", -< "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", ---- -> "version": "1.1.0", -> "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.1.0.tgz", -> "integrity": 
"sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==", -> "license": "MIT", -23536c23777,23778 -< "has-tostringtag": "^1.0.0" ---- -> "call-bound": "^1.0.2", -> "has-tostringtag": "^1.0.2" -23601a23844,23858 -> "node_modules/is-finalizationregistry": { -> "version": "1.1.1", -> "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz", -> "integrity": "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==", -> "license": "MIT", -> "dependencies": { -> "call-bound": "^1.0.3" -> }, -> "engines": { -> "node": ">= 0.4" -> }, -> "funding": { -> "url": "https://github.com/sponsors/ljharb" -> } -> }, -23688,23691c23945,23951 -< "version": "2.0.2", -< "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.2.tgz", -< "integrity": "sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg==", -< "dev": true, ---- -> "version": "2.0.3", -> "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", -> "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", -> "license": "MIT", -> "engines": { -> "node": ">= 0.4" -> }, -23753,23755c24013,24016 -< "version": "1.0.7", -< "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", -< "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", ---- -> "version": "1.1.1", -> "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz", -> "integrity": "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==", -> "license": "MIT", -23757c24018,24019 -< "has-tostringtag": "^1.0.0" ---- -> "call-bound": "^1.0.3", -> "has-tostringtag": "^1.0.2" -23817,23819c24079,24082 -< "version": "1.1.4", -< "resolved": 
"https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", -< "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", ---- -> "version": "1.2.1", -> "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", -> "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", -> "license": "MIT", -23821,23822c24084,24087 -< "call-bind": "^1.0.2", -< "has-tostringtag": "^1.0.0" ---- -> "call-bound": "^1.0.2", -> "gopd": "^1.2.0", -> "has-tostringtag": "^1.0.2", -> "hasown": "^2.0.2" -23832,23835c24097,24103 -< "version": "2.0.2", -< "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.2.tgz", -< "integrity": "sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g==", -< "dev": true, ---- -> "version": "2.0.3", -> "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", -> "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", -> "license": "MIT", -> "engines": { -> "node": ">= 0.4" -> }, -23848,23850c24116,24118 -< "version": "1.0.3", -< "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", -< "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", ---- -> "version": "1.0.4", -> "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz", -> "integrity": "sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==", -23853c24121 -< "call-bind": "^1.0.7" ---- -> "call-bound": "^1.0.3" -23874,23876c24142,24145 -< "version": "1.0.7", -< "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", -< "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", ---- -> "version": "1.1.1", -> "resolved": 
"https://registry.npmjs.org/is-string/-/is-string-1.1.1.tgz", -> "integrity": "sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==", -> "license": "MIT", -23878c24147,24148 -< "has-tostringtag": "^1.0.0" ---- -> "call-bound": "^1.0.3", -> "has-tostringtag": "^1.0.2" -23888,23890c24158,24161 -< "version": "1.0.4", -< "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", -< "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", ---- -> "version": "1.1.1", -> "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.1.1.tgz", -> "integrity": "sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==", -> "license": "MIT", -23892c24163,24165 -< "has-symbols": "^1.0.2" ---- -> "call-bound": "^1.0.2", -> "has-symbols": "^1.1.0", -> "safe-regex-test": "^1.1.0" -23902,23904c24175,24177 -< "version": "1.1.13", -< "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", -< "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", ---- -> "version": "1.1.15", -> "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", -> "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", -23907c24180 -< "which-typed-array": "^1.1.14" ---- -> "which-typed-array": "^1.1.16" -23943,23946c24216,24222 -< "version": "2.0.1", -< "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.1.tgz", -< "integrity": "sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA==", -< "dev": true, ---- -> "version": "2.0.2", -> "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", -> "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", -> "license": "MIT", 
-> "engines": { -> "node": ">= 0.4" -> }, -23952,23954c24228,24231 -< "version": "1.0.2", -< "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", -< "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", ---- -> "version": "1.1.1", -> "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.1.1.tgz", -> "integrity": "sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==", -> "license": "MIT", -23956c24233,24236 -< "call-bind": "^1.0.2" ---- -> "call-bound": "^1.0.3" -> }, -> "engines": { -> "node": ">= 0.4" -23963,23966c24243,24246 -< "version": "2.0.2", -< "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.2.tgz", -< "integrity": "sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg==", -< "dev": true, ---- -> "version": "2.0.4", -> "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.4.tgz", -> "integrity": "sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==", -> "license": "MIT", -23968,23969c24248,24252 -< "call-bind": "^1.0.2", -< "get-intrinsic": "^1.1.1" ---- -> "call-bound": "^1.0.3", -> "get-intrinsic": "^1.2.6" -> }, -> "engines": { -> "node": ">= 0.4" -24543a24827 -> "license": "MIT", -24564a24849 -> "license": "MIT", -24568a24854,24862 -> "node_modules/json-refs/node_modules/commander": { -> "version": "4.1.1", -> "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", -> "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", -> "license": "MIT", -> "engines": { -> "node": ">= 6" -> } -> }, -24572a24867 -> "license": "MIT", -24584a24880 -> "license": "MIT", -25175,25187d25470 -< "node_modules/less/node_modules/mime": { -< "version": "1.6.0", -< "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", -< "integrity": 
"sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", -< "dev": true, -< "optional": true, -< "bin": { -< "mime": "cli.js" -< }, -< "engines": { -< "node": ">=4" -< } -< }, -25335c25618,25619 -< "integrity": "sha1-6I7suS4ruEyQZWEv2VigcZzUf5Q=" ---- -> "integrity": "sha512-tC2aLC7bbkDXKNrjDu9OLiVx9pFIvjinID2eD9PzNdAQGZScWUd/h8faqOw5d6oLsOvFRCRbz1ASoB+deyMVUw==", -> "license": "MIT" -25340c25624,25625 -< "integrity": "sha1-6UC5690nwyfgqNqxtVkWxTQelXU=", ---- -> "integrity": "sha512-VGHm6DH+1UiuafQdE/DNMqxOcSyhRu0xO9+jPDq7xITRn5YOorGrHVQmavMVXCYmTm80YRTZZCn/jTW7MokwLg==", -> "license": "MIT", -25351c25636,25637 -< "integrity": "sha1-MPgj5X4X43NdODvWK2Czh1Q7QYY=", ---- -> "integrity": "sha512-+zJVXs0VxC/Au+/7foiKzw8UaWvfSfPh20XhqK/6HFQiUeclL5fz05zY7G9yDAFItAKKZwB4cgpzGvxiwuG1wQ==", -> "license": "MIT", -25366c25652,25653 -< "integrity": "sha1-+Ob1tXip405UEXm1a47uv0oofgg=", ---- -> "integrity": "sha512-8JJ3FnMPm54t3BwPLk8q8mPyQKQXm/rt9df+awr4NGtyJrtcCXM3Of1I86S6jVy1b4yAyFBb8wbKPEauuqzRmQ==", -> "license": "MIT", -25376c25663,25664 -< "integrity": "sha1-fQsmdknLKeehOdAQO3wR+uhOSFE=", ---- -> "integrity": "sha512-SLczhg860fGW7AKlYcuOFstDtJuQhaANlJ4Y/jrOoRxhmVtK41vbJDH3OefVRSRkSCQo4HI82QVkAVsoGa5gSw==", -> "license": "MIT", -25387c25675,25676 -< "integrity": "sha1-TTHy595+E0+/KAN2K4FQsyUZZm8=", ---- -> "integrity": "sha512-x2ja1fa/qmzbizuXgVM4QAP9svtMbdxjG8Anl9bCeDAwLOVQ1vLrA0hLb/NkpbGi9evjtkl0aWLTEoOlUdBPQA==", -> "license": "MIT", -25398c25687,25688 -< "integrity": "sha1-UdaVeXPaTtVW43KQ2MGhjFPeFgc=", ---- -> "integrity": "sha512-5TCfLt1haQpsa7bgLYRKNNE4yqhO4ZxIayN1btQmazMchO6Q8JYFRMqbJ3W+uNmMm4R0Jw7KGkZX5YfDDnywuw==", -> "license": "MIT", -25409c25699,25700 -< "integrity": "sha1-+vH3+BD6mFolHCGHQESBCUg55e4=", ---- -> "integrity": "sha512-iIrScwY3atGvLVbQL/+CNUznaPwBJg78S/JO4cTUFXRkRsZgEBhscB27cVoT4tsIOUyFu/5M/0umfHNGJ6wYwg==", -> "license": "MIT", -25417c25708,25709 -< "integrity": "sha1-PqZAS3hKe+g2x7V1gOHN95sUgyw=" ---- -> 
"integrity": "sha512-BOlKGKNHhCHswGOWtmVb5zBygyxN7EmTuzVOSQI6QSoGhG+kvv71gICFS1TBpnqvT1n53txK8CDK3u5D2/GZxQ==", -> "license": "MIT" -25422c25714,25715 -< "integrity": "sha1-nUgvRjuOZq++WcLBTtsRcGAXIzQ=" ---- -> "integrity": "sha512-xKDem1BxoIfcCtaJHotjtyfdIvZO9qrF+mv3G1+ngQmaI3MJt3Qm46i9HLk/CbzABbavUrr1/EomQT8KxtsrYA==", -> "license": "MIT" -25427c25720,25721 -< "integrity": "sha1-fAt/admKH3ZSn4kLDNsbTf7BHBE=" ---- -> "integrity": "sha512-XpqGh1e7hhkOzftBfWE7zt+Yn9mVHFkDhicVttvKLsoCMLVVL+xTQjfjB4X4vtznauxv0QZ5ZAeqjvat0dh62Q==", -> "license": "MIT" -25432c25726,25727 -< "integrity": "sha1-phOWMNdtFTawfdyAliiJsIL2pkE=", ---- -> "integrity": "sha512-wwCwWX8PK/mYR5VZjcU5JFl6py/qrfLGMxzpKOfSqgA1PaZ6Z625CZLCxH1KsqyxSkOFmNm+mEYjeDpXlM4hrg==", -> "license": "MIT", -25441c25736,25737 -< "integrity": "sha1-98IAzRuS7yNrOZ7s9zxkjReqlNI=", ---- -> "integrity": "sha512-Vx0XKzpg2DFbQw4wrp1xSWd2sfl3W/BG6bucSRZmftS1AzbWRemCmBQDxyQTNhlLNec428PXkuuja+VNBZgu2A==", -> "license": "MIT", -25450c25746,25747 -< "integrity": "sha1-bpzJZm/wgfC1psl4uD4kLmlJ0gM=", ---- -> "integrity": "sha512-lBrglYxLD/6KAJ8IEa5Lg+YHgNAL7FyKqXg4XOUI+Du/vtniLs1ZqS+yHNKPkK54waAgkdUnDOYaWf+rv4B+AA==", -> "license": "MIT", -25458c25755,25756 -< "integrity": "sha1-dFz0GlNZexj2iImFREBe+isG2Q8=" ---- -> "integrity": "sha512-+odPJa4PE2UgYnQgJgkLs0UD03QU78R2ivhrFnG9GdtYOZdE6ObxOj7KiUEUlqOOgatFT+ZqSypFjDSduTigKg==", -> "license": "MIT" -25463c25761,25762 -< "integrity": "sha1-hMOVlt1xGBqXsGUpE6fJZ15Jsao=", ---- -> "integrity": "sha512-AqQ4AJz5buSx9ELXWt5dONwJyVPd4NTADMKhoVYWCugjoVf172/LpvVhwmSJn4g8/Dc0S8hxTe8rt5Dob3X9KQ==", -> "license": "MIT", -25473c25772,25773 -< "integrity": "sha1-XRn6AFyMTSNvr0dCx7eh/Kvikmc=", ---- -> "integrity": "sha512-hn2VWYZ+N9aYncRad4jORvlGgpFrn+axnPIWRvFxjk6CWcZH5b5alI8EymYsHITI23Z9wrW/+ORq+azrVFpOfw==", -> "license": "MIT", -25487c25787,25788 -< "integrity": "sha1-8pIDtAsS/uCkXTYxZIJZvrq8eGg=", ---- -> "integrity": 
"sha512-zj5vReFLkR+lJOBKP1wyteZ13zut/KSmXtdCBgxcy/m4UTitcBxpeVZT7gwk8BQrztPI5dIgO4bhBppXV4rpTQ==", -> "license": "MIT", -25517c25818,25819 -< "integrity": "sha1-/j/Do0yGyUyrb5UiVgKCdB4BYwk=", ---- -> "integrity": "sha512-AvOobAkE7qBtIiHU5QHQIfveWH5Usr9pIcFIzBv7u4S6bvb3FWpFrh9ltqBY7UeL5lw6e8d+SggiUXQVyh+FpA==", -> "license": "MIT", -25526c25828,25829 -< "integrity": "sha1-eLQer+FAX6lmRZ6kGT/VAtCEUks=", ---- -> "integrity": "sha512-VC+CKm/zSs5t3i/MHv71HZoQphuqOvez1xhjWBwHU5zAbsCYrqwHr+MyQyMk14HzA3hSRNA5lCqDMSw5G2Qscg==", -> "license": "MIT", -25547c25850,25851 -< "integrity": "sha1-ZpTP+mX++TH3wxzobHRZfPVg9PE=" ---- -> "integrity": "sha512-VRYX+8XipeLjorag5bz3YBBRJ+5kj8hVBzfnaHgXPZAVTYowBdY5l0M5ZnOmlAMCOXBFabQtm7f5VqjMKEji0w==", -> "license": "MIT" -25562c25866,25867 -< "integrity": "sha1-tSoybB9i9tfac6MdVAHfbvRPD6E=", ---- -> "integrity": "sha512-yRDd0z+APziDqbk0MqR6Qfwj/Qn3jLxFJbI9U8MuvdTnqIXdZ5YXyGLnwuzCpZmjr26F1GNOjKLMMZ10i/wy6A==", -> "license": "MIT", -25575c25880,25882 -< "integrity": "sha1-QVxEePK8wwEgwizhDtMib30+GOA=" ---- -> "integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==", -> "deprecated": "This package is deprecated. 
Use require('node:util').isDeepStrictEqual instead.", -> "license": "MIT" -25580c25887,25888 -< "integrity": "sha1-LP1XXHPkmKtX4xm3f6Aq3vE6lNE=" ---- -> "integrity": "sha512-6XcAB3izeQxPOQQNAJbbdjXbvWEt2Pn9ezPrjr4CwoLwmqsLVbsiEXD19cmmt4mbzOCOCdHzOQiUivUOJLra7w==", -> "license": "MIT" -25595c25903,25904 -< "integrity": "sha1-Wi5H/mmVPx7mMafrof5k0tBlWPU=", ---- -> "integrity": "sha512-sTebg2a1PoicYEZXD5PBdQcTlIJ6hUslrlWr7iV0O7n+i4596s2NQ9I5CaZ5FbXSfya/9WQsrYLANUJv9paYVA==", -> "license": "MIT", -25613c25922,25923 -< "integrity": "sha1-SN6kbfj/djKxDXBrissmWR4rNyc=", ---- -> "integrity": "sha512-ZpJhwvUXHSNL5wYd1RM6CUa2ZuqorG9ngoJ9Ix5Cce+uX7I5O/E06FCJdhSZ33b5dVyeQDnIlWH7B2s5uByZ7g==", -> "license": "MIT", -25634c25944,25945 -< "integrity": "sha1-T7VPgWZS5a4Q6PcvcXo4jHMmU4o=" ---- -> "integrity": "sha512-uNcV98/blRhInPUGQEnj9ekXXfG+q+rfoNSFZgl/eBfog9yBDW9gfUv2AHX/rAF7zZRlzWhbslGhbGQFZlCkZA==", -> "license": "MIT" -25650c25961,25962 -< "integrity": "sha1-Mg4LZwMWc8KNeiu12eAzGkUkBRU=", ---- -> "integrity": "sha512-6SwqWwGFHhTXEiqB/yQgu8FYd//tm786d49y7kizHVCJH7zdzs191UQn3ES3tkkDbUddNRfkCRYqJFHtbLnbCw==", -> "license": "MIT", -25997a26310,26318 -> "node_modules/math-intrinsics": { -> "version": "1.1.0", -> "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", -> "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", -> "license": "MIT", -> "engines": { -> "node": ">= 0.4" -> } -> }, -26577a26899,26910 -> "node_modules/mime": { -> "version": "1.6.0", -> "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", -> "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", -> "license": "MIT", -> "bin": { -> "mime": "cli.js" -> }, -> "engines": { -> "node": ">=4" -> } -> }, -26579,26581c26912,26915 -< "version": "1.51.0", -< "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", -< "integrity": 
"sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", ---- -> "version": "1.52.0", -> "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", -> "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", -> "license": "MIT", -26597,26599c26931,26934 -< "version": "2.1.34", -< "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", -< "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", ---- -> "version": "2.1.35", -> "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", -> "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", -> "license": "MIT", -26601c26936 -< "mime-db": "1.51.0" ---- -> "mime-db": "1.52.0" -26792c27127,27128 -< "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" ---- -> "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", -> "license": "MIT" -27248,27250c27584,27586 -< "version": "2.0.0", -< "resolved": "https://registry.npmjs.org/multer/-/multer-2.0.0.tgz", -< "integrity": "sha512-bS8rPZurbAuHGAnApbM9d4h1wSoYqrOqkE+6a64KLMK9yWU7gJXBDDVklKQ3TPi9DRb85cRs6yXaC0+cjxRtRg==", ---- -> "version": "2.0.1", -> "resolved": "https://registry.npmjs.org/multer/-/multer-2.0.1.tgz", -> "integrity": "sha512-Ug8bXeTIUlxurg8xLTEskKShvcKDZALo1THEX5E41pYCD2sCVub5/kIRIGqWNoqV6szyLyQKV6mD4QUrWE5GCQ==", -27254,27256c27590,27592 -< "busboy": "^1.0.0", -< "concat-stream": "^1.5.2", -< "mkdirp": "^0.5.4", ---- -> "busboy": "^1.6.0", -> "concat-stream": "^2.0.0", -> "mkdirp": "^0.5.6", -27258,27259c27594,27595 -< "type-is": "^1.6.4", -< "xtend": "^4.0.0" ---- -> "type-is": "^1.6.18", -> "xtend": "^4.0.2" -27384c27720,27721 -< "integrity": "sha1-IKMYwwy0X3H+et+/eyHJnBRy7xE=" ---- -> 
"integrity": "sha512-zkVhZUA3y8mbz652WrL5x0fB0ehrBkulWT3TomAQ9iDtyXZvzKeEA6GPxAItBYeNYl5yngKRX612qHOhvMkDeg==", -> "license": "MIT" -27842,27844c28179,28185 -< "version": "1.13.1", -< "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", -< "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", ---- -> "version": "1.13.4", -> "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", -> "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", -> "license": "MIT", -> "engines": { -> "node": ">= 0.4" -> }, -27886,27888c28227,28229 -< "version": "4.1.5", -< "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", -< "integrity": "sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==", ---- -> "version": "4.1.7", -> "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz", -> "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==", -27891c28232,28233 -< "call-bind": "^1.0.5", ---- -> "call-bind": "^1.0.8", -> "call-bound": "^1.0.3", -27893c28235,28236 -< "has-symbols": "^1.0.3", ---- -> "es-object-atoms": "^1.0.0", -> "has-symbols": "^1.1.0", -28129a28473,28489 -> "node_modules/own-keys": { -> "version": "1.0.1", -> "resolved": "https://registry.npmjs.org/own-keys/-/own-keys-1.0.1.tgz", -> "integrity": "sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==", -> "license": "MIT", -> "dependencies": { -> "get-intrinsic": "^1.2.6", -> "object-keys": "^1.1.1", -> "safe-push-apply": "^1.0.0" -> }, -> "engines": { -> "node": ">= 0.4" -> }, -> "funding": { -> "url": "https://github.com/sponsors/ljharb" -> } -> }, -28684,28686c29044,29047 -< "version": "1.0.10", -< "resolved": 
"https://registry.npmjs.org/path-loader/-/path-loader-1.0.10.tgz", -< "integrity": "sha512-CMP0v6S6z8PHeJ6NFVyVJm6WyJjIwFvyz2b0n2/4bKdS/0uZa/9sKUlYZzubrn3zuDRU0zIuEDX9DZYQ2ZI8TA==", ---- -> "version": "1.0.12", -> "resolved": "https://registry.npmjs.org/path-loader/-/path-loader-1.0.12.tgz", -> "integrity": "sha512-n7oDG8B+k/p818uweWrOixY9/Dsr89o2TkCm6tOTex3fpdo2+BFDgR+KpB37mGKBRsBAlR8CIJMFN0OEy/7hIQ==", -> "license": "MIT", -28689c29050,29129 -< "superagent": "^3.8.3" ---- -> "superagent": "^7.1.6" -> } -> }, -> "node_modules/path-loader/node_modules/debug": { -> "version": "4.4.1", -> "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", -> "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", -> "license": "MIT", -> "dependencies": { -> "ms": "^2.1.3" -> }, -> "engines": { -> "node": ">=6.0" -> }, -> "peerDependenciesMeta": { -> "supports-color": { -> "optional": true -> } -> } -> }, -> "node_modules/path-loader/node_modules/formidable": { -> "version": "2.1.5", -> "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.1.5.tgz", -> "integrity": "sha512-Oz5Hwvwak/DCaXVVUtPn4oLMLLy1CdclLKO1LFgU7XzDpVMUU5UjlSLpGMocyQNNk8F6IJW9M/YdooSn2MRI+Q==", -> "license": "MIT", -> "dependencies": { -> "@paralleldrive/cuid2": "^2.2.2", -> "dezalgo": "^1.0.4", -> "once": "^1.4.0", -> "qs": "^6.11.0" -> }, -> "funding": { -> "url": "https://ko-fi.com/tunnckoCore/commissions" -> } -> }, -> "node_modules/path-loader/node_modules/mime": { -> "version": "2.6.0", -> "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", -> "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", -> "license": "MIT", -> "bin": { -> "mime": "cli.js" -> }, -> "engines": { -> "node": ">=4.0.0" -> } -> }, -> "node_modules/path-loader/node_modules/semver": { -> "version": "7.7.2", -> "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", -> "integrity": 
"sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", -> "license": "ISC", -> "bin": { -> "semver": "bin/semver.js" -> }, -> "engines": { -> "node": ">=10" -> } -> }, -> "node_modules/path-loader/node_modules/superagent": { -> "version": "7.1.6", -> "resolved": "https://registry.npmjs.org/superagent/-/superagent-7.1.6.tgz", -> "integrity": "sha512-gZkVCQR1gy/oUXr+kxJMLDjla434KmSOKbx5iGD30Ql+AkJQ/YlPKECJy2nhqOsHLjGHzoDTXNSjhnvWhzKk7g==", -> "deprecated": "Please upgrade to v9.0.0+ as we have fixed a public vulnerability with formidable dependency. Note that v9.0.0+ requires Node.js v14.18.0+. See https://github.com/ladjs/superagent/pull/1800 for insight. This project is supported and maintained by the team at Forward Email @ https://forwardemail.net", -> "license": "MIT", -> "dependencies": { -> "component-emitter": "^1.3.0", -> "cookiejar": "^2.1.3", -> "debug": "^4.3.4", -> "fast-safe-stringify": "^2.1.1", -> "form-data": "^4.0.0", -> "formidable": "^2.0.1", -> "methods": "^1.1.2", -> "mime": "2.6.0", -> "qs": "^6.10.3", -> "readable-stream": "^3.6.0", -> "semver": "^7.3.7" -> }, -> "engines": { -> "node": ">=6.4.0 <13 || >=14" -31146a31587,31608 -> "node_modules/reflect.getprototypeof": { -> "version": "1.0.10", -> "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", -> "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==", -> "license": "MIT", -> "dependencies": { -> "call-bind": "^1.0.8", -> "define-properties": "^1.2.1", -> "es-abstract": "^1.23.9", -> "es-errors": "^1.3.0", -> "es-object-atoms": "^1.0.0", -> "get-intrinsic": "^1.2.7", -> "get-proto": "^1.0.1", -> "which-builtin-type": "^1.2.1" -> }, -> "engines": { -> "node": ">= 0.4" -> }, -> "funding": { -> "url": "https://github.com/sponsors/ljharb" -> } -> }, -31211,31213c31673,31675 -< "version": "1.5.2", -< "resolved": 
"https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", -< "integrity": "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", ---- -> "version": "1.5.4", -> "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz", -> "integrity": "sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==", -31216c31678 -< "call-bind": "^1.0.6", ---- -> "call-bind": "^1.0.8", -31219c31681,31683 -< "set-function-name": "^2.0.1" ---- -> "get-proto": "^1.0.1", -> "gopd": "^1.2.0", -> "set-function-name": "^2.0.2" -31679,31681c32143,32145 -< "version": "1.1.2", -< "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz", -< "integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==", ---- -> "version": "1.1.3", -> "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.3.tgz", -> "integrity": "sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==", -31684,31686c32148,32151 -< "call-bind": "^1.0.7", -< "get-intrinsic": "^1.2.4", -< "has-symbols": "^1.0.3", ---- -> "call-bind": "^1.0.8", -> "call-bound": "^1.0.2", -> "get-intrinsic": "^1.2.6", -> "has-symbols": "^1.1.0", -31706a32172,32187 -> "node_modules/safe-push-apply": { -> "version": "1.0.0", -> "resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz", -> "integrity": "sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==", -> "license": "MIT", -> "dependencies": { -> "es-errors": "^1.3.0", -> "isarray": "^2.0.5" -> }, -> "engines": { -> "node": ">= 0.4" -> }, -> "funding": { -> "url": "https://github.com/sponsors/ljharb" -> } -> }, -31717,31719c32198,32200 -< "version": "1.0.3", -< "resolved": 
"https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", -< "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", ---- -> "version": "1.1.0", -> "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz", -> "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==", -31722c32203 -< "call-bind": "^1.0.6", ---- -> "call-bound": "^1.0.2", -31724c32205 -< "is-regex": "^1.1.4" ---- -> "is-regex": "^1.2.1" -32123,32133d32603 -< "node_modules/send/node_modules/mime": { -< "version": "1.6.0", -< "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", -< "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", -< "bin": { -< "mime": "cli.js" -< }, -< "engines": { -< "node": ">=4" -< } -< }, -32288,32290c32758,32761 -< "version": "2.0.1", -< "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", -< "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==", ---- -> "version": "2.0.2", -> "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", -> "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", -> "license": "MIT", -32292c32763,32764 -< "define-data-property": "^1.0.1", ---- -> "define-data-property": "^1.1.4", -> "es-errors": "^1.3.0", -32294c32766,32780 -< "has-property-descriptors": "^1.0.0" ---- -> "has-property-descriptors": "^1.0.2" -> }, -> "engines": { -> "node": ">= 0.4" -> } -> }, -> "node_modules/set-proto": { -> "version": "1.0.0", -> "resolved": "https://registry.npmjs.org/set-proto/-/set-proto-1.0.0.tgz", -> "integrity": "sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==", -> "license": "MIT", -> "dependencies": { -> 
"dunder-proto": "^1.0.1", -> "es-errors": "^1.3.0", -> "es-object-atoms": "^1.0.0" -32391,32393c32877,32880 -< "version": "1.0.6", -< "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", -< "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", ---- -> "version": "1.1.0", -> "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", -> "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", -> "license": "MIT", -32395d32881 -< "call-bind": "^1.0.7", -32397,32398c32883,32939 -< "get-intrinsic": "^1.2.4", -< "object-inspect": "^1.13.1" ---- -> "object-inspect": "^1.13.3", -> "side-channel-list": "^1.0.0", -> "side-channel-map": "^1.0.1", -> "side-channel-weakmap": "^1.0.2" -> }, -> "engines": { -> "node": ">= 0.4" -> }, -> "funding": { -> "url": "https://github.com/sponsors/ljharb" -> } -> }, -> "node_modules/side-channel-list": { -> "version": "1.0.0", -> "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", -> "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", -> "license": "MIT", -> "dependencies": { -> "es-errors": "^1.3.0", -> "object-inspect": "^1.13.3" -> }, -> "engines": { -> "node": ">= 0.4" -> }, -> "funding": { -> "url": "https://github.com/sponsors/ljharb" -> } -> }, -> "node_modules/side-channel-map": { -> "version": "1.0.1", -> "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", -> "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", -> "license": "MIT", -> "dependencies": { -> "call-bound": "^1.0.2", -> "es-errors": "^1.3.0", -> "get-intrinsic": "^1.2.5", -> "object-inspect": "^1.13.3" -> }, -> "engines": { -> "node": ">= 0.4" -> }, -> "funding": { -> "url": "https://github.com/sponsors/ljharb" -> } -> }, 
-> "node_modules/side-channel-weakmap": { -> "version": "1.0.2", -> "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", -> "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", -> "license": "MIT", -> "dependencies": { -> "call-bound": "^1.0.2", -> "es-errors": "^1.3.0", -> "get-intrinsic": "^1.2.5", -> "object-inspect": "^1.13.3", -> "side-channel-map": "^1.0.1" -32871c33412,33413 -< "integrity": "sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw==" ---- -> "integrity": "sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw==", -> "license": "(WTFPL OR MIT)" -32960c33502,33503 -< "integrity": "sha1-bIOv82kvphJW4M0ZfgXp3hV2kaY=" ---- -> "integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==", -> "license": "ISC" -32994a33538,33554 -> "node_modules/ssh2": { -> "version": "1.16.0", -> "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz", -> "integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==", -> "hasInstallScript": true, -> "dependencies": { -> "asn1": "^0.2.6", -> "bcrypt-pbkdf": "^1.0.2" -> }, -> "engines": { -> "node": ">=10.16.0" -> }, -> "optionalDependencies": { -> "cpu-features": "~0.0.10", -> "nan": "^2.20.0" -> } -> }, -33095,33098c33655,33658 -< "version": "1.0.0", -< "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz", -< "integrity": "sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==", -< "dev": true, ---- -> "version": "1.1.0", -> "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz", -> "integrity": "sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==", -> 
"license": "MIT", -33100c33660,33661 -< "internal-slot": "^1.0.4" ---- -> "es-errors": "^1.3.0", -> "internal-slot": "^1.1.0" -33265,33267c33826,33828 -< "version": "1.2.9", -< "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", -< "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==", ---- -> "version": "1.2.10", -> "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.10.tgz", -> "integrity": "sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==", -33270c33831,33833 -< "call-bind": "^1.0.7", ---- -> "call-bind": "^1.0.8", -> "call-bound": "^1.0.2", -> "define-data-property": "^1.1.4", -33272,33273c33835,33837 -< "es-abstract": "^1.23.0", -< "es-object-atoms": "^1.0.0" ---- -> "es-abstract": "^1.23.5", -> "es-object-atoms": "^1.0.0", -> "has-property-descriptors": "^1.0.2" -33283,33285c33847,33849 -< "version": "1.0.8", -< "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz", -< "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==", ---- -> "version": "1.0.9", -> "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.9.tgz", -> "integrity": "sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ==", -33288c33852,33853 -< "call-bind": "^1.0.7", ---- -> "call-bind": "^1.0.8", -> "call-bound": "^1.0.2", -33291a33857,33859 -> "engines": { -> "node": ">= 0.4" -> }, -33781c34349,34350 -< "deprecated": "Please upgrade to v7.0.2+ of superagent. We have fixed numerous issues with streams, form-data, attach(), filesystem errors not bubbling up (ENOENT on attach()), and all tests are now passing. See the releases tab for more information at . Thanks to @shadowgate15, @spence-s, and @niftylettuce. 
Superagent is sponsored by Forward Email at .", ---- -> "deprecated": "Please upgrade to v9.0.0+ as we have fixed a public vulnerability with formidable dependency. Note that v9.0.0+ requires Node.js v14.18.0+. See https://github.com/ladjs/superagent/pull/1800 for insight. This project is supported and maintained by the team at Forward Email @ https://forwardemail.net", -> "license": "MIT", -33801a34371 -> "license": "MIT", -33807,33809c34377,34380 -< "version": "2.5.1", -< "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", -< "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", ---- -> "version": "2.5.3", -> "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.3.tgz", -> "integrity": "sha512-XHIrMD0NpDrNM/Ckf7XJiBbLl57KEhT3+i3yY+eWm+cqYZJQTZrKo8Y8AWKnuV5GT4scfuUGt9LzNoIx3dU1nQ==", -> "license": "MIT", -33812,33813c34383,34386 -< "combined-stream": "^1.0.6", -< "mime-types": "^2.1.12" ---- -> "combined-stream": "^1.0.8", -> "es-set-tostringtag": "^2.1.0", -> "mime-types": "^2.1.35", -> "safe-buffer": "^5.2.1" -33818a34392,34411 -> "node_modules/superagent/node_modules/form-data/node_modules/safe-buffer": { -> "version": "5.2.1", -> "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", -> "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", -> "funding": [ -> { -> "type": "github", -> "url": "https://github.com/sponsors/feross" -> }, -> { -> "type": "patreon", -> "url": "https://www.patreon.com/feross" -> }, -> { -> "type": "consulting", -> "url": "https://feross.org/support" -> } -> ], -> "license": "MIT" -> }, -33822,33833c34415,34416 -< "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" -< }, -< "node_modules/superagent/node_modules/mime": { -< "version": "1.6.0", -< "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", -< "integrity": 
"sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", -< "bin": { -< "mime": "cli.js" -< }, -< "engines": { -< "node": ">=4" -< } ---- -> "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", -> "license": "MIT" -33836,33838c34419,34422 -< "version": "2.3.7", -< "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", -< "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", ---- -> "version": "2.3.8", -> "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", -> "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", -> "license": "MIT", -33852a34437 -> "license": "MIT", -34099c34684,34685 -< "integrity": "sha1-oJdRnG8e5N1n4wjZtT3cnCslf5c=", ---- -> "integrity": "sha512-O2hZbWqq8x6j0uZ4qWj5dw45WPoAxKsJLJZqOgTqRtPNi8IqA+rDkDV/48S8qanS3KGv1QcVoPNLivMbyHHdAQ==", -> "license": "MIT", -34150,34155d34735 -< "node_modules/swagger-tools/node_modules/commander": { -< "version": "2.11.0", -< "resolved": "https://registry.npmjs.org/commander/-/commander-2.11.0.tgz", -< "integrity": "sha512-b0553uYA5YAEGgyYIGYROzKQ7X5RAqedkfjiZxwi0kL1g3bOaBNNZfYkzt/CL0umgD5wc9Jec2FbB98CjkMRvQ==", -< "license": "MIT" -< }, -34257,34259c34837,34839 -< "version": "3.0.8", -< "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.8.tgz", -< "integrity": "sha512-ZoROL70jptorGAlgAYiLoBLItEKw/fUxg9BSYK/dF/GAGYFJOJJJMvjPAKDJraCXFwadD456FCuvLWgfhMsPwg==", ---- -> "version": "3.0.9", -> "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.9.tgz", -> "integrity": "sha512-XF4w9Xp+ZQgifKakjZYmFdkLoSWd34VGKcsTCwlNWM7QG3ZbaxnTsaBwnjFZqHRf/rROxaR8rXnbtwdvaDI+lA==", -34943,34945c35523,35525 -< "version": "0.6.9", -< "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.9.tgz", -< "integrity": 
"sha512-7bBrcF+/LQzSgFmT0X5YclVqQxtv7TDJ1f8Wj7ibBu/U6BMLeOpUxuZjV7rMc44UtKxlnMFigdhFAIszSX1DMg==", ---- -> "version": "0.6.11", -> "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.11.tgz", -> "integrity": "sha512-vxXDZg8/+p3gblxB6BhhG5yWVn1kGRlaL8O78UDXc3wRnPizB5g83dcvWV1jpDMIPnjZjOFuxlMmE82XJ4407w==", -34948,34950c35528,35530 -< "gopd": "^1.0.1", -< "typedarray.prototype.slice": "^1.0.3", -< "which-typed-array": "^1.1.15" ---- -> "gopd": "^1.2.0", -> "typedarray.prototype.slice": "^1.0.5", -> "which-typed-array": "^1.1.18" -35110,35112c35690,35692 -< "version": "1.0.2", -< "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz", -< "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==", ---- -> "version": "1.0.3", -> "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", -> "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==", -35115c35695 -< "call-bind": "^1.0.7", ---- -> "call-bound": "^1.0.3", -35117c35697 -< "is-typed-array": "^1.1.13" ---- -> "is-typed-array": "^1.1.14" -35124,35126c35704,35706 -< "version": "1.0.1", -< "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", -< "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", ---- -> "version": "1.0.3", -> "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.3.tgz", -> "integrity": "sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg==", -35129c35709 -< "call-bind": "^1.0.7", ---- -> "call-bind": "^1.0.8", -35131,35133c35711,35713 -< "gopd": "^1.0.1", -< "has-proto": "^1.0.3", -< "is-typed-array": "^1.1.13" ---- -> "gopd": "^1.2.0", -> "has-proto": "^1.2.0", -> "is-typed-array": "^1.1.14" -35143,35145c35723,35725 -< 
"version": "1.0.2", -< "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz", -< "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==", ---- -> "version": "1.0.4", -> "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.4.tgz", -> "integrity": "sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ==", -35149c35729 -< "call-bind": "^1.0.7", ---- -> "call-bind": "^1.0.8", -35151,35153c35731,35734 -< "gopd": "^1.0.1", -< "has-proto": "^1.0.3", -< "is-typed-array": "^1.1.13" ---- -> "gopd": "^1.2.0", -> "has-proto": "^1.2.0", -> "is-typed-array": "^1.1.15", -> "reflect.getprototypeof": "^1.0.9" -35163,35165c35744,35746 -< "version": "1.0.6", -< "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz", -< "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==", ---- -> "version": "1.0.7", -> "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.7.tgz", -> "integrity": "sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==", -35171d35751 -< "has-proto": "^1.0.3", -35173c35753,35754 -< "possible-typed-array-names": "^1.0.0" ---- -> "possible-typed-array-names": "^1.0.0", -> "reflect.getprototypeof": "^1.0.6" -35188,35190c35769,35771 -< "version": "1.0.3", -< "resolved": "https://registry.npmjs.org/typedarray.prototype.slice/-/typedarray.prototype.slice-1.0.3.tgz", -< "integrity": "sha512-8WbVAQAUlENo1q3c3zZYuy5k9VzBQvp8AX9WOtbvyWlLM1v5JaSRmjubLjzHF4JFtptjH/5c/i95yaElvcjC0A==", ---- -> "version": "1.0.5", -> "resolved": "https://registry.npmjs.org/typedarray.prototype.slice/-/typedarray.prototype.slice-1.0.5.tgz", -> "integrity": "sha512-q7QNVDGTdl702bVFiI5eY4l/HkgCM6at9KhcFbgUAzezHFbOVy4+0O/lCjsABEQwbZPravVfBIiBVGo89yzHFg==", 
-35193c35774 -< "call-bind": "^1.0.7", ---- -> "call-bind": "^1.0.8", -35195c35776 -< "es-abstract": "^1.23.0", ---- -> "es-abstract": "^1.23.9", -35197,35198c35778,35781 -< "typed-array-buffer": "^1.0.2", -< "typed-array-byte-offset": "^1.0.2" ---- -> "get-proto": "^1.0.1", -> "math-intrinsics": "^1.1.0", -> "typed-array-buffer": "^1.0.3", -> "typed-array-byte-offset": "^1.0.4" -35274,35276c35857,35860 -< "version": "1.0.2", -< "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", -< "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", ---- -> "version": "1.1.0", -> "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz", -> "integrity": "sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==", -> "license": "MIT", -35278c35862 -< "call-bind": "^1.0.2", ---- -> "call-bound": "^1.0.3", -35280,35281c35864,35868 -< "has-symbols": "^1.0.3", -< "which-boxed-primitive": "^1.0.2" ---- -> "has-symbols": "^1.1.0", -> "which-boxed-primitive": "^1.1.1" -> }, -> "engines": { -> "node": ">= 0.4" -35709a36297,36305 -> "node_modules/validator": { -> "version": "10.11.0", -> "resolved": "https://registry.npmjs.org/validator/-/validator-10.11.0.tgz", -> "integrity": "sha512-X/p3UZerAIsbBfN/IwahhYaBbY68EN/UQBWHtsbXGT5bfrH/p4NQzUCG1kF/rtKaNpnJ7jAu6NGTdSNtyNIXMw==", -> "license": "MIT", -> "engines": { -> "node": ">= 0.10" -> } -> }, -36697,36699c37293,37296 -< "version": "1.0.2", -< "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", -< "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", ---- -> "version": "1.1.1", -> "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.1.1.tgz", -> "integrity": 
"sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==", -> "license": "MIT", -36701,36705c37298,37332 -< "is-bigint": "^1.0.1", -< "is-boolean-object": "^1.1.0", -< "is-number-object": "^1.0.4", -< "is-string": "^1.0.5", -< "is-symbol": "^1.0.3" ---- -> "is-bigint": "^1.1.0", -> "is-boolean-object": "^1.2.1", -> "is-number-object": "^1.1.1", -> "is-string": "^1.1.1", -> "is-symbol": "^1.1.1" -> }, -> "engines": { -> "node": ">= 0.4" -> }, -> "funding": { -> "url": "https://github.com/sponsors/ljharb" -> } -> }, -> "node_modules/which-builtin-type": { -> "version": "1.2.1", -> "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.2.1.tgz", -> "integrity": "sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q==", -> "license": "MIT", -> "dependencies": { -> "call-bound": "^1.0.2", -> "function.prototype.name": "^1.1.6", -> "has-tostringtag": "^1.0.2", -> "is-async-function": "^2.0.0", -> "is-date-object": "^1.1.0", -> "is-finalizationregistry": "^1.1.0", -> "is-generator-function": "^1.0.10", -> "is-regex": "^1.2.1", -> "is-weakref": "^1.0.2", -> "isarray": "^2.0.5", -> "which-boxed-primitive": "^1.1.0", -> "which-collection": "^1.0.2", -> "which-typed-array": "^1.1.16" -> }, -> "engines": { -> "node": ">= 0.4" -36712,36715c37339,37342 -< "version": "1.0.1", -< "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.1.tgz", -< "integrity": "sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A==", -< "dev": true, ---- -> "version": "1.0.2", -> "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz", -> "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==", -> "license": "MIT", -36717,36720c37344,37350 -< "is-map": "^2.0.1", -< "is-set": "^2.0.1", -< "is-weakmap": "^2.0.1", -< "is-weakset": "^2.0.1" ---- -> 
"is-map": "^2.0.3", -> "is-set": "^2.0.3", -> "is-weakmap": "^2.0.2", -> "is-weakset": "^2.0.3" -> }, -> "engines": { -> "node": ">= 0.4" -36733,36735c37363,37365 -< "version": "1.1.15", -< "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", -< "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", ---- -> "version": "1.1.19", -> "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", -> "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", -36739,36741c37369,37373 -< "call-bind": "^1.0.7", -< "for-each": "^0.3.3", -< "gopd": "^1.0.1", ---- -> "call-bind": "^1.0.8", -> "call-bound": "^1.0.4", -> "for-each": "^0.3.5", -> "get-proto": "^1.0.1", -> "gopd": "^1.2.0", -37217a37850 -> "license": "MIT", -37231,37236d37863 -< "node_modules/z-schema/node_modules/commander": { -< "version": "2.20.3", -< "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", -< "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", -< "optional": true -< }, -37241,37250c37868,37870 -< "deprecated": "core-js@<3.4 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. 
Please, upgrade your dependencies to the actual version of core-js.", -< "hasInstallScript": true -< }, -< "node_modules/z-schema/node_modules/validator": { -< "version": "10.11.0", -< "resolved": "https://registry.npmjs.org/validator/-/validator-10.11.0.tgz", -< "integrity": "sha512-X/p3UZerAIsbBfN/IwahhYaBbY68EN/UQBWHtsbXGT5bfrH/p4NQzUCG1kF/rtKaNpnJ7jAu6NGTdSNtyNIXMw==", -< "engines": { -< "node": ">= 0.10" -< } ---- -> "deprecated": "core-js@<3.23.3 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Some versions have web compatibility issues. Please, upgrade your dependencies to the actual version of core-js.", -> "hasInstallScript": true, -> "license": "MIT" -37358c37978 -< "dockerode": "^4.0.5", ---- -> "dockerode": "^4.0.7", -37364c37984 -< "tar-fs": "^3.0.4", ---- -> "tar-fs": "^3.0.9", -37425,37451d38044 -< "services/clsi/node_modules/@grpc/grpc-js": { -< "version": "1.13.2", -< "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.13.2.tgz", -< "integrity": "sha512-nnR5nmL6lxF8YBqb6gWvEgLdLh/Fn+kvAdX5hUOnt48sNSb0riz/93ASd2E5gvanPA41X6Yp25bIfGRp1SMb2g==", -< "license": "Apache-2.0", -< "dependencies": { -< "@grpc/proto-loader": "^0.7.13", -< "@js-sdsl/ordered-map": "^4.4.2" -< }, -< "engines": { -< "node": ">=12.10.0" -< } -< }, -< "services/clsi/node_modules/cpu-features": { -< "version": "0.0.10", -< "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", -< "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", -< "hasInstallScript": true, -< "optional": true, -< "dependencies": { -< "buildcheck": "~0.0.6", -< "nan": "^2.19.0" -< }, -< "engines": { -< "node": ">=10.0.0" -< } -< }, -37461,37529d38053 -< "services/clsi/node_modules/docker-modem": { -< "version": "5.0.6", -< "resolved": 
"https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.6.tgz", -< "integrity": "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", -< "license": "Apache-2.0", -< "dependencies": { -< "debug": "^4.1.1", -< "readable-stream": "^3.5.0", -< "split-ca": "^1.0.1", -< "ssh2": "^1.15.0" -< }, -< "engines": { -< "node": ">= 8.0" -< } -< }, -< "services/clsi/node_modules/dockerode": { -< "version": "4.0.5", -< "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.5.tgz", -< "integrity": "sha512-ZPmKSr1k1571Mrh7oIBS/j0AqAccoecY2yH420ni5j1KyNMgnoTh4Nu4FWunh0HZIJmRSmSysJjBIpa/zyWUEA==", -< "license": "Apache-2.0", -< "dependencies": { -< "@balena/dockerignore": "^1.0.2", -< "@grpc/grpc-js": "^1.11.1", -< "@grpc/proto-loader": "^0.7.13", -< "docker-modem": "^5.0.6", -< "protobufjs": "^7.3.2", -< "tar-fs": "~2.1.2", -< "uuid": "^10.0.0" -< }, -< "engines": { -< "node": ">= 8.0" -< } -< }, -< "services/clsi/node_modules/dockerode/node_modules/tar-fs": { -< "version": "2.1.2", -< "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.2.tgz", -< "integrity": "sha512-EsaAXwxmx8UB7FRKqeozqEPop69DXcmYwTQwXvyAPF352HJsPdkVhvTaDPYqfNgruveJIJy3TA2l+2zj8LJIJA==", -< "license": "MIT", -< "dependencies": { -< "chownr": "^1.1.1", -< "mkdirp-classic": "^0.5.2", -< "pump": "^3.0.0", -< "tar-stream": "^2.1.4" -< } -< }, -< "services/clsi/node_modules/protobufjs": { -< "version": "7.4.0", -< "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", -< "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==", -< "hasInstallScript": true, -< "license": "BSD-3-Clause", -< "dependencies": { -< "@protobufjs/aspromise": "^1.1.2", -< "@protobufjs/base64": "^1.1.2", -< "@protobufjs/codegen": "^2.0.4", -< "@protobufjs/eventemitter": "^1.1.0", -< "@protobufjs/fetch": "^1.1.0", -< "@protobufjs/float": "^1.0.2", -< "@protobufjs/inquire": "^1.1.0", -< "@protobufjs/path": 
"^1.1.2", -< "@protobufjs/pool": "^1.1.0", -< "@protobufjs/utf8": "^1.1.0", -< "@types/node": ">=13.7.0", -< "long": "^5.0.0" -< }, -< "engines": { -< "node": ">=12.0.0" -< } -< }, -37549,37565d38072 -< "services/clsi/node_modules/ssh2": { -< "version": "1.16.0", -< "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz", -< "integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==", -< "hasInstallScript": true, -< "dependencies": { -< "asn1": "^0.2.6", -< "bcrypt-pbkdf": "^1.0.2" -< }, -< "engines": { -< "node": ">=10.16.0" -< }, -< "optionalDependencies": { -< "cpu-features": "~0.0.10", -< "nan": "^2.20.0" -< } -< }, -37578,37590d38084 -< "services/clsi/node_modules/uuid": { -< "version": "10.0.0", -< "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", -< "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", -< "funding": [ -< "https://github.com/sponsors/broofa", -< "https://github.com/sponsors/ctavan" -< ], -< "license": "MIT", -< "bin": { -< "uuid": "dist/bin/uuid" -< } -< }, -38683c39177 -< "multer": "overleaf/multer#199c5ff05bd375c508f4074498237baead7f5148", ---- -> "multer": "github:overleaf/multer#4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23", -39602,39604c40096,40098 -< "version": "2.0.0", -< "resolved": "git+ssh://git@github.com/overleaf/multer.git#199c5ff05bd375c508f4074498237baead7f5148", -< "integrity": "sha512-S5MlIoOgrDr+a2jLS8z7jQlbzvZ0m30U2tRwdyLrxhnnMUQZYEzkVysEv10Dw41RTpM5bQQDs563Vzl1LLhxhQ==", ---- -> "version": "2.0.1", -> "resolved": "git+ssh://git@github.com/overleaf/multer.git#4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23", -> "integrity": "sha512-kkvPK48OQibR5vIoTQBbZp1uWVCvT9MrW3Y0mqdhFYJP/HVJujb4eSCEU0yj+hyf0Y+H/BKCmPdM4fJnzqAO4w==", -39608,39610c40102,40104 -< "busboy": "^1.0.0", -< "concat-stream": "^1.5.2", -< "mkdirp": "^0.5.4", ---- -> "busboy": "^1.6.0", -> "concat-stream": "^2.0.0", -> "mkdirp": "^0.5.6", 
-39612,39613c40106,40107 -< "type-is": "^1.6.4", -< "xtend": "^4.0.0" ---- -> "type-is": "^1.6.18", -> "xtend": "^4.0.2" diff --git a/server-ce/hotfix/5.5.1/pr_25168.patch b/server-ce/hotfix/5.5.1/pr_25168.patch deleted file mode 100644 index 5d496d1f67..0000000000 --- a/server-ce/hotfix/5.5.1/pr_25168.patch +++ /dev/null @@ -1,19 +0,0 @@ ---- a/services/history-v1/config/custom-environment-variables.json -+++ b/services/history-v1/config/custom-environment-variables.json -@@ -50,12 +50,14 @@ - "history": { - "host": "OVERLEAF_REDIS_HOST", - "password": "OVERLEAF_REDIS_PASS", -- "port": "OVERLEAF_REDIS_PORT" -+ "port": "OVERLEAF_REDIS_PORT", -+ "tls": "OVERLEAF_REDIS_TLS" - }, - "lock": { - "host": "OVERLEAF_REDIS_HOST", - "password": "OVERLEAF_REDIS_PASS", -- "port": "OVERLEAF_REDIS_PORT" -+ "port": "OVERLEAF_REDIS_PORT", -+ "tls": "OVERLEAF_REDIS_TLS" - } - } - } diff --git a/server-ce/hotfix/5.5.1/pr_26086.patch b/server-ce/hotfix/5.5.1/pr_26086.patch deleted file mode 100644 index fec417b3a5..0000000000 --- a/server-ce/hotfix/5.5.1/pr_26086.patch +++ /dev/null @@ -1,200 +0,0 @@ ---- a/services/history-v1/api/controllers/project_import.js -+++ b/services/history-v1/api/controllers/project_import.js -@@ -35,6 +35,7 @@ async function importSnapshot(req, res) { - try { - snapshot = Snapshot.fromRaw(rawSnapshot) - } catch (err) { -+ logger.warn({ err, projectId }, 'failed to import snapshot') - return render.unprocessableEntity(res) - } - -@@ -43,6 +44,7 @@ async function importSnapshot(req, res) { - historyId = await chunkStore.initializeProject(projectId, snapshot) - } catch (err) { - if (err instanceof chunkStore.AlreadyInitialized) { -+ logger.warn({ err, projectId }, 'already initialized') - return render.conflict(res) - } else { - throw err ---- a/services/history-v1/api/controllers/projects.js -+++ b/services/history-v1/api/controllers/projects.js -@@ -34,6 +34,7 @@ async function initializeProject(req, res, next) { - res.status(HTTPStatus.OK).json({ 
projectId }) - } catch (err) { - if (err instanceof chunkStore.AlreadyInitialized) { -+ logger.warn({ err, projectId }, 'failed to initialize') - render.conflict(res) - } else { - throw err -@@ -242,11 +243,15 @@ async function createProjectBlob(req, res, next) { - const sizeLimit = new StreamSizeLimit(maxUploadSize) - await pipeline(req, sizeLimit, fs.createWriteStream(tmpPath)) - if (sizeLimit.sizeLimitExceeded) { -+ logger.warn( -+ { projectId, expectedHash, maxUploadSize }, -+ 'blob exceeds size threshold' -+ ) - return render.requestEntityTooLarge(res) - } - const hash = await blobHash.fromFile(tmpPath) - if (hash !== expectedHash) { -- logger.debug({ hash, expectedHash }, 'Hash mismatch') -+ logger.warn({ projectId, hash, expectedHash }, 'Hash mismatch') - return render.conflict(res, 'File hash mismatch') - } - -@@ -343,6 +348,10 @@ async function copyProjectBlob(req, res, next) { - targetBlobStore.getBlob(blobHash), - ]) - if (!sourceBlob) { -+ logger.warn( -+ { sourceProjectId, targetProjectId, blobHash }, -+ 'missing source blob when copying across projects' -+ ) - return render.notFound(res) - } - // Exit early if the blob exists in the target project. 
---- a/services/history-v1/app.js -+++ b/services/history-v1/app.js -@@ -100,11 +100,13 @@ function setupErrorHandling() { - }) - } - if (err.code === 'ENUM_MISMATCH') { -+ logger.warn({ err, projectId }, err.message) - return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({ - message: 'invalid enum value: ' + err.paramName, - }) - } - if (err.code === 'REQUIRED') { -+ logger.warn({ err, projectId }, err.message) - return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({ - message: err.message, - }) ---- a/services/project-history/app/js/HistoryStoreManager.js -+++ b/services/project-history/app/js/HistoryStoreManager.js -@@ -35,7 +35,10 @@ class StringStream extends stream.Readable { - _mocks.getMostRecentChunk = (projectId, historyId, callback) => { - const path = `projects/${historyId}/latest/history` - logger.debug({ projectId, historyId }, 'getting chunk from history service') -- _requestChunk({ path, json: true }, callback) -+ _requestChunk({ path, json: true }, (err, chunk) => { -+ if (err) return callback(OError.tag(err)) -+ callback(null, chunk) -+ }) - } - - /** -@@ -54,7 +57,10 @@ export function getChunkAtVersion(projectId, historyId, version, callback) { - { projectId, historyId, version }, - 'getting chunk from history service for version' - ) -- _requestChunk({ path, json: true }, callback) -+ _requestChunk({ path, json: true }, (err, chunk) => { -+ if (err) return callback(OError.tag(err)) -+ callback(null, chunk) -+ }) - } - - export function getMostRecentVersion(projectId, historyId, callback) { -@@ -68,8 +74,10 @@ export function getMostRecentVersion(projectId, historyId, callback) { - _.sortBy(chunk.chunk.history.changes || [], x => x.timestamp) - ) - // find the latest project and doc versions in the chunk -- _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) => -+ _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) => { -+ if (err1) err1 = OError.tag(err1) - _getLatestV2DocVersions(projectId, chunk, (err2, 
v2DocVersions) => { -+ if (err2) err2 = OError.tag(err2) - // return the project and doc versions - const projectStructureAndDocVersions = { - project: projectVersion, -@@ -83,7 +91,7 @@ export function getMostRecentVersion(projectId, historyId, callback) { - chunk - ) - }) -- ) -+ }) - }) - } - -@@ -211,7 +219,10 @@ export function getProjectBlob(historyId, blobHash, callback) { - logger.debug({ historyId, blobHash }, 'getting blob from history service') - _requestHistoryService( - { path: `projects/${historyId}/blobs/${blobHash}` }, -- callback -+ (err, blob) => { -+ if (err) return callback(OError.tag(err)) -+ callback(null, blob) -+ } - ) - } - -@@ -277,7 +288,10 @@ function createBlobFromString(historyId, data, fileId, callback) { - (fsPath, cb) => { - _createBlob(historyId, fsPath, cb) - }, -- callback -+ (err, hash) => { -+ if (err) return callback(OError.tag(err)) -+ callback(null, hash) -+ } - ) - } - -@@ -330,7 +344,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { - try { - ranges = HistoryBlobTranslator.createRangeBlobDataFromUpdate(update) - } catch (error) { -- return callback(error) -+ return callback(OError.tag(error)) - } - createBlobFromString( - historyId, -@@ -338,7 +352,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { - `project-${projectId}-doc-${update.doc}`, - (err, fileHash) => { - if (err) { -- return callback(err) -+ return callback(OError.tag(err)) - } - if (ranges) { - createBlobFromString( -@@ -347,7 +361,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { - `project-${projectId}-doc-${update.doc}-ranges`, - (err, rangesHash) => { - if (err) { -- return callback(err) -+ return callback(OError.tag(err)) - } - logger.debug( - { fileHash, rangesHash }, -@@ -415,7 +429,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { - }, - (err, fileHash) => { - if (err) { -- return callback(err) -+ return 
callback(OError.tag(err)) - } - if (update.hash && update.hash !== fileHash) { - logger.warn( -@@ -447,7 +461,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { - }, - (err, fileHash) => { - if (err) { -- return callback(err) -+ return callback(OError.tag(err)) - } - logger.debug({ fileHash }, 'created empty blob for file') - callback(null, { file: fileHash }) -@@ -520,7 +534,10 @@ export function initializeProject(historyId, callback) { - export function deleteProject(projectId, callback) { - _requestHistoryService( - { method: 'DELETE', path: `projects/${projectId}` }, -- callback -+ err => { -+ if (err) return callback(OError.tag(err)) -+ callback(null) -+ } - ) - } - diff --git a/server-ce/hotfix/5.5.1/pr_26091.patch b/server-ce/hotfix/5.5.1/pr_26091.patch deleted file mode 100644 index c88618b8d0..0000000000 --- a/server-ce/hotfix/5.5.1/pr_26091.patch +++ /dev/null @@ -1,60 +0,0 @@ ---- a/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs -+++ b/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs -@@ -7,6 +7,7 @@ import { - const { ObjectId } = mongodb - - const MIN_MONGO_VERSION = [6, 0] -+const MIN_MONGO_FEATURE_COMPATIBILITY_VERSION = [6, 0] - - async function main() { - let mongoClient -@@ -18,6 +19,7 @@ async function main() { - } - - await checkMongoVersion(mongoClient) -+ await checkFeatureCompatibilityVersion(mongoClient) - - try { - await testTransactions(mongoClient) -@@ -53,6 +55,41 @@ async function checkMongoVersion(mongoClient) { - } - } - -+async function checkFeatureCompatibilityVersion(mongoClient) { -+ const { -+ featureCompatibilityVersion: { version }, -+ } = await mongoClient -+ .db() -+ .admin() -+ .command({ getParameter: 1, featureCompatibilityVersion: 1 }) -+ const [major, minor] = version.split('.').map(v => parseInt(v)) -+ const [minMajor, minMinor] = MIN_MONGO_FEATURE_COMPATIBILITY_VERSION -+ -+ if (major < minMajor || (major === minMajor && minor < minMinor)) { -+ const 
minVersion = MIN_MONGO_FEATURE_COMPATIBILITY_VERSION.join('.') -+ console.error(` -+The MongoDB server has featureCompatibilityVersion=${version}, but Overleaf requires at least version ${minVersion}. -+ -+Open a mongo shell: -+- Overleaf Toolkit deployments: $ bin/mongo -+- Legacy docker-compose.yml deployments: $ docker exec -it mongo mongosh localhost/sharelatex -+ -+In the mongo shell: -+> db.adminCommand( { setFeatureCompatibilityVersion: "${minMajor}.${minMinor}" } ) -+ -+Verify the new value: -+> db.adminCommand( { getParameter: 1, featureCompatibilityVersion: 1 } ) -+ ... -+ { -+ featureCompatibilityVersion: { version: ${minMajor}.${minMinor}' }, -+... -+ -+Aborting. -+`) -+ process.exit(1) -+ } -+} -+ - main() - .then(() => { - console.error('Mongodb is up.') diff --git a/server-ce/hotfix/5.5.1/pr_26152.patch b/server-ce/hotfix/5.5.1/pr_26152.patch deleted file mode 100644 index 9dc5d50e28..0000000000 --- a/server-ce/hotfix/5.5.1/pr_26152.patch +++ /dev/null @@ -1,16 +0,0 @@ ---- a/services/web/modules/server-ce-scripts/scripts/create-user.mjs -+++ b/services/web/modules/server-ce-scripts/scripts/create-user.mjs -@@ -48,3 +48,13 @@ Please visit the following URL to set a password for ${email} and log in: - ) - }) - } -+ -+if (filename === process.argv[1]) { -+ try { -+ await main() -+ process.exit(0) -+ } catch (error) { -+ console.error({ error }) -+ process.exit(1) -+ } -+} diff --git a/server-ce/test/Makefile b/server-ce/test/Makefile index 6c56b7e8fe..18f4446902 100644 --- a/server-ce/test/Makefile +++ b/server-ce/test/Makefile @@ -6,8 +6,8 @@ all: test-e2e # Notable the container labels com.docker.compose.project.working_dir and com.docker.compose.project.config_files need to match when creating containers from the docker host (how you started things) and from host-admin (how tests reconfigure the instance). 
export PWD = $(shell pwd) -export TEX_LIVE_DOCKER_IMAGE ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2023.1 -export ALL_TEX_LIVE_DOCKER_IMAGES ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2023.1,us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2022.1 +export TEX_LIVE_DOCKER_IMAGE ?= gcr.io/overleaf-ops/texlive-full:2023.1 +export ALL_TEX_LIVE_DOCKER_IMAGES ?= gcr.io/overleaf-ops/texlive-full:2023.1,gcr.io/overleaf-ops/texlive-full:2022.1 export IMAGE_TAG_PRO ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/pro:latest export CYPRESS_SHARD ?= export COMPOSE_PROJECT_NAME ?= test @@ -20,7 +20,6 @@ test-e2e-native: npm run cypress:open test-e2e: - docker compose build host-admin docker compose up --no-log-prefix --exit-code-from=e2e e2e test-e2e-open: @@ -46,7 +45,7 @@ prefetch_custom_compose_pull: prefetch_custom: prefetch_custom_texlive prefetch_custom_texlive: echo "$$ALL_TEX_LIVE_DOCKER_IMAGES" | tr ',' '\n' | xargs -I% \ - sh -exc 'tag=%; re_tag=quay.io/sharelatex/$${tag#*/*/*/}; docker pull $$tag; docker tag $$tag $$re_tag' + sh -exc 'tag=%; re_tag=quay.io/sharelatex/$${tag#*/*/}; docker pull $$tag; docker tag $$tag $$re_tag' prefetch_custom: prefetch_old prefetch_old: diff --git a/server-ce/test/admin.spec.ts b/server-ce/test/admin.spec.ts index 50a89fb855..9031e21b68 100644 --- a/server-ce/test/admin.spec.ts +++ b/server-ce/test/admin.spec.ts @@ -179,21 +179,6 @@ describe('admin panel', function () { cy.get('nav').findByText('Manage Users').click() }) - it('displays expected tabs', () => { - const tabs = ['Users', 'License Usage'] - cy.get('[role="tab"]').each((el, index) => { - cy.wrap(el).findByText(tabs[index]).click() - }) - cy.get('[role="tab"]').should('have.length', tabs.length) - }) - - it('license usage tab', () => { - cy.get('a').contains('License Usage').click() - cy.findByText( - 'An active user is one who has opened a project in this Server Pro instance in the last 12 months.' 
- ) - }) - describe('create users', () => { beforeEach(() => { cy.get('a').contains('New User').click() diff --git a/server-ce/test/docker-compose.yml b/server-ce/test/docker-compose.yml index 52d4b1d5b7..501979d63a 100644 --- a/server-ce/test/docker-compose.yml +++ b/server-ce/test/docker-compose.yml @@ -131,7 +131,7 @@ services: saml: restart: always - image: us-east1-docker.pkg.dev/overleaf-ops/ol-docker/saml-test + image: gcr.io/overleaf-ops/saml-test environment: SAML_TEST_SP_ENTITY_ID: 'sharelatex-test-saml' SAML_BASE_URL_PATH: 'http://saml/simplesaml/' diff --git a/services/chat/docker-compose.ci.yml b/services/chat/docker-compose.ci.yml index 24b57ab084..8fd86c1fbb 100644 --- a/services/chat/docker-compose.ci.yml +++ b/services/chat/docker-compose.ci.yml @@ -24,13 +24,10 @@ services: MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" - volumes: - - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started user: node - entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance diff --git a/services/chat/docker-compose.yml b/services/chat/docker-compose.yml index 43a30e8cc7..89a48339bd 100644 --- a/services/chat/docker-compose.yml +++ b/services/chat/docker-compose.yml @@ -26,7 +26,6 @@ services: - .:/overleaf/services/chat - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries - - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/chat environment: ELASTIC_SEARCH_DSN: es:9200 @@ -40,7 +39,6 @@ services: depends_on: mongo: condition: service_started - entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance mongo: diff --git a/services/clsi/README.md b/services/clsi/README.md index f1cf927d3d..16e40b8990 100644 --- a/services/clsi/README.md +++ b/services/clsi/README.md @@ -19,18 +19,18 @@ The CLSI can be 
configured through the following environment variables: * `ALLOWED_IMAGES` - Space separated list of allowed Docker TeX Live images * `CATCH_ERRORS` - Set to `true` to log uncaught exceptions * `COMPILE_GROUP_DOCKER_CONFIGS` - JSON string of Docker configs for compile groups -* `SANDBOXED_COMPILES` - Set to true to use sibling containers -* `SANDBOXED_COMPILES_HOST_DIR_COMPILES` - Working directory for LaTeX compiles -* `SANDBOXED_COMPILES_HOST_DIR_OUTPUT` - Output directory for LaTeX compiles +* `COMPILES_HOST_DIR` - Working directory for LaTeX compiles +* `OUTPUT_HOST_DIR` - Output directory for LaTeX compiles * `COMPILE_SIZE_LIMIT` - Sets the body-parser [limit](https://github.com/expressjs/body-parser#limit) +* `DOCKER_RUNNER` - Set to true to use sibling containers * `DOCKER_RUNTIME` - * `FILESTORE_DOMAIN_OVERRIDE` - The url for the filestore service e.g.`http://$FILESTORE_HOST:3009` * `FILESTORE_PARALLEL_FILE_DOWNLOADS` - Number of parallel file downloads * `LISTEN_ADDRESS` - The address for the RESTful service to listen on. Set to `0.0.0.0` to listen on all network interfaces * `PROCESS_LIFE_SPAN_LIMIT_MS` - Process life span limit in milliseconds * `SMOKE_TEST` - Whether to run smoke tests -* `TEXLIVE_IMAGE` - The TeX Live Docker image to use for sibling containers, e.g. `us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2017.1` -* `TEX_LIVE_IMAGE_NAME_OVERRIDE` - The name of the registry for the Docker image e.g. `us-east1-docker.pkg.dev/overleaf-ops/ol-docker` +* `TEXLIVE_IMAGE` - The TeX Live Docker image to use for sibling containers, e.g. `gcr.io/overleaf-ops/texlive-full:2017.1` +* `TEX_LIVE_IMAGE_NAME_OVERRIDE` - The name of the registry for the Docker image e.g. `gcr.io/overleaf-ops` * `TEXLIVE_IMAGE_USER` - When using sibling containers, the user to run as in the TeX Live image. 
Defaults to `tex` * `TEXLIVE_OPENOUT_ANY` - Sets the `openout_any` environment variable for TeX Live (see the `\openout` primitive [documentation](http://tug.org/texinfohtml/web2c.html#tex-invocation)) @@ -63,10 +63,10 @@ Then start the Docker container: docker run --rm \ -p 127.0.0.1:3013:3013 \ -e LISTEN_ADDRESS=0.0.0.0 \ - -e SANDBOXED_COMPILES=true \ + -e DOCKER_RUNNER=true \ -e TEXLIVE_IMAGE=texlive/texlive \ -e TEXLIVE_IMAGE_USER=root \ - -e SANDBOXED_COMPILES_HOST_DIR_COMPILES="$PWD/compiles" \ + -e COMPILES_HOST_DIR="$PWD/compiles" \ -v "$PWD/compiles:/overleaf/services/clsi/compiles" \ -v "$PWD/cache:/overleaf/services/clsi/cache" \ -v /var/run/docker.sock:/var/run/docker.sock \ diff --git a/services/clsi/buildscript.txt b/services/clsi/buildscript.txt index 58975135d0..709ade18c3 100644 --- a/services/clsi/buildscript.txt +++ b/services/clsi/buildscript.txt @@ -2,7 +2,7 @@ clsi --data-dirs=cache,compiles,output --dependencies= --docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker ---env-add=ENABLE_PDF_CACHING="true",PDF_CACHING_ENABLE_WORKER_POOL="true",ALLOWED_IMAGES=quay.io/sharelatex/texlive-full:2017.1,TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1,TEX_LIVE_IMAGE_NAME_OVERRIDE=us-east1-docker.pkg.dev/overleaf-ops/ol-docker,TEXLIVE_IMAGE_USER="tex",SANDBOXED_COMPILES="true",SANDBOXED_COMPILES_HOST_DIR_COMPILES=$PWD/compiles,SANDBOXED_COMPILES_HOST_DIR_OUTPUT=$PWD/output +--env-add=ENABLE_PDF_CACHING="true",PDF_CACHING_ENABLE_WORKER_POOL="true",ALLOWED_IMAGES=quay.io/sharelatex/texlive-full:2017.1,TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1,TEX_LIVE_IMAGE_NAME_OVERRIDE=us-east1-docker.pkg.dev/overleaf-ops/ol-docker,TEXLIVE_IMAGE_USER="tex",DOCKER_RUNNER="true",COMPILES_HOST_DIR=$PWD/compiles,OUTPUT_HOST_DIR=$PWD/output --env-pass-through= --esmock-loader=False --node-version=22.15.1 diff --git a/services/clsi/docker-compose.ci.yml b/services/clsi/docker-compose.ci.yml index 77a45615b7..b6643008f7 100644 --- 
a/services/clsi/docker-compose.ci.yml +++ b/services/clsi/docker-compose.ci.yml @@ -29,9 +29,9 @@ services: TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1 TEX_LIVE_IMAGE_NAME_OVERRIDE: us-east1-docker.pkg.dev/overleaf-ops/ol-docker TEXLIVE_IMAGE_USER: "tex" - SANDBOXED_COMPILES: "true" - SANDBOXED_COMPILES_HOST_DIR_COMPILES: $PWD/compiles - SANDBOXED_COMPILES_HOST_DIR_OUTPUT: $PWD/output + DOCKER_RUNNER: "true" + COMPILES_HOST_DIR: $PWD/compiles + OUTPUT_HOST_DIR: $PWD/output volumes: - ./compiles:/overleaf/services/clsi/compiles - /var/run/docker.sock:/var/run/docker.sock diff --git a/services/clsi/docker-compose.yml b/services/clsi/docker-compose.yml index b8112a8e17..e0f29ab09d 100644 --- a/services/clsi/docker-compose.yml +++ b/services/clsi/docker-compose.yml @@ -47,8 +47,8 @@ services: TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1 TEX_LIVE_IMAGE_NAME_OVERRIDE: us-east1-docker.pkg.dev/overleaf-ops/ol-docker TEXLIVE_IMAGE_USER: "tex" - SANDBOXED_COMPILES: "true" - SANDBOXED_COMPILES_HOST_DIR_COMPILES: $PWD/compiles - SANDBOXED_COMPILES_HOST_DIR_OUTPUT: $PWD/output + DOCKER_RUNNER: "true" + COMPILES_HOST_DIR: $PWD/compiles + OUTPUT_HOST_DIR: $PWD/output command: npm run --silent test:acceptance diff --git a/services/clsi/package.json b/services/clsi/package.json index b07430391a..86566e0f59 100644 --- a/services/clsi/package.json +++ b/services/clsi/package.json @@ -27,13 +27,13 @@ "async": "^3.2.5", "body-parser": "^1.20.3", "bunyan": "^1.8.15", - "dockerode": "^4.0.7", + "dockerode": "^4.0.5", "express": "^4.21.2", "lodash": "^4.17.21", "p-limit": "^3.1.0", "request": "^2.88.2", "send": "^0.19.0", - "tar-fs": "^3.0.9", + "tar-fs": "^3.0.4", "workerpool": "^6.1.5" }, "devDependencies": { diff --git a/services/contacts/docker-compose.ci.yml b/services/contacts/docker-compose.ci.yml index 24b57ab084..8fd86c1fbb 100644 --- a/services/contacts/docker-compose.ci.yml +++ b/services/contacts/docker-compose.ci.yml @@ -24,13 +24,10 @@ services: 
MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" - volumes: - - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started user: node - entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance diff --git a/services/contacts/docker-compose.yml b/services/contacts/docker-compose.yml index 305232b55d..65e1a578cd 100644 --- a/services/contacts/docker-compose.yml +++ b/services/contacts/docker-compose.yml @@ -26,7 +26,6 @@ services: - .:/overleaf/services/contacts - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries - - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/contacts environment: ELASTIC_SEARCH_DSN: es:9200 @@ -40,7 +39,6 @@ services: depends_on: mongo: condition: service_started - entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance mongo: diff --git a/services/contacts/package.json b/services/contacts/package.json index db707e55c9..f81f947d6a 100644 --- a/services/contacts/package.json +++ b/services/contacts/package.json @@ -6,9 +6,9 @@ "main": "app.js", "scripts": { "start": "node app.js", - "test:acceptance:_run": "mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:acceptance:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP", - "test:unit:_run": "mocha --loader=esmock --recursive --reporter spec $@ test/unit/js", + "test:unit:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec $@ test/unit/js", "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", "nodemon": "node --watch app.js", "lint": "eslint --max-warnings 0 --format unix .", diff --git a/services/docstore/app.js 
b/services/docstore/app.js index ef755c4bb1..76659e8411 100644 --- a/services/docstore/app.js +++ b/services/docstore/app.js @@ -50,14 +50,6 @@ app.param('doc_id', function (req, res, next, docId) { app.get('/project/:project_id/doc-deleted', HttpController.getAllDeletedDocs) app.get('/project/:project_id/doc', HttpController.getAllDocs) app.get('/project/:project_id/ranges', HttpController.getAllRanges) -app.get( - '/project/:project_id/comment-thread-ids', - HttpController.getCommentThreadIds -) -app.get( - '/project/:project_id/tracked-changes-user-ids', - HttpController.getTrackedChangesUserIds -) app.get('/project/:project_id/has-ranges', HttpController.projectHasRanges) app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc) app.get('/project/:project_id/doc/:doc_id/deleted', HttpController.isDocDeleted) diff --git a/services/docstore/app/js/DocArchiveManager.js b/services/docstore/app/js/DocArchiveManager.js index d03ee161a8..4390afe18f 100644 --- a/services/docstore/app/js/DocArchiveManager.js +++ b/services/docstore/app/js/DocArchiveManager.js @@ -1,4 +1,5 @@ -const MongoManager = require('./MongoManager') +const { callbackify } = require('node:util') +const MongoManager = require('./MongoManager').promises const Errors = require('./Errors') const logger = require('@overleaf/logger') const Settings = require('@overleaf/settings') @@ -7,12 +8,29 @@ const { ReadableString } = require('@overleaf/stream-utils') const RangeManager = require('./RangeManager') const PersistorManager = require('./PersistorManager') const pMap = require('p-map') -const { streamToBuffer } = require('./StreamToBuffer') +const { streamToBuffer } = require('./StreamToBuffer').promises const { BSON } = require('mongodb-legacy') const PARALLEL_JOBS = Settings.parallelArchiveJobs const UN_ARCHIVE_BATCH_SIZE = Settings.unArchiveBatchSize +module.exports = { + archiveAllDocs: callbackify(archiveAllDocs), + archiveDoc: callbackify(archiveDoc), + unArchiveAllDocs: 
callbackify(unArchiveAllDocs), + unarchiveDoc: callbackify(unarchiveDoc), + destroyProject: callbackify(destroyProject), + getDoc: callbackify(getDoc), + promises: { + archiveAllDocs, + archiveDoc, + unArchiveAllDocs, + unarchiveDoc, + destroyProject, + getDoc, + }, +} + async function archiveAllDocs(projectId) { if (!_isArchivingEnabled()) { return @@ -44,8 +62,6 @@ async function archiveDoc(projectId, docId) { throw new Error('doc has no lines') } - RangeManager.fixCommentIds(doc) - // warn about any oversized docs already in mongo const linesSize = BSON.calculateObjectSize(doc.lines || {}) const rangesSize = BSON.calculateObjectSize(doc.ranges || {}) @@ -209,12 +225,3 @@ function _isArchivingEnabled() { return true } - -module.exports = { - archiveAllDocs, - archiveDoc, - unArchiveAllDocs, - unarchiveDoc, - destroyProject, - getDoc, -} diff --git a/services/docstore/app/js/DocManager.js b/services/docstore/app/js/DocManager.js index c9e8dadc2c..a9ed99425c 100644 --- a/services/docstore/app/js/DocManager.js +++ b/services/docstore/app/js/DocManager.js @@ -5,6 +5,7 @@ const _ = require('lodash') const DocArchive = require('./DocArchiveManager') const RangeManager = require('./RangeManager') const Settings = require('@overleaf/settings') +const { callbackifyAll } = require('@overleaf/promise-utils') const { setTimeout } = require('node:timers/promises') /** @@ -28,7 +29,7 @@ const DocManager = { throw new Error('must include inS3 when getting doc') } - const doc = await MongoManager.findDoc(projectId, docId, filter) + const doc = await MongoManager.promises.findDoc(projectId, docId, filter) if (doc == null) { throw new Errors.NotFoundError( @@ -37,19 +38,15 @@ const DocManager = { } if (doc.inS3) { - await DocArchive.unarchiveDoc(projectId, docId) + await DocArchive.promises.unarchiveDoc(projectId, docId) return await DocManager._getDoc(projectId, docId, filter) } - if (filter.ranges) { - RangeManager.fixCommentIds(doc) - } - return doc }, async 
isDocDeleted(projectId, docId) { - const doc = await MongoManager.findDoc(projectId, docId, { + const doc = await MongoManager.promises.findDoc(projectId, docId, { deleted: true, }) @@ -77,7 +74,7 @@ const DocManager = { // returns the doc without any version information async _peekRawDoc(projectId, docId) { - const doc = await MongoManager.findDoc(projectId, docId, { + const doc = await MongoManager.promises.findDoc(projectId, docId, { lines: true, rev: true, deleted: true, @@ -94,7 +91,7 @@ const DocManager = { if (doc.inS3) { // skip the unarchiving to mongo when getting a doc - const archivedDoc = await DocArchive.getDoc(projectId, docId) + const archivedDoc = await DocArchive.promises.getDoc(projectId, docId) Object.assign(doc, archivedDoc) } @@ -105,7 +102,7 @@ const DocManager = { // without unarchiving it (avoids unnecessary writes to mongo) async peekDoc(projectId, docId) { const doc = await DocManager._peekRawDoc(projectId, docId) - await MongoManager.checkRevUnchanged(doc) + await MongoManager.promises.checkRevUnchanged(doc) return doc }, @@ -114,18 +111,16 @@ const DocManager = { lines: true, inS3: true, }) - if (!doc) throw new Errors.NotFoundError() - if (!Array.isArray(doc.lines)) throw new Errors.DocWithoutLinesError() - return doc.lines.join('\n') + return doc }, async getAllDeletedDocs(projectId, filter) { - return await MongoManager.getProjectsDeletedDocs(projectId, filter) + return await MongoManager.promises.getProjectsDeletedDocs(projectId, filter) }, async getAllNonDeletedDocs(projectId, filter) { - await DocArchive.unArchiveAllDocs(projectId) - const docs = await MongoManager.getProjectsDocs( + await DocArchive.promises.unArchiveAllDocs(projectId) + const docs = await MongoManager.promises.getProjectsDocs( projectId, { include_deleted: false }, filter @@ -133,46 +128,15 @@ const DocManager = { if (docs == null) { throw new Errors.NotFoundError(`No docs for project ${projectId}`) } - if (filter.ranges) { - for (const doc of docs) { - 
RangeManager.fixCommentIds(doc) - } - } return docs }, - async getCommentThreadIds(projectId) { - const docs = await DocManager.getAllNonDeletedDocs(projectId, { - _id: true, - ranges: true, - }) - const byDoc = new Map() - for (const doc of docs) { - const ids = new Set() - for (const comment of doc.ranges?.comments || []) { - ids.add(comment.op.t) - } - if (ids.size > 0) byDoc.set(doc._id.toString(), Array.from(ids)) - } - return Object.fromEntries(byDoc.entries()) - }, - - async getTrackedChangesUserIds(projectId) { - const docs = await DocManager.getAllNonDeletedDocs(projectId, { - ranges: true, - }) - const userIds = new Set() - for (const doc of docs) { - for (const change of doc.ranges?.changes || []) { - if (change.metadata.user_id === 'anonymous-user') continue - userIds.add(change.metadata.user_id) - } - } - return Array.from(userIds) - }, - async projectHasRanges(projectId) { - const docs = await MongoManager.getProjectsDocs(projectId, {}, { _id: 1 }) + const docs = await MongoManager.promises.getProjectsDocs( + projectId, + {}, + { _id: 1 } + ) const docIds = docs.map(doc => doc._id) for (const docId of docIds) { const doc = await DocManager.peekDoc(projectId, docId) @@ -283,7 +247,7 @@ const DocManager = { } modified = true - await MongoManager.upsertIntoDocCollection( + await MongoManager.promises.upsertIntoDocCollection( projectId, docId, doc?.rev, @@ -298,7 +262,11 @@ const DocManager = { async patchDoc(projectId, docId, meta) { const projection = { _id: 1, deleted: true } - const doc = await MongoManager.findDoc(projectId, docId, projection) + const doc = await MongoManager.promises.findDoc( + projectId, + docId, + projection + ) if (!doc) { throw new Errors.NotFoundError( `No such project/doc to delete: ${projectId}/${docId}` @@ -307,7 +275,7 @@ const DocManager = { if (meta.deleted && Settings.docstore.archiveOnSoftDelete) { // The user will not read this doc anytime soon. Flush it out of mongo. 
- DocArchive.archiveDoc(projectId, docId).catch(err => { + DocArchive.promises.archiveDoc(projectId, docId).catch(err => { logger.warn( { projectId, docId, err }, 'archiving a single doc in the background failed' @@ -315,8 +283,15 @@ const DocManager = { }) } - await MongoManager.patchDoc(projectId, docId, meta) + await MongoManager.promises.patchDoc(projectId, docId, meta) }, } -module.exports = DocManager +module.exports = { + ...callbackifyAll(DocManager, { + multiResult: { + updateDoc: ['modified', 'rev'], + }, + }), + promises: DocManager, +} diff --git a/services/docstore/app/js/Errors.js b/services/docstore/app/js/Errors.js index 7b150cc0db..bbdbe75c08 100644 --- a/services/docstore/app/js/Errors.js +++ b/services/docstore/app/js/Errors.js @@ -10,13 +10,10 @@ class DocRevValueError extends OError {} class DocVersionDecrementedError extends OError {} -class DocWithoutLinesError extends OError {} - module.exports = { Md5MismatchError, DocModifiedError, DocRevValueError, DocVersionDecrementedError, - DocWithoutLinesError, ...Errors, } diff --git a/services/docstore/app/js/HealthChecker.js b/services/docstore/app/js/HealthChecker.js index a5b7ad7e9a..34cd5c973c 100644 --- a/services/docstore/app/js/HealthChecker.js +++ b/services/docstore/app/js/HealthChecker.js @@ -1,35 +1,67 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ const { db, ObjectId } = require('./mongodb') +const request = require('request') +const async = require('async') const _ = require('lodash') const crypto = require('node:crypto') const settings = require('@overleaf/settings') const { port } = settings.internal.docstore const logger = require('@overleaf/logger') -const { fetchNothing, fetchJson } = require('@overleaf/fetch-utils') -async function check() { - const docId = new ObjectId() - const projectId = new ObjectId(settings.docstore.healthCheck.project_id) - const url = `http://127.0.0.1:${port}/project/${projectId}/doc/${docId}` - const lines = [ - 'smoke test - delete me', - `${crypto.randomBytes(32).toString('hex')}`, - ] - logger.debug({ lines, url, docId, projectId }, 'running health check') - let body - try { - await fetchNothing(url, { - method: 'POST', - json: { lines, version: 42, ranges: {} }, - signal: AbortSignal.timeout(3_000), - }) - body = await fetchJson(url, { signal: AbortSignal.timeout(3_000) }) - } finally { - await db.docs.deleteOne({ _id: docId, project_id: projectId }) - } - if (!_.isEqual(body?.lines, lines)) { - throw new Error(`health check lines not equal ${body.lines} != ${lines}`) - } -} module.exports = { - check, + check(callback) { + const docId = new ObjectId() + const projectId = new ObjectId(settings.docstore.healthCheck.project_id) + const url = `http://127.0.0.1:${port}/project/${projectId}/doc/${docId}` + const lines = [ + 'smoke test - delete me', + `${crypto.randomBytes(32).toString('hex')}`, + ] + const getOpts = () => ({ + url, + timeout: 3000, + }) + logger.debug({ lines, url, docId, projectId }, 'running health check') + const jobs = [ + function (cb) { + const opts = getOpts() + opts.json = { lines, version: 42, 
ranges: {} } + return request.post(opts, cb) + }, + function (cb) { + const opts = getOpts() + opts.json = true + return request.get(opts, function (err, res, body) { + if (err != null) { + logger.err({ err }, 'docstore returned a error in health check get') + return cb(err) + } else if (res == null) { + return cb(new Error('no response from docstore with get check')) + } else if ((res != null ? res.statusCode : undefined) !== 200) { + return cb(new Error(`status code not 200, its ${res.statusCode}`)) + } else if ( + _.isEqual(body != null ? body.lines : undefined, lines) && + (body != null ? body._id : undefined) === docId.toString() + ) { + return cb() + } else { + return cb( + new Error( + `health check lines not equal ${body.lines} != ${lines}` + ) + ) + } + }) + }, + cb => db.docs.deleteOne({ _id: docId, project_id: projectId }, cb), + ] + return async.series(jobs, callback) + }, } diff --git a/services/docstore/app/js/HttpController.js b/services/docstore/app/js/HttpController.js index 50c4302aeb..1c4e137033 100644 --- a/services/docstore/app/js/HttpController.js +++ b/services/docstore/app/js/HttpController.js @@ -4,104 +4,143 @@ const DocArchive = require('./DocArchiveManager') const HealthChecker = require('./HealthChecker') const Errors = require('./Errors') const Settings = require('@overleaf/settings') -const { expressify } = require('@overleaf/promise-utils') -async function getDoc(req, res) { +function getDoc(req, res, next) { const { doc_id: docId, project_id: projectId } = req.params const includeDeleted = req.query.include_deleted === 'true' logger.debug({ projectId, docId }, 'getting doc') - const doc = await DocManager.getFullDoc(projectId, docId) - logger.debug({ docId, projectId }, 'got doc') - if (doc.deleted && !includeDeleted) { - res.sendStatus(404) - } else { - res.json(_buildDocView(doc)) - } + DocManager.getFullDoc(projectId, docId, function (error, doc) { + if (error) { + return next(error) + } + logger.debug({ docId, projectId }, 'got 
doc') + if (doc == null) { + res.sendStatus(404) + } else if (doc.deleted && !includeDeleted) { + res.sendStatus(404) + } else { + res.json(_buildDocView(doc)) + } + }) } -async function peekDoc(req, res) { +function peekDoc(req, res, next) { const { doc_id: docId, project_id: projectId } = req.params logger.debug({ projectId, docId }, 'peeking doc') - const doc = await DocManager.peekDoc(projectId, docId) - res.setHeader('x-doc-status', doc.inS3 ? 'archived' : 'active') - res.json(_buildDocView(doc)) + DocManager.peekDoc(projectId, docId, function (error, doc) { + if (error) { + return next(error) + } + if (doc == null) { + res.sendStatus(404) + } else { + res.setHeader('x-doc-status', doc.inS3 ? 'archived' : 'active') + res.json(_buildDocView(doc)) + } + }) } -async function isDocDeleted(req, res) { +function isDocDeleted(req, res, next) { const { doc_id: docId, project_id: projectId } = req.params - const deleted = await DocManager.isDocDeleted(projectId, docId) - res.json({ deleted }) + DocManager.isDocDeleted(projectId, docId, function (error, deleted) { + if (error) { + return next(error) + } + res.json({ deleted }) + }) } -async function getRawDoc(req, res) { +function getRawDoc(req, res, next) { const { doc_id: docId, project_id: projectId } = req.params logger.debug({ projectId, docId }, 'getting raw doc') - const content = await DocManager.getDocLines(projectId, docId) - res.setHeader('content-type', 'text/plain') - res.send(content) + DocManager.getDocLines(projectId, docId, function (error, doc) { + if (error) { + return next(error) + } + if (doc == null) { + res.sendStatus(404) + } else { + res.setHeader('content-type', 'text/plain') + res.send(_buildRawDocView(doc)) + } + }) } -async function getAllDocs(req, res) { +function getAllDocs(req, res, next) { const { project_id: projectId } = req.params logger.debug({ projectId }, 'getting all docs') - const docs = await DocManager.getAllNonDeletedDocs(projectId, { - lines: true, - rev: true, - }) - const 
docViews = _buildDocsArrayView(projectId, docs) - for (const docView of docViews) { - if (!docView.lines) { - logger.warn({ projectId, docId: docView._id }, 'missing doc lines') - docView.lines = [] + DocManager.getAllNonDeletedDocs( + projectId, + { lines: true, rev: true }, + function (error, docs) { + if (docs == null) { + docs = [] + } + if (error) { + return next(error) + } + const docViews = _buildDocsArrayView(projectId, docs) + for (const docView of docViews) { + if (!docView.lines) { + logger.warn({ projectId, docId: docView._id }, 'missing doc lines') + docView.lines = [] + } + } + res.json(docViews) } - } - res.json(docViews) -} - -async function getAllDeletedDocs(req, res) { - const { project_id: projectId } = req.params - logger.debug({ projectId }, 'getting all deleted docs') - const docs = await DocManager.getAllDeletedDocs(projectId, { - name: true, - deletedAt: true, - }) - res.json( - docs.map(doc => ({ - _id: doc._id.toString(), - name: doc.name, - deletedAt: doc.deletedAt, - })) ) } -async function getAllRanges(req, res) { +function getAllDeletedDocs(req, res, next) { + const { project_id: projectId } = req.params + logger.debug({ projectId }, 'getting all deleted docs') + DocManager.getAllDeletedDocs( + projectId, + { name: true, deletedAt: true }, + function (error, docs) { + if (error) { + return next(error) + } + res.json( + docs.map(doc => ({ + _id: doc._id.toString(), + name: doc.name, + deletedAt: doc.deletedAt, + })) + ) + } + ) +} + +function getAllRanges(req, res, next) { const { project_id: projectId } = req.params logger.debug({ projectId }, 'getting all ranges') - const docs = await DocManager.getAllNonDeletedDocs(projectId, { - ranges: true, + DocManager.getAllNonDeletedDocs( + projectId, + { ranges: true }, + function (error, docs) { + if (docs == null) { + docs = [] + } + if (error) { + return next(error) + } + res.json(_buildDocsArrayView(projectId, docs)) + } + ) +} + +function projectHasRanges(req, res, next) { + const { 
project_id: projectId } = req.params + DocManager.projectHasRanges(projectId, (err, projectHasRanges) => { + if (err) { + return next(err) + } + res.json({ projectHasRanges }) }) - res.json(_buildDocsArrayView(projectId, docs)) } -async function getCommentThreadIds(req, res) { - const { project_id: projectId } = req.params - const threadIds = await DocManager.getCommentThreadIds(projectId) - res.json(threadIds) -} - -async function getTrackedChangesUserIds(req, res) { - const { project_id: projectId } = req.params - const userIds = await DocManager.getTrackedChangesUserIds(projectId) - res.json(userIds) -} - -async function projectHasRanges(req, res) { - const { project_id: projectId } = req.params - const projectHasRanges = await DocManager.projectHasRanges(projectId) - res.json({ projectHasRanges }) -} - -async function updateDoc(req, res) { +function updateDoc(req, res, next) { const { doc_id: docId, project_id: projectId } = req.params const lines = req.body?.lines const version = req.body?.version @@ -133,20 +172,25 @@ async function updateDoc(req, res) { } logger.debug({ projectId, docId }, 'got http request to update doc') - const { modified, rev } = await DocManager.updateDoc( + DocManager.updateDoc( projectId, docId, lines, version, - ranges + ranges, + function (error, modified, rev) { + if (error) { + return next(error) + } + res.json({ + modified, + rev, + }) + } ) - res.json({ - modified, - rev, - }) } -async function patchDoc(req, res) { +function patchDoc(req, res, next) { const { doc_id: docId, project_id: projectId } = req.params logger.debug({ projectId, docId }, 'patching doc') @@ -159,8 +203,12 @@ async function patchDoc(req, res) { logger.fatal({ field }, 'joi validation for pathDoc is broken') } }) - await DocManager.patchDoc(projectId, docId, meta) - res.sendStatus(204) + DocManager.patchDoc(projectId, docId, meta, function (error) { + if (error) { + return next(error) + } + res.sendStatus(204) + }) } function _buildDocView(doc) { @@ -173,6 
+221,10 @@ function _buildDocView(doc) { return docView } +function _buildRawDocView(doc) { + return (doc?.lines ?? []).join('\n') +} + function _buildDocsArrayView(projectId, docs) { const docViews = [] for (const doc of docs) { @@ -189,69 +241,79 @@ function _buildDocsArrayView(projectId, docs) { return docViews } -async function archiveAllDocs(req, res) { +function archiveAllDocs(req, res, next) { const { project_id: projectId } = req.params logger.debug({ projectId }, 'archiving all docs') - await DocArchive.archiveAllDocs(projectId) - res.sendStatus(204) + DocArchive.archiveAllDocs(projectId, function (error) { + if (error) { + return next(error) + } + res.sendStatus(204) + }) } -async function archiveDoc(req, res) { +function archiveDoc(req, res, next) { const { doc_id: docId, project_id: projectId } = req.params logger.debug({ projectId, docId }, 'archiving a doc') - await DocArchive.archiveDoc(projectId, docId) - res.sendStatus(204) + DocArchive.archiveDoc(projectId, docId, function (error) { + if (error) { + return next(error) + } + res.sendStatus(204) + }) } -async function unArchiveAllDocs(req, res) { +function unArchiveAllDocs(req, res, next) { const { project_id: projectId } = req.params logger.debug({ projectId }, 'unarchiving all docs') - try { - await DocArchive.unArchiveAllDocs(projectId) - } catch (err) { - if (err instanceof Errors.DocRevValueError) { - logger.warn({ err }, 'Failed to unarchive doc') - return res.sendStatus(409) + DocArchive.unArchiveAllDocs(projectId, function (err) { + if (err) { + if (err instanceof Errors.DocRevValueError) { + logger.warn({ err }, 'Failed to unarchive doc') + return res.sendStatus(409) + } + return next(err) } - throw err - } - res.sendStatus(200) + res.sendStatus(200) + }) } -async function destroyProject(req, res) { +function destroyProject(req, res, next) { const { project_id: projectId } = req.params logger.debug({ projectId }, 'destroying all docs') - await DocArchive.destroyProject(projectId) - 
res.sendStatus(204) + DocArchive.destroyProject(projectId, function (error) { + if (error) { + return next(error) + } + res.sendStatus(204) + }) } -async function healthCheck(req, res) { - try { - await HealthChecker.check() - } catch (err) { - logger.err({ err }, 'error performing health check') - res.sendStatus(500) - return - } - res.sendStatus(200) +function healthCheck(req, res) { + HealthChecker.check(function (err) { + if (err) { + logger.err({ err }, 'error performing health check') + res.sendStatus(500) + } else { + res.sendStatus(200) + } + }) } module.exports = { - getDoc: expressify(getDoc), - peekDoc: expressify(peekDoc), - isDocDeleted: expressify(isDocDeleted), - getRawDoc: expressify(getRawDoc), - getAllDocs: expressify(getAllDocs), - getAllDeletedDocs: expressify(getAllDeletedDocs), - getAllRanges: expressify(getAllRanges), - getTrackedChangesUserIds: expressify(getTrackedChangesUserIds), - getCommentThreadIds: expressify(getCommentThreadIds), - projectHasRanges: expressify(projectHasRanges), - updateDoc: expressify(updateDoc), - patchDoc: expressify(patchDoc), - archiveAllDocs: expressify(archiveAllDocs), - archiveDoc: expressify(archiveDoc), - unArchiveAllDocs: expressify(unArchiveAllDocs), - destroyProject: expressify(destroyProject), - healthCheck: expressify(healthCheck), + getDoc, + peekDoc, + isDocDeleted, + getRawDoc, + getAllDocs, + getAllDeletedDocs, + getAllRanges, + projectHasRanges, + updateDoc, + patchDoc, + archiveAllDocs, + archiveDoc, + unArchiveAllDocs, + destroyProject, + healthCheck, } diff --git a/services/docstore/app/js/MongoManager.js b/services/docstore/app/js/MongoManager.js index ef101f91c0..ad1a2d2b40 100644 --- a/services/docstore/app/js/MongoManager.js +++ b/services/docstore/app/js/MongoManager.js @@ -1,6 +1,7 @@ const { db, ObjectId } = require('./mongodb') const Settings = require('@overleaf/settings') const Errors = require('./Errors') +const { callbackify } = require('node:util') const ARCHIVING_LOCK_DURATION_MS = 
Settings.archivingLockDurationMs @@ -240,17 +241,34 @@ async function destroyProject(projectId) { } module.exports = { - findDoc, - getProjectsDeletedDocs, - getProjectsDocs, - getArchivedProjectDocs, - getNonArchivedProjectDocIds, - getNonDeletedArchivedProjectDocs, - upsertIntoDocCollection, - restoreArchivedDoc, - patchDoc, - getDocForArchiving, - markDocAsArchived, - checkRevUnchanged, - destroyProject, + findDoc: callbackify(findDoc), + getProjectsDeletedDocs: callbackify(getProjectsDeletedDocs), + getProjectsDocs: callbackify(getProjectsDocs), + getArchivedProjectDocs: callbackify(getArchivedProjectDocs), + getNonArchivedProjectDocIds: callbackify(getNonArchivedProjectDocIds), + getNonDeletedArchivedProjectDocs: callbackify( + getNonDeletedArchivedProjectDocs + ), + upsertIntoDocCollection: callbackify(upsertIntoDocCollection), + restoreArchivedDoc: callbackify(restoreArchivedDoc), + patchDoc: callbackify(patchDoc), + getDocForArchiving: callbackify(getDocForArchiving), + markDocAsArchived: callbackify(markDocAsArchived), + checkRevUnchanged: callbackify(checkRevUnchanged), + destroyProject: callbackify(destroyProject), + promises: { + findDoc, + getProjectsDeletedDocs, + getProjectsDocs, + getArchivedProjectDocs, + getNonArchivedProjectDocIds, + getNonDeletedArchivedProjectDocs, + upsertIntoDocCollection, + restoreArchivedDoc, + patchDoc, + getDocForArchiving, + markDocAsArchived, + checkRevUnchanged, + destroyProject, + }, } diff --git a/services/docstore/app/js/RangeManager.js b/services/docstore/app/js/RangeManager.js index 2fbadf9468..f36f68fe35 100644 --- a/services/docstore/app/js/RangeManager.js +++ b/services/docstore/app/js/RangeManager.js @@ -49,25 +49,15 @@ module.exports = RangeManager = { updateMetadata(change.metadata) } for (const comment of Array.from(ranges.comments || [])) { - // Two bugs resulted in mismatched ids, prefer the thread id from the op: https://github.com/overleaf/internal/issues/23272 - comment.id = 
RangeManager._safeObjectId(comment.op?.t || comment.id) - if (comment.op) comment.op.t = comment.id - - // resolved property is added to comments when they are obtained from history, but this state doesn't belong in mongo docs collection - // more info: https://github.com/overleaf/internal/issues/24371#issuecomment-2913095174 - delete comment.op?.resolved + comment.id = RangeManager._safeObjectId(comment.id) + if ((comment.op != null ? comment.op.t : undefined) != null) { + comment.op.t = RangeManager._safeObjectId(comment.op.t) + } updateMetadata(comment.metadata) } return ranges }, - fixCommentIds(doc) { - for (const comment of doc?.ranges?.comments || []) { - // Two bugs resulted in mismatched ids, prefer the thread id from the op: https://github.com/overleaf/internal/issues/23272 - if (comment.op?.t) comment.id = comment.op.t - } - }, - _safeObjectId(data) { try { return new ObjectId(data) diff --git a/services/docstore/app/js/StreamToBuffer.js b/services/docstore/app/js/StreamToBuffer.js index 09215a7367..7de146cd11 100644 --- a/services/docstore/app/js/StreamToBuffer.js +++ b/services/docstore/app/js/StreamToBuffer.js @@ -2,9 +2,13 @@ const { LoggerStream, WritableBuffer } = require('@overleaf/stream-utils') const Settings = require('@overleaf/settings') const logger = require('@overleaf/logger/logging-manager') const { pipeline } = require('node:stream/promises') +const { callbackify } = require('node:util') module.exports = { - streamToBuffer, + streamToBuffer: callbackify(streamToBuffer), + promises: { + streamToBuffer, + }, } async function streamToBuffer(projectId, docId, stream) { diff --git a/services/docstore/docker-compose.ci.yml b/services/docstore/docker-compose.ci.yml index 40decc4aea..ff222f6514 100644 --- a/services/docstore/docker-compose.ci.yml +++ b/services/docstore/docker-compose.ci.yml @@ -27,15 +27,12 @@ services: MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" - volumes: - - 
../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started gcs: condition: service_healthy user: node - entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance diff --git a/services/docstore/docker-compose.yml b/services/docstore/docker-compose.yml index a58b862b9a..4a4fa2f10c 100644 --- a/services/docstore/docker-compose.yml +++ b/services/docstore/docker-compose.yml @@ -26,7 +26,6 @@ services: - .:/overleaf/services/docstore - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries - - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/docstore environment: ELASTIC_SEARCH_DSN: es:9200 @@ -45,7 +44,6 @@ services: condition: service_started gcs: condition: service_healthy - entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance mongo: diff --git a/services/docstore/package.json b/services/docstore/package.json index bf5857fd49..e505f731d3 100644 --- a/services/docstore/package.json +++ b/services/docstore/package.json @@ -17,7 +17,6 @@ "types:check": "tsc --noEmit" }, "dependencies": { - "@overleaf/fetch-utils": "*", "@overleaf/logger": "*", "@overleaf/metrics": "*", "@overleaf/o-error": "*", diff --git a/services/docstore/test/acceptance/js/ArchiveDocsTests.js b/services/docstore/test/acceptance/js/ArchiveDocsTests.js index 7e254c7e84..d9228103b6 100644 --- a/services/docstore/test/acceptance/js/ArchiveDocsTests.js +++ b/services/docstore/test/acceptance/js/ArchiveDocsTests.js @@ -1001,15 +1001,6 @@ describe('Archiving', function () { }, version: 2, } - this.fixedRanges = { - ...this.doc.ranges, - comments: [ - { - ...this.doc.ranges.comments[0], - id: this.doc.ranges.comments[0].op.t, - }, - ], - } return DocstoreClient.createDoc( this.project_id, this.doc._id, @@ -1057,7 +1048,7 @@ describe('Archiving', function () { throw error } 
s3Doc.lines.should.deep.equal(this.doc.lines) - const ranges = JSON.parse(JSON.stringify(this.fixedRanges)) // ObjectId -> String + const ranges = JSON.parse(JSON.stringify(this.doc.ranges)) // ObjectId -> String s3Doc.ranges.should.deep.equal(ranges) return done() } @@ -1084,7 +1075,7 @@ describe('Archiving', function () { throw error } doc.lines.should.deep.equal(this.doc.lines) - doc.ranges.should.deep.equal(this.fixedRanges) + doc.ranges.should.deep.equal(this.doc.ranges) expect(doc.inS3).not.to.exist return done() }) diff --git a/services/docstore/test/acceptance/js/GettingAllDocsTests.js b/services/docstore/test/acceptance/js/GettingAllDocsTests.js index 57851b2c3b..8fe5e7d91b 100644 --- a/services/docstore/test/acceptance/js/GettingAllDocsTests.js +++ b/services/docstore/test/acceptance/js/GettingAllDocsTests.js @@ -20,73 +20,30 @@ const DocstoreClient = require('./helpers/DocstoreClient') describe('Getting all docs', function () { beforeEach(function (done) { this.project_id = new ObjectId() - this.threadId1 = new ObjectId().toString() - this.threadId2 = new ObjectId().toString() this.docs = [ { _id: new ObjectId(), lines: ['one', 'two', 'three'], - ranges: { - comments: [ - { id: new ObjectId().toString(), op: { t: this.threadId1 } }, - ], - changes: [ - { - id: new ObjectId().toString(), - metadata: { user_id: 'user-id-1' }, - }, - ], - }, + ranges: { mock: 'one' }, rev: 2, }, { _id: new ObjectId(), lines: ['aaa', 'bbb', 'ccc'], - ranges: { - changes: [ - { - id: new ObjectId().toString(), - metadata: { user_id: 'user-id-2' }, - }, - ], - }, + ranges: { mock: 'two' }, rev: 4, }, { _id: new ObjectId(), lines: ['111', '222', '333'], - ranges: { - comments: [ - { id: new ObjectId().toString(), op: { t: this.threadId2 } }, - ], - changes: [ - { - id: new ObjectId().toString(), - metadata: { user_id: 'anonymous-user' }, - }, - ], - }, + ranges: { mock: 'three' }, rev: 6, }, ] - this.fixedRanges = this.docs.map(doc => { - if (!doc.ranges?.comments?.length) 
return doc.ranges - return { - ...doc.ranges, - comments: [ - { ...doc.ranges.comments[0], id: doc.ranges.comments[0].op.t }, - ], - } - }) this.deleted_doc = { _id: new ObjectId(), lines: ['deleted'], - ranges: { - comments: [{ id: new ObjectId().toString(), op: { t: 'thread-id-3' } }], - changes: [ - { id: new ObjectId().toString(), metadata: { user_id: 'user-id-3' } }, - ], - }, + ranges: { mock: 'four' }, rev: 8, } const version = 42 @@ -139,7 +96,7 @@ describe('Getting all docs', function () { }) }) - it('getAllRanges should return all the (non-deleted) doc ranges', function (done) { + return it('getAllRanges should return all the (non-deleted) doc ranges', function (done) { return DocstoreClient.getAllRanges(this.project_id, (error, res, docs) => { if (error != null) { throw error @@ -147,38 +104,9 @@ describe('Getting all docs', function () { docs.length.should.equal(this.docs.length) for (let i = 0; i < docs.length; i++) { const doc = docs[i] - doc.ranges.should.deep.equal(this.fixedRanges[i]) + doc.ranges.should.deep.equal(this.docs[i].ranges) } return done() }) }) - - it('getTrackedChangesUserIds should return all the user ids from (non-deleted) ranges', function (done) { - DocstoreClient.getTrackedChangesUserIds( - this.project_id, - (error, res, userIds) => { - if (error != null) { - throw error - } - userIds.should.deep.equal(['user-id-1', 'user-id-2']) - done() - } - ) - }) - - it('getCommentThreadIds should return all the thread ids from (non-deleted) ranges', function (done) { - DocstoreClient.getCommentThreadIds( - this.project_id, - (error, res, threadIds) => { - if (error != null) { - throw error - } - threadIds.should.deep.equal({ - [this.docs[0]._id.toString()]: [this.threadId1], - [this.docs[2]._id.toString()]: [this.threadId2], - }) - done() - } - ) - }) }) diff --git a/services/docstore/test/acceptance/js/GettingDocsTests.js b/services/docstore/test/acceptance/js/GettingDocsTests.js index 1cfc53c5c6..121b3c1e24 100644 --- 
a/services/docstore/test/acceptance/js/GettingDocsTests.js +++ b/services/docstore/test/acceptance/js/GettingDocsTests.js @@ -28,26 +28,10 @@ describe('Getting a doc', function () { op: { i: 'foo', p: 3 }, meta: { user_id: new ObjectId().toString(), - ts: new Date().toJSON(), + ts: new Date().toString(), }, }, ], - comments: [ - { - id: new ObjectId().toString(), - op: { c: 'comment', p: 1, t: new ObjectId().toString() }, - metadata: { - user_id: new ObjectId().toString(), - ts: new Date().toJSON(), - }, - }, - ], - } - this.fixedRanges = { - ...this.ranges, - comments: [ - { ...this.ranges.comments[0], id: this.ranges.comments[0].op.t }, - ], } return DocstoreApp.ensureRunning(() => { return DocstoreClient.createDoc( @@ -76,7 +60,7 @@ describe('Getting a doc', function () { if (error) return done(error) doc.lines.should.deep.equal(this.lines) doc.version.should.equal(this.version) - doc.ranges.should.deep.equal(this.fixedRanges) + doc.ranges.should.deep.equal(this.ranges) return done() } ) @@ -130,7 +114,7 @@ describe('Getting a doc', function () { if (error) return done(error) doc.lines.should.deep.equal(this.lines) doc.version.should.equal(this.version) - doc.ranges.should.deep.equal(this.fixedRanges) + doc.ranges.should.deep.equal(this.ranges) doc.deleted.should.equal(true) return done() } diff --git a/services/docstore/test/acceptance/js/HealthCheckerTest.js b/services/docstore/test/acceptance/js/HealthCheckerTest.js deleted file mode 100644 index b25a45312b..0000000000 --- a/services/docstore/test/acceptance/js/HealthCheckerTest.js +++ /dev/null @@ -1,28 +0,0 @@ -const { db } = require('../../../app/js/mongodb') -const DocstoreApp = require('./helpers/DocstoreApp') -const DocstoreClient = require('./helpers/DocstoreClient') -const { expect } = require('chai') - -describe('HealthChecker', function () { - beforeEach('start', function (done) { - DocstoreApp.ensureRunning(done) - }) - beforeEach('clear docs collection', async function () { - await 
db.docs.deleteMany({}) - }) - let res - beforeEach('run health check', function (done) { - DocstoreClient.healthCheck((err, _res) => { - res = _res - done(err) - }) - }) - - it('should return 200', function () { - res.statusCode.should.equal(200) - }) - - it('should not leave any cruft behind', async function () { - expect(await db.docs.find({}).toArray()).to.deep.equal([]) - }) -}) diff --git a/services/docstore/test/acceptance/js/helpers/DocstoreClient.js b/services/docstore/test/acceptance/js/helpers/DocstoreClient.js index cb8bce2579..790ec8f237 100644 --- a/services/docstore/test/acceptance/js/helpers/DocstoreClient.js +++ b/services/docstore/test/acceptance/js/helpers/DocstoreClient.js @@ -100,26 +100,6 @@ module.exports = DocstoreClient = { ) }, - getCommentThreadIds(projectId, callback) { - request.get( - { - url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/comment-thread-ids`, - json: true, - }, - callback - ) - }, - - getTrackedChangesUserIds(projectId, callback) { - request.get( - { - url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/tracked-changes-user-ids`, - json: true, - }, - callback - ) - }, - updateDoc(projectId, docId, lines, version, ranges, callback) { return request.post( { @@ -201,13 +181,6 @@ module.exports = DocstoreClient = { ) }, - healthCheck(callback) { - request.get( - `http://127.0.0.1:${settings.internal.docstore.port}/health_check`, - callback - ) - }, - getS3Doc(projectId, docId, callback) { getStringFromPersistor( Persistor, diff --git a/services/docstore/test/unit/js/DocArchiveManagerTests.js b/services/docstore/test/unit/js/DocArchiveManagerTests.js index 2ec1cb2016..a57f9806c8 100644 --- a/services/docstore/test/unit/js/DocArchiveManagerTests.js +++ b/services/docstore/test/unit/js/DocArchiveManagerTests.js @@ -4,7 +4,7 @@ const modulePath = '../../../app/js/DocArchiveManager.js' const SandboxedModule = require('sandboxed-module') const { ObjectId } = 
require('mongodb-legacy') const Errors = require('../../../app/js/Errors') -const StreamToBuffer = require('../../../app/js/StreamToBuffer') +const StreamToBuffer = require('../../../app/js/StreamToBuffer').promises describe('DocArchiveManager', function () { let DocArchiveManager, @@ -31,7 +31,6 @@ describe('DocArchiveManager', function () { RangeManager = { jsonRangesToMongo: sinon.stub().returns({ mongo: 'ranges' }), - fixCommentIds: sinon.stub(), } Settings = { docstore: { @@ -143,33 +142,37 @@ describe('DocArchiveManager', function () { } MongoManager = { - markDocAsArchived: sinon.stub().resolves(), - restoreArchivedDoc: sinon.stub().resolves(), - upsertIntoDocCollection: sinon.stub().resolves(), - getProjectsDocs: sinon.stub().resolves(mongoDocs), - getNonDeletedArchivedProjectDocs: getArchivedProjectDocs, - getNonArchivedProjectDocIds, - getArchivedProjectDocs, - findDoc: sinon.stub().callsFake(fakeGetDoc), - getDocForArchiving: sinon.stub().callsFake(fakeGetDoc), - destroyProject: sinon.stub().resolves(), + promises: { + markDocAsArchived: sinon.stub().resolves(), + restoreArchivedDoc: sinon.stub().resolves(), + upsertIntoDocCollection: sinon.stub().resolves(), + getProjectsDocs: sinon.stub().resolves(mongoDocs), + getNonDeletedArchivedProjectDocs: getArchivedProjectDocs, + getNonArchivedProjectDocIds, + getArchivedProjectDocs, + findDoc: sinon.stub().callsFake(fakeGetDoc), + getDocForArchiving: sinon.stub().callsFake(fakeGetDoc), + destroyProject: sinon.stub().resolves(), + }, } // Wrap streamToBuffer so that we can pass in something that it expects (in // this case, a Promise) rather than a stubbed stream object streamToBuffer = { - streamToBuffer: async () => { - const inputStream = new Promise(resolve => { - stream.on('data', data => resolve(data)) - }) + promises: { + streamToBuffer: async () => { + const inputStream = new Promise(resolve => { + stream.on('data', data => resolve(data)) + }) - const value = await StreamToBuffer.streamToBuffer( - 
'testProjectId', - 'testDocId', - inputStream - ) + const value = await StreamToBuffer.streamToBuffer( + 'testProjectId', + 'testDocId', + inputStream + ) - return value + return value + }, }, } @@ -189,13 +192,9 @@ describe('DocArchiveManager', function () { describe('archiveDoc', function () { it('should resolve when passed a valid document', async function () { - await expect(DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)).to - .eventually.be.fulfilled - }) - - it('should fix comment ids', async function () { - await DocArchiveManager.archiveDoc(projectId, mongoDocs[1]._id) - expect(RangeManager.fixCommentIds).to.have.been.called + await expect( + DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + ).to.eventually.be.fulfilled }) it('should throw an error if the doc has no lines', async function () { @@ -203,26 +202,26 @@ describe('DocArchiveManager', function () { doc.lines = null await expect( - DocArchiveManager.archiveDoc(projectId, doc._id) + DocArchiveManager.promises.archiveDoc(projectId, doc._id) ).to.eventually.be.rejectedWith('doc has no lines') }) it('should add the schema version', async function () { - await DocArchiveManager.archiveDoc(projectId, mongoDocs[1]._id) + await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[1]._id) expect(StreamUtils.ReadableString).to.have.been.calledWith( sinon.match(/"schema_v":1/) ) }) it('should calculate the hex md5 sum of the content', async function () { - await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) + await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) expect(Crypto.createHash).to.have.been.calledWith('md5') expect(HashUpdate).to.have.been.calledWith(archivedDocJson) expect(HashDigest).to.have.been.calledWith('hex') }) it('should pass the md5 hash to the object persistor for verification', async function () { - await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) + await DocArchiveManager.promises.archiveDoc(projectId, 
mongoDocs[0]._id) expect(PersistorManager.sendStream).to.have.been.calledWith( sinon.match.any, @@ -233,7 +232,7 @@ describe('DocArchiveManager', function () { }) it('should pass the correct bucket and key to the persistor', async function () { - await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) + await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) expect(PersistorManager.sendStream).to.have.been.calledWith( Settings.docstore.bucket, @@ -242,7 +241,7 @@ describe('DocArchiveManager', function () { }) it('should create a stream from the encoded json and send it', async function () { - await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) + await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) expect(StreamUtils.ReadableString).to.have.been.calledWith( archivedDocJson ) @@ -254,8 +253,8 @@ describe('DocArchiveManager', function () { }) it('should mark the doc as archived', async function () { - await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) - expect(MongoManager.markDocAsArchived).to.have.been.calledWith( + await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith( projectId, mongoDocs[0]._id, mongoDocs[0].rev @@ -268,8 +267,8 @@ describe('DocArchiveManager', function () { }) it('should bail out early', async function () { - await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) - expect(MongoManager.getDocForArchiving).to.not.have.been.called + await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + expect(MongoManager.promises.getDocForArchiving).to.not.have.been.called }) }) @@ -286,7 +285,7 @@ describe('DocArchiveManager', function () { it('should return an error', async function () { await expect( - DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) + DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) ).to.eventually.be.rejectedWith('null bytes 
detected') }) }) @@ -297,19 +296,21 @@ describe('DocArchiveManager', function () { describe('when the doc is in S3', function () { beforeEach(function () { - MongoManager.findDoc = sinon.stub().resolves({ inS3: true, rev }) + MongoManager.promises.findDoc = sinon + .stub() + .resolves({ inS3: true, rev }) docId = mongoDocs[0]._id lines = ['doc', 'lines'] rev = 123 }) it('should resolve when passed a valid document', async function () { - await expect(DocArchiveManager.unarchiveDoc(projectId, docId)).to - .eventually.be.fulfilled + await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId)) + .to.eventually.be.fulfilled }) it('should test md5 validity with the raw buffer', async function () { - await DocArchiveManager.unarchiveDoc(projectId, docId) + await DocArchiveManager.promises.unarchiveDoc(projectId, docId) expect(HashUpdate).to.have.been.calledWith( sinon.match.instanceOf(Buffer) ) @@ -318,17 +319,15 @@ describe('DocArchiveManager', function () { it('should throw an error if the md5 does not match', async function () { PersistorManager.getObjectMd5Hash.resolves('badf00d') await expect( - DocArchiveManager.unarchiveDoc(projectId, docId) + DocArchiveManager.promises.unarchiveDoc(projectId, docId) ).to.eventually.be.rejected.and.be.instanceof(Errors.Md5MismatchError) }) it('should restore the doc in Mongo', async function () { - await DocArchiveManager.unarchiveDoc(projectId, docId) - expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith( - projectId, - docId, - archivedDoc - ) + await DocArchiveManager.promises.unarchiveDoc(projectId, docId) + expect( + MongoManager.promises.restoreArchivedDoc + ).to.have.been.calledWith(projectId, docId, archivedDoc) }) describe('when archiving is not configured', function () { @@ -338,15 +337,15 @@ describe('DocArchiveManager', function () { it('should error out on archived doc', async function () { await expect( - DocArchiveManager.unarchiveDoc(projectId, docId) + 
DocArchiveManager.promises.unarchiveDoc(projectId, docId) ).to.eventually.be.rejected.and.match( /found archived doc, but archiving backend is not configured/ ) }) it('should return early on non-archived doc', async function () { - MongoManager.findDoc = sinon.stub().resolves({ rev }) - await DocArchiveManager.unarchiveDoc(projectId, docId) + MongoManager.promises.findDoc = sinon.stub().resolves({ rev }) + await DocArchiveManager.promises.unarchiveDoc(projectId, docId) expect(PersistorManager.getObjectMd5Hash).to.not.have.been.called }) }) @@ -364,12 +363,10 @@ describe('DocArchiveManager', function () { }) it('should return the docs lines', async function () { - await DocArchiveManager.unarchiveDoc(projectId, docId) - expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith( - projectId, - docId, - { lines, rev } - ) + await DocArchiveManager.promises.unarchiveDoc(projectId, docId) + expect( + MongoManager.promises.restoreArchivedDoc + ).to.have.been.calledWith(projectId, docId, { lines, rev }) }) }) @@ -388,16 +385,14 @@ describe('DocArchiveManager', function () { }) it('should return the doc lines and ranges', async function () { - await DocArchiveManager.unarchiveDoc(projectId, docId) - expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith( - projectId, - docId, - { - lines, - ranges: { mongo: 'ranges' }, - rev: 456, - } - ) + await DocArchiveManager.promises.unarchiveDoc(projectId, docId) + expect( + MongoManager.promises.restoreArchivedDoc + ).to.have.been.calledWith(projectId, docId, { + lines, + ranges: { mongo: 'ranges' }, + rev: 456, + }) }) }) @@ -411,12 +406,10 @@ describe('DocArchiveManager', function () { }) it('should return only the doc lines', async function () { - await DocArchiveManager.unarchiveDoc(projectId, docId) - expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith( - projectId, - docId, - { lines, rev: 456 } - ) + await DocArchiveManager.promises.unarchiveDoc(projectId, docId) + expect( + 
MongoManager.promises.restoreArchivedDoc + ).to.have.been.calledWith(projectId, docId, { lines, rev: 456 }) }) }) @@ -430,12 +423,10 @@ describe('DocArchiveManager', function () { }) it('should use the rev obtained from Mongo', async function () { - await DocArchiveManager.unarchiveDoc(projectId, docId) - expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith( - projectId, - docId, - { lines, rev } - ) + await DocArchiveManager.promises.unarchiveDoc(projectId, docId) + expect( + MongoManager.promises.restoreArchivedDoc + ).to.have.been.calledWith(projectId, docId, { lines, rev }) }) }) @@ -450,7 +441,7 @@ describe('DocArchiveManager', function () { it('should throw an error', async function () { await expect( - DocArchiveManager.unarchiveDoc(projectId, docId) + DocArchiveManager.promises.unarchiveDoc(projectId, docId) ).to.eventually.be.rejectedWith( "I don't understand the doc format in s3" ) @@ -460,8 +451,8 @@ describe('DocArchiveManager', function () { }) it('should not do anything if the file is already unarchived', async function () { - MongoManager.findDoc.resolves({ inS3: false }) - await DocArchiveManager.unarchiveDoc(projectId, docId) + MongoManager.promises.findDoc.resolves({ inS3: false }) + await DocArchiveManager.promises.unarchiveDoc(projectId, docId) expect(PersistorManager.getObjectStream).not.to.have.been.called }) @@ -470,7 +461,7 @@ describe('DocArchiveManager', function () { .stub() .rejects(new Errors.NotFoundError()) await expect( - DocArchiveManager.unarchiveDoc(projectId, docId) + DocArchiveManager.promises.unarchiveDoc(projectId, docId) ).to.eventually.be.rejected.and.be.instanceof(Errors.NotFoundError) }) }) @@ -478,11 +469,13 @@ describe('DocArchiveManager', function () { describe('destroyProject', function () { describe('when archiving is enabled', function () { beforeEach(async function () { - await DocArchiveManager.destroyProject(projectId) + await DocArchiveManager.promises.destroyProject(projectId) }) it('should delete the 
project in Mongo', function () { - expect(MongoManager.destroyProject).to.have.been.calledWith(projectId) + expect(MongoManager.promises.destroyProject).to.have.been.calledWith( + projectId + ) }) it('should delete the project in the persistor', function () { @@ -496,11 +489,13 @@ describe('DocArchiveManager', function () { describe('when archiving is disabled', function () { beforeEach(async function () { Settings.docstore.backend = '' - await DocArchiveManager.destroyProject(projectId) + await DocArchiveManager.promises.destroyProject(projectId) }) it('should delete the project in Mongo', function () { - expect(MongoManager.destroyProject).to.have.been.calledWith(projectId) + expect(MongoManager.promises.destroyProject).to.have.been.calledWith( + projectId + ) }) it('should not delete the project in the persistor', function () { @@ -511,35 +506,33 @@ describe('DocArchiveManager', function () { describe('archiveAllDocs', function () { it('should resolve with valid arguments', async function () { - await expect(DocArchiveManager.archiveAllDocs(projectId)).to.eventually.be - .fulfilled + await expect(DocArchiveManager.promises.archiveAllDocs(projectId)).to + .eventually.be.fulfilled }) it('should archive all project docs which are not in s3', async function () { - await DocArchiveManager.archiveAllDocs(projectId) + await DocArchiveManager.promises.archiveAllDocs(projectId) // not inS3 - expect(MongoManager.markDocAsArchived).to.have.been.calledWith( + expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith( projectId, mongoDocs[0]._id ) - expect(MongoManager.markDocAsArchived).to.have.been.calledWith( + expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith( projectId, mongoDocs[1]._id ) - expect(MongoManager.markDocAsArchived).to.have.been.calledWith( + expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith( projectId, mongoDocs[4]._id ) // inS3 - expect(MongoManager.markDocAsArchived).not.to.have.been.calledWith( - 
projectId, - mongoDocs[2]._id - ) - expect(MongoManager.markDocAsArchived).not.to.have.been.calledWith( - projectId, - mongoDocs[3]._id - ) + expect( + MongoManager.promises.markDocAsArchived + ).not.to.have.been.calledWith(projectId, mongoDocs[2]._id) + expect( + MongoManager.promises.markDocAsArchived + ).not.to.have.been.calledWith(projectId, mongoDocs[3]._id) }) describe('when archiving is not configured', function () { @@ -548,20 +541,21 @@ describe('DocArchiveManager', function () { }) it('should bail out early', async function () { - await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) - expect(MongoManager.getNonArchivedProjectDocIds).to.not.have.been.called + await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + expect(MongoManager.promises.getNonArchivedProjectDocIds).to.not.have + .been.called }) }) }) describe('unArchiveAllDocs', function () { it('should resolve with valid arguments', async function () { - await expect(DocArchiveManager.unArchiveAllDocs(projectId)).to.eventually - .be.fulfilled + await expect(DocArchiveManager.promises.unArchiveAllDocs(projectId)).to + .eventually.be.fulfilled }) it('should unarchive all inS3 docs', async function () { - await DocArchiveManager.unArchiveAllDocs(projectId) + await DocArchiveManager.promises.unArchiveAllDocs(projectId) for (const doc of archivedDocs) { expect(PersistorManager.getObjectStream).to.have.been.calledWith( @@ -577,9 +571,9 @@ describe('DocArchiveManager', function () { }) it('should bail out early', async function () { - await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) - expect(MongoManager.getNonDeletedArchivedProjectDocs).to.not.have.been - .called + await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + expect(MongoManager.promises.getNonDeletedArchivedProjectDocs).to.not + .have.been.called }) }) }) diff --git a/services/docstore/test/unit/js/DocManagerTests.js b/services/docstore/test/unit/js/DocManagerTests.js index 
67a2f26547..8405520e6e 100644 --- a/services/docstore/test/unit/js/DocManagerTests.js +++ b/services/docstore/test/unit/js/DocManagerTests.js @@ -17,22 +17,25 @@ describe('DocManager', function () { this.version = 42 this.MongoManager = { - findDoc: sinon.stub(), - getProjectsDocs: sinon.stub(), - patchDoc: sinon.stub().resolves(), - upsertIntoDocCollection: sinon.stub().resolves(), + promises: { + findDoc: sinon.stub(), + getProjectsDocs: sinon.stub(), + patchDoc: sinon.stub().resolves(), + upsertIntoDocCollection: sinon.stub().resolves(), + }, } this.DocArchiveManager = { - unarchiveDoc: sinon.stub(), - unArchiveAllDocs: sinon.stub(), - archiveDoc: sinon.stub().resolves(), + promises: { + unarchiveDoc: sinon.stub(), + unArchiveAllDocs: sinon.stub(), + archiveDoc: sinon.stub().resolves(), + }, } this.RangeManager = { jsonRangesToMongo(r) { return r }, shouldUpdateRanges: sinon.stub().returns(false), - fixCommentIds: sinon.stub(), } this.settings = { docstore: {} } @@ -49,7 +52,7 @@ describe('DocManager', function () { describe('getFullDoc', function () { beforeEach(function () { - this.DocManager._getDoc = sinon.stub() + this.DocManager.promises._getDoc = sinon.stub() this.doc = { _id: this.doc_id, lines: ['2134'], @@ -57,10 +60,13 @@ describe('DocManager', function () { }) it('should call get doc with a quick filter', async function () { - this.DocManager._getDoc.resolves(this.doc) - const doc = await this.DocManager.getFullDoc(this.project_id, this.doc_id) + this.DocManager.promises._getDoc.resolves(this.doc) + const doc = await this.DocManager.promises.getFullDoc( + this.project_id, + this.doc_id + ) doc.should.equal(this.doc) - this.DocManager._getDoc + this.DocManager.promises._getDoc .calledWith(this.project_id, this.doc_id, { lines: true, rev: true, @@ -73,27 +79,27 @@ describe('DocManager', function () { }) it('should return error when get doc errors', async function () { - this.DocManager._getDoc.rejects(this.stubbedError) + 
this.DocManager.promises._getDoc.rejects(this.stubbedError) await expect( - this.DocManager.getFullDoc(this.project_id, this.doc_id) + this.DocManager.promises.getFullDoc(this.project_id, this.doc_id) ).to.be.rejectedWith(this.stubbedError) }) }) describe('getRawDoc', function () { beforeEach(function () { - this.DocManager._getDoc = sinon.stub() + this.DocManager.promises._getDoc = sinon.stub() this.doc = { lines: ['2134'] } }) it('should call get doc with a quick filter', async function () { - this.DocManager._getDoc.resolves(this.doc) - const content = await this.DocManager.getDocLines( + this.DocManager.promises._getDoc.resolves(this.doc) + const doc = await this.DocManager.promises.getDocLines( this.project_id, this.doc_id ) - content.should.equal(this.doc.lines.join('\n')) - this.DocManager._getDoc + doc.should.equal(this.doc) + this.DocManager.promises._getDoc .calledWith(this.project_id, this.doc_id, { lines: true, inS3: true, @@ -102,46 +108,11 @@ describe('DocManager', function () { }) it('should return error when get doc errors', async function () { - this.DocManager._getDoc.rejects(this.stubbedError) + this.DocManager.promises._getDoc.rejects(this.stubbedError) await expect( - this.DocManager.getDocLines(this.project_id, this.doc_id) + this.DocManager.promises.getDocLines(this.project_id, this.doc_id) ).to.be.rejectedWith(this.stubbedError) }) - - it('should return error when get doc does not exist', async function () { - this.DocManager._getDoc.resolves(null) - await expect( - this.DocManager.getDocLines(this.project_id, this.doc_id) - ).to.be.rejectedWith(Errors.NotFoundError) - }) - - it('should return error when get doc has no lines', async function () { - this.DocManager._getDoc.resolves({}) - await expect( - this.DocManager.getDocLines(this.project_id, this.doc_id) - ).to.be.rejectedWith(Errors.DocWithoutLinesError) - }) - }) - - describe('_getDoc', function () { - it('should return error when get doc does not exist', async function () { - 
this.MongoManager.findDoc.resolves(null) - await expect( - this.DocManager._getDoc(this.project_id, this.doc_id, { inS3: true }) - ).to.be.rejectedWith(Errors.NotFoundError) - }) - - it('should fix comment ids', async function () { - this.MongoManager.findDoc.resolves({ - _id: this.doc_id, - ranges: {}, - }) - await this.DocManager._getDoc(this.project_id, this.doc_id, { - inS3: true, - ranges: true, - }) - expect(this.RangeManager.fixCommentIds).to.have.been.called - }) }) describe('getDoc', function () { @@ -157,25 +128,26 @@ describe('DocManager', function () { describe('when using a filter', function () { beforeEach(function () { - this.MongoManager.findDoc.resolves(this.doc) + this.MongoManager.promises.findDoc.resolves(this.doc) }) it('should error if inS3 is not set to true', async function () { await expect( - this.DocManager._getDoc(this.project_id, this.doc_id, { + this.DocManager.promises._getDoc(this.project_id, this.doc_id, { inS3: false, }) ).to.be.rejected }) it('should always get inS3 even when no filter is passed', async function () { - await expect(this.DocManager._getDoc(this.project_id, this.doc_id)).to - .be.rejected - this.MongoManager.findDoc.called.should.equal(false) + await expect( + this.DocManager.promises._getDoc(this.project_id, this.doc_id) + ).to.be.rejected + this.MongoManager.promises.findDoc.called.should.equal(false) }) it('should not error if inS3 is set to true', async function () { - await this.DocManager._getDoc(this.project_id, this.doc_id, { + await this.DocManager.promises._getDoc(this.project_id, this.doc_id, { inS3: true, }) }) @@ -183,8 +155,8 @@ describe('DocManager', function () { describe('when the doc is in the doc collection', function () { beforeEach(async function () { - this.MongoManager.findDoc.resolves(this.doc) - this.result = await this.DocManager._getDoc( + this.MongoManager.promises.findDoc.resolves(this.doc) + this.result = await this.DocManager.promises._getDoc( this.project_id, this.doc_id, { version: 
true, inS3: true } @@ -192,7 +164,7 @@ describe('DocManager', function () { }) it('should get the doc from the doc collection', function () { - this.MongoManager.findDoc + this.MongoManager.promises.findDoc .calledWith(this.project_id, this.doc_id) .should.equal(true) }) @@ -205,9 +177,9 @@ describe('DocManager', function () { describe('when MongoManager.findDoc errors', function () { it('should return the error', async function () { - this.MongoManager.findDoc.rejects(this.stubbedError) + this.MongoManager.promises.findDoc.rejects(this.stubbedError) await expect( - this.DocManager._getDoc(this.project_id, this.doc_id, { + this.DocManager.promises._getDoc(this.project_id, this.doc_id, { version: true, inS3: true, }) @@ -230,15 +202,15 @@ describe('DocManager', function () { version: 2, inS3: false, } - this.MongoManager.findDoc.resolves(this.doc) - this.DocArchiveManager.unarchiveDoc.callsFake( + this.MongoManager.promises.findDoc.resolves(this.doc) + this.DocArchiveManager.promises.unarchiveDoc.callsFake( async (projectId, docId) => { - this.MongoManager.findDoc.resolves({ + this.MongoManager.promises.findDoc.resolves({ ...this.unarchivedDoc, }) } ) - this.result = await this.DocManager._getDoc( + this.result = await this.DocManager.promises._getDoc( this.project_id, this.doc_id, { @@ -249,13 +221,13 @@ describe('DocManager', function () { }) it('should call the DocArchive to unarchive the doc', function () { - this.DocArchiveManager.unarchiveDoc + this.DocArchiveManager.promises.unarchiveDoc .calledWith(this.project_id, this.doc_id) .should.equal(true) }) it('should look up the doc twice', function () { - this.MongoManager.findDoc.calledTwice.should.equal(true) + this.MongoManager.promises.findDoc.calledTwice.should.equal(true) }) it('should return the doc', function () { @@ -267,9 +239,9 @@ describe('DocManager', function () { describe('when the doc does not exist in the docs collection', function () { it('should return a NotFoundError', async function () { - 
this.MongoManager.findDoc.resolves(null) + this.MongoManager.promises.findDoc.resolves(null) await expect( - this.DocManager._getDoc(this.project_id, this.doc_id, { + this.DocManager.promises._getDoc(this.project_id, this.doc_id, { version: true, inS3: true, }) @@ -290,27 +262,23 @@ describe('DocManager', function () { lines: ['mock-lines'], }, ] - this.MongoManager.getProjectsDocs.resolves(this.docs) - this.DocArchiveManager.unArchiveAllDocs.resolves(this.docs) - this.filter = { lines: true, ranges: true } - this.result = await this.DocManager.getAllNonDeletedDocs( + this.MongoManager.promises.getProjectsDocs.resolves(this.docs) + this.DocArchiveManager.promises.unArchiveAllDocs.resolves(this.docs) + this.filter = { lines: true } + this.result = await this.DocManager.promises.getAllNonDeletedDocs( this.project_id, this.filter ) }) it('should get the project from the database', function () { - this.MongoManager.getProjectsDocs.should.have.been.calledWith( + this.MongoManager.promises.getProjectsDocs.should.have.been.calledWith( this.project_id, { include_deleted: false }, this.filter ) }) - it('should fix comment ids', async function () { - expect(this.RangeManager.fixCommentIds).to.have.been.called - }) - it('should return the docs', function () { expect(this.result).to.deep.equal(this.docs) }) @@ -318,10 +286,13 @@ describe('DocManager', function () { describe('when there are no docs for the project', function () { it('should return a NotFoundError', async function () { - this.MongoManager.getProjectsDocs.resolves(null) - this.DocArchiveManager.unArchiveAllDocs.resolves(null) + this.MongoManager.promises.getProjectsDocs.resolves(null) + this.DocArchiveManager.promises.unArchiveAllDocs.resolves(null) await expect( - this.DocManager.getAllNonDeletedDocs(this.project_id, this.filter) + this.DocManager.promises.getAllNonDeletedDocs( + this.project_id, + this.filter + ) ).to.be.rejectedWith(`No docs for project ${this.project_id}`) }) }) @@ -332,7 +303,7 @@ 
describe('DocManager', function () { beforeEach(function () { this.lines = ['mock', 'doc', 'lines'] this.rev = 77 - this.MongoManager.findDoc.resolves({ + this.MongoManager.promises.findDoc.resolves({ _id: new ObjectId(this.doc_id), }) this.meta = {} @@ -340,7 +311,7 @@ describe('DocManager', function () { describe('standard path', function () { beforeEach(async function () { - await this.DocManager.patchDoc( + await this.DocManager.promises.patchDoc( this.project_id, this.doc_id, this.meta @@ -348,14 +319,14 @@ describe('DocManager', function () { }) it('should get the doc', function () { - expect(this.MongoManager.findDoc).to.have.been.calledWith( + expect(this.MongoManager.promises.findDoc).to.have.been.calledWith( this.project_id, this.doc_id ) }) it('should persist the meta', function () { - expect(this.MongoManager.patchDoc).to.have.been.calledWith( + expect(this.MongoManager.promises.patchDoc).to.have.been.calledWith( this.project_id, this.doc_id, this.meta @@ -368,7 +339,7 @@ describe('DocManager', function () { this.settings.docstore.archiveOnSoftDelete = false this.meta.deleted = true - await this.DocManager.patchDoc( + await this.DocManager.promises.patchDoc( this.project_id, this.doc_id, this.meta @@ -376,7 +347,8 @@ describe('DocManager', function () { }) it('should not flush the doc out of mongo', function () { - expect(this.DocArchiveManager.archiveDoc).to.not.have.been.called + expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been + .called }) }) @@ -384,7 +356,7 @@ describe('DocManager', function () { beforeEach(async function () { this.settings.docstore.archiveOnSoftDelete = false this.meta.deleted = false - await this.DocManager.patchDoc( + await this.DocManager.promises.patchDoc( this.project_id, this.doc_id, this.meta @@ -392,7 +364,8 @@ describe('DocManager', function () { }) it('should not flush the doc out of mongo', function () { - expect(this.DocArchiveManager.archiveDoc).to.not.have.been.called + 
expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been + .called }) }) @@ -404,7 +377,7 @@ describe('DocManager', function () { describe('when the background flush succeeds', function () { beforeEach(async function () { - await this.DocManager.patchDoc( + await this.DocManager.promises.patchDoc( this.project_id, this.doc_id, this.meta @@ -416,18 +389,17 @@ describe('DocManager', function () { }) it('should flush the doc out of mongo', function () { - expect(this.DocArchiveManager.archiveDoc).to.have.been.calledWith( - this.project_id, - this.doc_id - ) + expect( + this.DocArchiveManager.promises.archiveDoc + ).to.have.been.calledWith(this.project_id, this.doc_id) }) }) describe('when the background flush fails', function () { beforeEach(async function () { this.err = new Error('foo') - this.DocArchiveManager.archiveDoc.rejects(this.err) - await this.DocManager.patchDoc( + this.DocArchiveManager.promises.archiveDoc.rejects(this.err) + await this.DocManager.promises.patchDoc( this.project_id, this.doc_id, this.meta @@ -450,9 +422,9 @@ describe('DocManager', function () { describe('when the doc does not exist', function () { it('should return a NotFoundError', async function () { - this.MongoManager.findDoc.resolves(null) + this.MongoManager.promises.findDoc.resolves(null) await expect( - this.DocManager.patchDoc(this.project_id, this.doc_id, {}) + this.DocManager.promises.patchDoc(this.project_id, this.doc_id, {}) ).to.be.rejectedWith( `No such project/doc to delete: ${this.project_id}/${this.doc_id}` ) @@ -498,13 +470,13 @@ describe('DocManager', function () { ranges: this.originalRanges, } - this.DocManager._getDoc = sinon.stub() + this.DocManager.promises._getDoc = sinon.stub() }) describe('when only the doc lines have changed', function () { beforeEach(async function () { - this.DocManager._getDoc = sinon.stub().resolves(this.doc) - this.result = await this.DocManager.updateDoc( + this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) + 
this.result = await this.DocManager.promises.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -514,7 +486,7 @@ describe('DocManager', function () { }) it('should get the existing doc', function () { - this.DocManager._getDoc + this.DocManager.promises._getDoc .calledWith(this.project_id, this.doc_id, { version: true, rev: true, @@ -526,7 +498,7 @@ describe('DocManager', function () { }) it('should upsert the document to the doc collection', function () { - this.MongoManager.upsertIntoDocCollection + this.MongoManager.promises.upsertIntoDocCollection .calledWith(this.project_id, this.doc_id, this.rev, { lines: this.newDocLines, }) @@ -540,9 +512,9 @@ describe('DocManager', function () { describe('when the doc ranges have changed', function () { beforeEach(async function () { - this.DocManager._getDoc = sinon.stub().resolves(this.doc) + this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) this.RangeManager.shouldUpdateRanges.returns(true) - this.result = await this.DocManager.updateDoc( + this.result = await this.DocManager.promises.updateDoc( this.project_id, this.doc_id, this.oldDocLines, @@ -552,7 +524,7 @@ describe('DocManager', function () { }) it('should upsert the ranges', function () { - this.MongoManager.upsertIntoDocCollection + this.MongoManager.promises.upsertIntoDocCollection .calledWith(this.project_id, this.doc_id, this.rev, { ranges: this.newRanges, }) @@ -566,8 +538,8 @@ describe('DocManager', function () { describe('when only the version has changed', function () { beforeEach(async function () { - this.DocManager._getDoc = sinon.stub().resolves(this.doc) - this.result = await this.DocManager.updateDoc( + this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) + this.result = await this.DocManager.promises.updateDoc( this.project_id, this.doc_id, this.oldDocLines, @@ -577,7 +549,7 @@ describe('DocManager', function () { }) it('should update the version', function () { - 
this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith( + this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith( this.project_id, this.doc_id, this.rev, @@ -592,8 +564,8 @@ describe('DocManager', function () { describe('when the doc has not changed at all', function () { beforeEach(async function () { - this.DocManager._getDoc = sinon.stub().resolves(this.doc) - this.result = await this.DocManager.updateDoc( + this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) + this.result = await this.DocManager.promises.updateDoc( this.project_id, this.doc_id, this.oldDocLines, @@ -603,7 +575,9 @@ describe('DocManager', function () { }) it('should not update the ranges or lines or version', function () { - this.MongoManager.upsertIntoDocCollection.called.should.equal(false) + this.MongoManager.promises.upsertIntoDocCollection.called.should.equal( + false + ) }) it('should return the old rev and modified == false', function () { @@ -614,7 +588,7 @@ describe('DocManager', function () { describe('when the version is null', function () { it('should return an error', async function () { await expect( - this.DocManager.updateDoc( + this.DocManager.promises.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -628,7 +602,7 @@ describe('DocManager', function () { describe('when the lines are null', function () { it('should return an error', async function () { await expect( - this.DocManager.updateDoc( + this.DocManager.promises.updateDoc( this.project_id, this.doc_id, null, @@ -642,7 +616,7 @@ describe('DocManager', function () { describe('when the ranges are null', function () { it('should return an error', async function () { await expect( - this.DocManager.updateDoc( + this.DocManager.promises.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -656,9 +630,9 @@ describe('DocManager', function () { describe('when there is a generic error getting the doc', function () { beforeEach(async function () { this.error 
= new Error('doc could not be found') - this.DocManager._getDoc = sinon.stub().rejects(this.error) + this.DocManager.promises._getDoc = sinon.stub().rejects(this.error) await expect( - this.DocManager.updateDoc( + this.DocManager.promises.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -669,15 +643,16 @@ describe('DocManager', function () { }) it('should not upsert the document to the doc collection', function () { - this.MongoManager.upsertIntoDocCollection.should.not.have.been.called + this.MongoManager.promises.upsertIntoDocCollection.should.not.have.been + .called }) }) describe('when the version was decremented', function () { it('should return an error', async function () { - this.DocManager._getDoc = sinon.stub().resolves(this.doc) + this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) await expect( - this.DocManager.updateDoc( + this.DocManager.promises.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -690,8 +665,8 @@ describe('DocManager', function () { describe('when the doc lines have not changed', function () { beforeEach(async function () { - this.DocManager._getDoc = sinon.stub().resolves(this.doc) - this.result = await this.DocManager.updateDoc( + this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) + this.result = await this.DocManager.promises.updateDoc( this.project_id, this.doc_id, this.oldDocLines.slice(), @@ -701,7 +676,9 @@ describe('DocManager', function () { }) it('should not update the doc', function () { - this.MongoManager.upsertIntoDocCollection.called.should.equal(false) + this.MongoManager.promises.upsertIntoDocCollection.called.should.equal( + false + ) }) it('should return the existing rev', function () { @@ -711,8 +688,8 @@ describe('DocManager', function () { describe('when the doc does not exist', function () { beforeEach(async function () { - this.DocManager._getDoc = sinon.stub().resolves(null) - this.result = await this.DocManager.updateDoc( + 
this.DocManager.promises._getDoc = sinon.stub().resolves(null) + this.result = await this.DocManager.promises.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -722,7 +699,7 @@ describe('DocManager', function () { }) it('should upsert the document to the doc collection', function () { - this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith( + this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith( this.project_id, this.doc_id, undefined, @@ -741,12 +718,12 @@ describe('DocManager', function () { describe('when another update is racing', function () { beforeEach(async function () { - this.DocManager._getDoc = sinon.stub().resolves(this.doc) - this.MongoManager.upsertIntoDocCollection + this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) + this.MongoManager.promises.upsertIntoDocCollection .onFirstCall() .rejects(new Errors.DocRevValueError()) this.RangeManager.shouldUpdateRanges.returns(true) - this.result = await this.DocManager.updateDoc( + this.result = await this.DocManager.promises.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -756,7 +733,7 @@ describe('DocManager', function () { }) it('should upsert the doc twice', function () { - this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith( + this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith( this.project_id, this.doc_id, this.rev, @@ -766,7 +743,8 @@ describe('DocManager', function () { version: this.version + 1, } ) - this.MongoManager.upsertIntoDocCollection.should.have.been.calledTwice + this.MongoManager.promises.upsertIntoDocCollection.should.have.been + .calledTwice }) it('should return the new rev', function () { diff --git a/services/docstore/test/unit/js/HttpControllerTests.js b/services/docstore/test/unit/js/HttpControllerTests.js index ab491ec150..bf78696890 100644 --- a/services/docstore/test/unit/js/HttpControllerTests.js +++ 
b/services/docstore/test/unit/js/HttpControllerTests.js @@ -14,7 +14,7 @@ describe('HttpController', function () { max_doc_length: 2 * 1024 * 1024, } this.DocArchiveManager = { - unArchiveAllDocs: sinon.stub().returns(), + unArchiveAllDocs: sinon.stub().yields(), } this.DocManager = {} this.HttpController = SandboxedModule.require(modulePath, { @@ -54,13 +54,15 @@ describe('HttpController', function () { describe('getDoc', function () { describe('without deleted docs', function () { - beforeEach(async function () { + beforeEach(function () { this.req.params = { project_id: this.projectId, doc_id: this.docId, } - this.DocManager.getFullDoc = sinon.stub().resolves(this.doc) - await this.HttpController.getDoc(this.req, this.res, this.next) + this.DocManager.getFullDoc = sinon + .stub() + .callsArgWith(2, null, this.doc) + this.HttpController.getDoc(this.req, this.res, this.next) }) it('should get the document with the version (including deleted)', function () { @@ -87,24 +89,26 @@ describe('HttpController', function () { project_id: this.projectId, doc_id: this.docId, } - this.DocManager.getFullDoc = sinon.stub().resolves(this.deletedDoc) + this.DocManager.getFullDoc = sinon + .stub() + .callsArgWith(2, null, this.deletedDoc) }) - it('should get the doc from the doc manager', async function () { - await this.HttpController.getDoc(this.req, this.res, this.next) + it('should get the doc from the doc manager', function () { + this.HttpController.getDoc(this.req, this.res, this.next) this.DocManager.getFullDoc .calledWith(this.projectId, this.docId) .should.equal(true) }) - it('should return 404 if the query string delete is not set ', async function () { - await this.HttpController.getDoc(this.req, this.res, this.next) + it('should return 404 if the query string delete is not set ', function () { + this.HttpController.getDoc(this.req, this.res, this.next) this.res.sendStatus.calledWith(404).should.equal(true) }) - it('should return the doc as JSON if include_deleted is 
set to true', async function () { + it('should return the doc as JSON if include_deleted is set to true', function () { this.req.query.include_deleted = 'true' - await this.HttpController.getDoc(this.req, this.res, this.next) + this.HttpController.getDoc(this.req, this.res, this.next) this.res.json .calledWith({ _id: this.docId, @@ -119,15 +123,13 @@ describe('HttpController', function () { }) describe('getRawDoc', function () { - beforeEach(async function () { + beforeEach(function () { this.req.params = { project_id: this.projectId, doc_id: this.docId, } - this.DocManager.getDocLines = sinon - .stub() - .resolves(this.doc.lines.join('\n')) - await this.HttpController.getRawDoc(this.req, this.res, this.next) + this.DocManager.getDocLines = sinon.stub().callsArgWith(2, null, this.doc) + this.HttpController.getRawDoc(this.req, this.res, this.next) }) it('should get the document without the version', function () { @@ -152,7 +154,7 @@ describe('HttpController', function () { describe('getAllDocs', function () { describe('normally', function () { - beforeEach(async function () { + beforeEach(function () { this.req.params = { project_id: this.projectId } this.docs = [ { @@ -166,8 +168,10 @@ describe('HttpController', function () { rev: 4, }, ] - this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs) - await this.HttpController.getAllDocs(this.req, this.res, this.next) + this.DocManager.getAllNonDeletedDocs = sinon + .stub() + .callsArgWith(2, null, this.docs) + this.HttpController.getAllDocs(this.req, this.res, this.next) }) it('should get all the (non-deleted) docs', function () { @@ -195,7 +199,7 @@ describe('HttpController', function () { }) describe('with null lines', function () { - beforeEach(async function () { + beforeEach(function () { this.req.params = { project_id: this.projectId } this.docs = [ { @@ -209,8 +213,10 @@ describe('HttpController', function () { rev: 4, }, ] - this.DocManager.getAllNonDeletedDocs = 
sinon.stub().resolves(this.docs) - await this.HttpController.getAllDocs(this.req, this.res, this.next) + this.DocManager.getAllNonDeletedDocs = sinon + .stub() + .callsArgWith(2, null, this.docs) + this.HttpController.getAllDocs(this.req, this.res, this.next) }) it('should return the doc with fallback lines', function () { @@ -232,7 +238,7 @@ describe('HttpController', function () { }) describe('with a null doc', function () { - beforeEach(async function () { + beforeEach(function () { this.req.params = { project_id: this.projectId } this.docs = [ { @@ -247,8 +253,10 @@ describe('HttpController', function () { rev: 4, }, ] - this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs) - await this.HttpController.getAllDocs(this.req, this.res, this.next) + this.DocManager.getAllNonDeletedDocs = sinon + .stub() + .callsArgWith(2, null, this.docs) + this.HttpController.getAllDocs(this.req, this.res, this.next) }) it('should return the non null docs as JSON', function () { @@ -284,7 +292,7 @@ describe('HttpController', function () { describe('getAllRanges', function () { describe('normally', function () { - beforeEach(async function () { + beforeEach(function () { this.req.params = { project_id: this.projectId } this.docs = [ { @@ -296,8 +304,10 @@ describe('HttpController', function () { ranges: { mock_ranges: 'two' }, }, ] - this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs) - await this.HttpController.getAllRanges(this.req, this.res, this.next) + this.DocManager.getAllNonDeletedDocs = sinon + .stub() + .callsArgWith(2, null, this.docs) + this.HttpController.getAllRanges(this.req, this.res, this.next) }) it('should get all the (non-deleted) doc ranges', function () { @@ -332,17 +342,16 @@ describe('HttpController', function () { }) describe('when the doc lines exist and were updated', function () { - beforeEach(async function () { + beforeEach(function () { this.req.body = { lines: (this.lines = ['hello', 'world']), version: 
(this.version = 42), ranges: (this.ranges = { changes: 'mock' }), } - this.rev = 5 this.DocManager.updateDoc = sinon .stub() - .resolves({ modified: true, rev: this.rev }) - await this.HttpController.updateDoc(this.req, this.res, this.next) + .yields(null, true, (this.rev = 5)) + this.HttpController.updateDoc(this.req, this.res, this.next) }) it('should update the document', function () { @@ -365,17 +374,16 @@ describe('HttpController', function () { }) describe('when the doc lines exist and were not updated', function () { - beforeEach(async function () { + beforeEach(function () { this.req.body = { lines: (this.lines = ['hello', 'world']), version: (this.version = 42), ranges: {}, } - this.rev = 5 this.DocManager.updateDoc = sinon .stub() - .resolves({ modified: false, rev: this.rev }) - await this.HttpController.updateDoc(this.req, this.res, this.next) + .yields(null, false, (this.rev = 5)) + this.HttpController.updateDoc(this.req, this.res, this.next) }) it('should return a modified status', function () { @@ -386,12 +394,10 @@ describe('HttpController', function () { }) describe('when the doc lines are not provided', function () { - beforeEach(async function () { + beforeEach(function () { this.req.body = { version: 42, ranges: {} } - this.DocManager.updateDoc = sinon - .stub() - .resolves({ modified: false, rev: 0 }) - await this.HttpController.updateDoc(this.req, this.res, this.next) + this.DocManager.updateDoc = sinon.stub().yields(null, false) + this.HttpController.updateDoc(this.req, this.res, this.next) }) it('should not update the document', function () { @@ -404,12 +410,10 @@ describe('HttpController', function () { }) describe('when the doc version are not provided', function () { - beforeEach(async function () { + beforeEach(function () { this.req.body = { version: 42, lines: ['hello world'] } - this.DocManager.updateDoc = sinon - .stub() - .resolves({ modified: false, rev: 0 }) - await this.HttpController.updateDoc(this.req, this.res, this.next) + 
this.DocManager.updateDoc = sinon.stub().yields(null, false) + this.HttpController.updateDoc(this.req, this.res, this.next) }) it('should not update the document', function () { @@ -422,12 +426,10 @@ describe('HttpController', function () { }) describe('when the doc ranges is not provided', function () { - beforeEach(async function () { + beforeEach(function () { this.req.body = { lines: ['foo'], version: 42 } - this.DocManager.updateDoc = sinon - .stub() - .resolves({ modified: false, rev: 0 }) - await this.HttpController.updateDoc(this.req, this.res, this.next) + this.DocManager.updateDoc = sinon.stub().yields(null, false) + this.HttpController.updateDoc(this.req, this.res, this.next) }) it('should not update the document', function () { @@ -440,20 +442,13 @@ describe('HttpController', function () { }) describe('when the doc body is too large', function () { - beforeEach(async function () { + beforeEach(function () { this.req.body = { lines: (this.lines = Array(2049).fill('a'.repeat(1024))), version: (this.version = 42), ranges: (this.ranges = { changes: 'mock' }), } - this.DocManager.updateDoc = sinon - .stub() - .resolves({ modified: false, rev: 0 }) - await this.HttpController.updateDoc(this.req, this.res, this.next) - }) - - it('should not update the document', function () { - this.DocManager.updateDoc.called.should.equal(false) + this.HttpController.updateDoc(this.req, this.res, this.next) }) it('should return a 413 (too large) response', function () { @@ -467,14 +462,14 @@ describe('HttpController', function () { }) describe('patchDoc', function () { - beforeEach(async function () { + beforeEach(function () { this.req.params = { project_id: this.projectId, doc_id: this.docId, } this.req.body = { name: 'foo.tex' } - this.DocManager.patchDoc = sinon.stub().resolves() - await this.HttpController.patchDoc(this.req, this.res, this.next) + this.DocManager.patchDoc = sinon.stub().yields(null) + this.HttpController.patchDoc(this.req, this.res, this.next) }) 
it('should delete the document', function () { @@ -489,11 +484,11 @@ describe('HttpController', function () { }) describe('with an invalid payload', function () { - beforeEach(async function () { + beforeEach(function () { this.req.body = { cannot: 'happen' } - this.DocManager.patchDoc = sinon.stub().resolves() - await this.HttpController.patchDoc(this.req, this.res, this.next) + this.DocManager.patchDoc = sinon.stub().yields(null) + this.HttpController.patchDoc(this.req, this.res, this.next) }) it('should log a message', function () { @@ -514,10 +509,10 @@ describe('HttpController', function () { }) describe('archiveAllDocs', function () { - beforeEach(async function () { + beforeEach(function () { this.req.params = { project_id: this.projectId } - this.DocArchiveManager.archiveAllDocs = sinon.stub().resolves() - await this.HttpController.archiveAllDocs(this.req, this.res, this.next) + this.DocArchiveManager.archiveAllDocs = sinon.stub().callsArg(1) + this.HttpController.archiveAllDocs(this.req, this.res, this.next) }) it('should archive the project', function () { @@ -537,12 +532,9 @@ describe('HttpController', function () { }) describe('on success', function () { - beforeEach(async function () { - await this.HttpController.unArchiveAllDocs( - this.req, - this.res, - this.next - ) + beforeEach(function (done) { + this.res.sendStatus.callsFake(() => done()) + this.HttpController.unArchiveAllDocs(this.req, this.res, this.next) }) it('returns a 200', function () { @@ -551,15 +543,12 @@ describe('HttpController', function () { }) describe("when the archived rev doesn't match", function () { - beforeEach(async function () { - this.DocArchiveManager.unArchiveAllDocs.rejects( + beforeEach(function (done) { + this.res.sendStatus.callsFake(() => done()) + this.DocArchiveManager.unArchiveAllDocs.yields( new Errors.DocRevValueError('bad rev') ) - await this.HttpController.unArchiveAllDocs( - this.req, - this.res, - this.next - ) + 
this.HttpController.unArchiveAllDocs(this.req, this.res, this.next) }) it('returns a 409', function () { @@ -569,10 +558,10 @@ describe('HttpController', function () { }) describe('destroyProject', function () { - beforeEach(async function () { + beforeEach(function () { this.req.params = { project_id: this.projectId } - this.DocArchiveManager.destroyProject = sinon.stub().resolves() - await this.HttpController.destroyProject(this.req, this.res, this.next) + this.DocArchiveManager.destroyProject = sinon.stub().callsArg(1) + this.HttpController.destroyProject(this.req, this.res, this.next) }) it('should destroy the docs', function () { diff --git a/services/docstore/test/unit/js/MongoManagerTests.js b/services/docstore/test/unit/js/MongoManagerTests.js index b96b661df4..4f8467db76 100644 --- a/services/docstore/test/unit/js/MongoManagerTests.js +++ b/services/docstore/test/unit/js/MongoManagerTests.js @@ -41,7 +41,7 @@ describe('MongoManager', function () { this.doc = { name: 'mock-doc' } this.db.docs.findOne = sinon.stub().resolves(this.doc) this.filter = { lines: true } - this.result = await this.MongoManager.findDoc( + this.result = await this.MongoManager.promises.findDoc( this.projectId, this.docId, this.filter @@ -70,7 +70,11 @@ describe('MongoManager', function () { describe('patchDoc', function () { beforeEach(async function () { this.meta = { name: 'foo.tex' } - await this.MongoManager.patchDoc(this.projectId, this.docId, this.meta) + await this.MongoManager.promises.patchDoc( + this.projectId, + this.docId, + this.meta + ) }) it('should pass the parameter along', function () { @@ -100,7 +104,7 @@ describe('MongoManager', function () { describe('with included_deleted = false', function () { beforeEach(async function () { - this.result = await this.MongoManager.getProjectsDocs( + this.result = await this.MongoManager.promises.getProjectsDocs( this.projectId, { include_deleted: false }, this.filter @@ -128,7 +132,7 @@ describe('MongoManager', function () { 
describe('with included_deleted = true', function () { beforeEach(async function () { - this.result = await this.MongoManager.getProjectsDocs( + this.result = await this.MongoManager.promises.getProjectsDocs( this.projectId, { include_deleted: true }, this.filter @@ -163,7 +167,7 @@ describe('MongoManager', function () { this.db.docs.find = sinon.stub().returns({ toArray: sinon.stub().resolves([this.doc1, this.doc2, this.doc3]), }) - this.result = await this.MongoManager.getProjectsDeletedDocs( + this.result = await this.MongoManager.promises.getProjectsDeletedDocs( this.projectId, this.filter ) @@ -199,7 +203,7 @@ describe('MongoManager', function () { }) it('should upsert the document', async function () { - await this.MongoManager.upsertIntoDocCollection( + await this.MongoManager.promises.upsertIntoDocCollection( this.projectId, this.docId, this.oldRev, @@ -219,7 +223,7 @@ describe('MongoManager', function () { it('should handle update error', async function () { this.db.docs.updateOne.rejects(this.stubbedErr) await expect( - this.MongoManager.upsertIntoDocCollection( + this.MongoManager.promises.upsertIntoDocCollection( this.projectId, this.docId, this.rev, @@ -231,7 +235,7 @@ describe('MongoManager', function () { }) it('should insert without a previous rev', async function () { - await this.MongoManager.upsertIntoDocCollection( + await this.MongoManager.promises.upsertIntoDocCollection( this.projectId, this.docId, null, @@ -250,7 +254,7 @@ describe('MongoManager', function () { it('should handle generic insert error', async function () { this.db.docs.insertOne.rejects(this.stubbedErr) await expect( - this.MongoManager.upsertIntoDocCollection( + this.MongoManager.promises.upsertIntoDocCollection( this.projectId, this.docId, null, @@ -262,7 +266,7 @@ describe('MongoManager', function () { it('should handle duplicate insert error', async function () { this.db.docs.insertOne.rejects({ code: 11000 }) await expect( - this.MongoManager.upsertIntoDocCollection( + 
this.MongoManager.promises.upsertIntoDocCollection( this.projectId, this.docId, null, @@ -276,7 +280,7 @@ describe('MongoManager', function () { beforeEach(async function () { this.projectId = new ObjectId() this.db.docs.deleteMany = sinon.stub().resolves() - await this.MongoManager.destroyProject(this.projectId) + await this.MongoManager.promises.destroyProject(this.projectId) }) it('should destroy all docs', function () { @@ -293,13 +297,13 @@ describe('MongoManager', function () { it('should not error when the rev has not changed', async function () { this.db.docs.findOne = sinon.stub().resolves({ rev: 1 }) - await this.MongoManager.checkRevUnchanged(this.doc) + await this.MongoManager.promises.checkRevUnchanged(this.doc) }) it('should return an error when the rev has changed', async function () { this.db.docs.findOne = sinon.stub().resolves({ rev: 2 }) await expect( - this.MongoManager.checkRevUnchanged(this.doc) + this.MongoManager.promises.checkRevUnchanged(this.doc) ).to.be.rejectedWith(Errors.DocModifiedError) }) @@ -307,14 +311,14 @@ describe('MongoManager', function () { this.db.docs.findOne = sinon.stub().resolves({ rev: 2 }) this.doc = { _id: new ObjectId(), name: 'mock-doc', rev: NaN } await expect( - this.MongoManager.checkRevUnchanged(this.doc) + this.MongoManager.promises.checkRevUnchanged(this.doc) ).to.be.rejectedWith(Errors.DocRevValueError) }) it('should return a value error if checked doc rev is NaN', async function () { this.db.docs.findOne = sinon.stub().resolves({ rev: NaN }) await expect( - this.MongoManager.checkRevUnchanged(this.doc) + this.MongoManager.promises.checkRevUnchanged(this.doc) ).to.be.rejectedWith(Errors.DocRevValueError) }) }) @@ -330,7 +334,7 @@ describe('MongoManager', function () { describe('complete doc', function () { beforeEach(async function () { - await this.MongoManager.restoreArchivedDoc( + await this.MongoManager.promises.restoreArchivedDoc( this.projectId, this.docId, this.archivedDoc @@ -360,7 +364,7 @@ 
describe('MongoManager', function () { describe('without ranges', function () { beforeEach(async function () { delete this.archivedDoc.ranges - await this.MongoManager.restoreArchivedDoc( + await this.MongoManager.promises.restoreArchivedDoc( this.projectId, this.docId, this.archivedDoc @@ -391,7 +395,7 @@ describe('MongoManager', function () { it('throws a DocRevValueError', async function () { this.db.docs.updateOne.resolves({ matchedCount: 0 }) await expect( - this.MongoManager.restoreArchivedDoc( + this.MongoManager.promises.restoreArchivedDoc( this.projectId, this.docId, this.archivedDoc diff --git a/services/docstore/test/unit/js/RangeManagerTests.js b/services/docstore/test/unit/js/RangeManagerTests.js index ba99280a7a..7a2de7352e 100644 --- a/services/docstore/test/unit/js/RangeManagerTests.js +++ b/services/docstore/test/unit/js/RangeManagerTests.js @@ -30,7 +30,7 @@ describe('RangeManager', function () { }) describe('jsonRangesToMongo', function () { - it('should convert ObjectIds and dates to proper objects and fix comment id', function () { + it('should convert ObjectIds and dates to proper objects', function () { const changeId = new ObjectId().toString() const commentId = new ObjectId().toString() const userId = new ObjectId().toString() @@ -66,7 +66,7 @@ describe('RangeManager', function () { ], comments: [ { - id: new ObjectId(threadId), + id: new ObjectId(commentId), op: { c: 'foo', p: 3, t: new ObjectId(threadId) }, }, ], @@ -110,6 +110,7 @@ describe('RangeManager', function () { return it('should be consistent when transformed through json -> mongo -> json', function () { const changeId = new ObjectId().toString() + const commentId = new ObjectId().toString() const userId = new ObjectId().toString() const threadId = new ObjectId().toString() const ts = new Date().toJSON() @@ -126,7 +127,7 @@ describe('RangeManager', function () { ], comments: [ { - id: threadId, + id: commentId, op: { c: 'foo', p: 3, t: threadId }, }, ], @@ -141,7 +142,6 @@ 
describe('RangeManager', function () { return describe('shouldUpdateRanges', function () { beforeEach(function () { - const threadId = new ObjectId() this.ranges = { changes: [ { @@ -155,8 +155,8 @@ describe('RangeManager', function () { ], comments: [ { - id: threadId, - op: { c: 'foo', p: 3, t: threadId }, + id: new ObjectId(), + op: { c: 'foo', p: 3, t: new ObjectId() }, }, ], } diff --git a/services/document-updater/app/js/DiffCodec.js b/services/document-updater/app/js/DiffCodec.js index 17da409386..8c574cff70 100644 --- a/services/document-updater/app/js/DiffCodec.js +++ b/services/document-updater/app/js/DiffCodec.js @@ -1,4 +1,3 @@ -const OError = require('@overleaf/o-error') const DMP = require('diff-match-patch') const { TextOperation } = require('overleaf-editor-core') const dmp = new DMP() @@ -39,62 +38,23 @@ module.exports = { return ops }, - /** - * @param {import("overleaf-editor-core").StringFileData} file - * @param {string} after - * @return {TextOperation} - */ - diffAsHistoryOTEditOperation(file, after) { - const beforeWithoutTrackedDeletes = file.getContent({ - filterTrackedDeletes: true, - }) - const diffs = dmp.diff_main(beforeWithoutTrackedDeletes, after) + diffAsHistoryV1EditOperation(before, after) { + const diffs = dmp.diff_main(before, after) dmp.diff_cleanupSemantic(diffs) - const trackedChanges = file.trackedChanges.asSorted() - let nextTc = trackedChanges.shift() - const op = new TextOperation() for (const diff of diffs) { - let [type, content] = diff + const [type, content] = diff if (type === this.ADDED) { op.insert(content) - } else if (type === this.REMOVED || type === this.UNCHANGED) { - while (op.baseLength + content.length > nextTc?.range.start) { - if (nextTc.tracking.type === 'delete') { - const untilRange = nextTc.range.start - op.baseLength - if (type === this.REMOVED) { - op.remove(untilRange) - } else if (type === this.UNCHANGED) { - op.retain(untilRange) - } - op.retain(nextTc.range.end - nextTc.range.start) - content = 
content.slice(untilRange) - } - nextTc = trackedChanges.shift() - } - if (type === this.REMOVED) { - op.remove(content.length) - } else if (type === this.UNCHANGED) { - op.retain(content.length) - } + } else if (type === this.REMOVED) { + op.remove(content.length) + } else if (type === this.UNCHANGED) { + op.retain(content.length) } else { throw new Error('Unknown type') } } - while (nextTc) { - if ( - nextTc.tracking.type !== 'delete' || - nextTc.range.start !== op.baseLength - ) { - throw new OError( - 'StringFileData.trackedChanges out of sync: unexpected range after end of diff', - { nextTc, baseLength: op.baseLength } - ) - } - op.retain(nextTc.range.end - nextTc.range.start) - nextTc = trackedChanges.shift() - } return op }, } diff --git a/services/document-updater/app/js/DocumentManager.js b/services/document-updater/app/js/DocumentManager.js index 3fb3d10a6e..4803056423 100644 --- a/services/document-updater/app/js/DocumentManager.js +++ b/services/document-updater/app/js/DocumentManager.js @@ -194,8 +194,9 @@ const DocumentManager = { let op if (type === 'history-ot') { const file = StringFileData.fromRaw(oldLines) - const operation = DiffCodec.diffAsHistoryOTEditOperation( - file, + const operation = DiffCodec.diffAsHistoryV1EditOperation( + // TODO(24596): tc support for history-ot + file.getContent({ filterTrackedDeletes: true }), newLines.join('\n') ) if (operation.isNoop()) { @@ -535,6 +536,11 @@ const DocumentManager = { if (opts.historyRangesMigration) { historyRangesSupport = opts.historyRangesMigration === 'forwards' } + if (!Array.isArray(lines)) { + const file = StringFileData.fromRaw(lines) + // TODO(24596): tc support for history-ot + lines = file.getLines() + } await ProjectHistoryRedisManager.promises.queueResyncDocContent( projectId, diff --git a/services/document-updater/app/js/Limits.js b/services/document-updater/app/js/Limits.js index cbd9293042..268ccd3f9b 100644 --- a/services/document-updater/app/js/Limits.js +++ 
b/services/document-updater/app/js/Limits.js @@ -28,19 +28,4 @@ module.exports = { // since we didn't hit the limit in the loop, the document is within the allowed length return false }, - - /** - * @param {StringFileRawData} raw - * @param {number} maxDocLength - */ - stringFileDataContentIsTooLarge(raw, maxDocLength) { - let n = raw.content.length - if (n <= maxDocLength) return false // definitely under the limit, no need to calculate the total size - for (const tc of raw.trackedChanges ?? []) { - if (tc.tracking.type !== 'delete') continue - n -= tc.range.length - if (n <= maxDocLength) return false // under the limit now, no need to calculate the exact size - } - return true - }, } diff --git a/services/document-updater/app/js/ProjectHistoryRedisManager.js b/services/document-updater/app/js/ProjectHistoryRedisManager.js index 78e9c2ea4c..9a9985d99a 100644 --- a/services/document-updater/app/js/ProjectHistoryRedisManager.js +++ b/services/document-updater/app/js/ProjectHistoryRedisManager.js @@ -8,14 +8,13 @@ const rclient = require('@overleaf/redis-wrapper').createClient( ) const logger = require('@overleaf/logger') const metrics = require('./Metrics') -const { docIsTooLarge, stringFileDataContentIsTooLarge } = require('./Limits') +const { docIsTooLarge } = require('./Limits') const { addTrackedDeletesToContent, extractOriginOrSource } = require('./Utils') const HistoryConversions = require('./HistoryConversions') const OError = require('@overleaf/o-error') /** * @import { Ranges } from './types' - * @import { StringFileRawData } from 'overleaf-editor-core/lib/types' */ const ProjectHistoryRedisManager = { @@ -181,7 +180,7 @@ const ProjectHistoryRedisManager = { * @param {string} projectId * @param {string} projectHistoryId * @param {string} docId - * @param {string[] | StringFileRawData} lines + * @param {string[]} lines * @param {Ranges} ranges * @param {string[]} resolvedCommentIds * @param {number} version @@ -205,8 +204,13 @@ const 
ProjectHistoryRedisManager = { 'queue doc content resync' ) + let content = lines.join('\n') + if (historyRangesSupport) { + content = addTrackedDeletesToContent(content, ranges.changes ?? []) + } + const projectUpdate = { - resyncDocContent: { version }, + resyncDocContent: { content, version }, projectHistoryId, path: pathname, doc: docId, @@ -215,38 +219,17 @@ const ProjectHistoryRedisManager = { }, } - let content = '' - if (Array.isArray(lines)) { - content = lines.join('\n') - if (historyRangesSupport) { - content = addTrackedDeletesToContent(content, ranges.changes ?? []) - projectUpdate.resyncDocContent.ranges = - HistoryConversions.toHistoryRanges(ranges) - projectUpdate.resyncDocContent.resolvedCommentIds = resolvedCommentIds - } - } else { - content = lines.content - projectUpdate.resyncDocContent.historyOTRanges = { - comments: lines.comments, - trackedChanges: lines.trackedChanges, - } + if (historyRangesSupport) { + projectUpdate.resyncDocContent.ranges = + HistoryConversions.toHistoryRanges(ranges) + projectUpdate.resyncDocContent.resolvedCommentIds = resolvedCommentIds } - projectUpdate.resyncDocContent.content = content const jsonUpdate = JSON.stringify(projectUpdate) // Do an optimised size check on the docLines using the serialised // project update length as an upper bound const sizeBound = jsonUpdate.length - if (Array.isArray(lines)) { - if (docIsTooLarge(sizeBound, lines, Settings.max_doc_length)) { - throw new OError( - 'blocking resync doc content insert into project history queue: doc is too large', - { projectId, docId, docSize: sizeBound } - ) - } - } else if ( - stringFileDataContentIsTooLarge(lines, Settings.max_doc_length) - ) { + if (docIsTooLarge(sizeBound, lines, Settings.max_doc_length)) { throw new OError( 'blocking resync doc content insert into project history queue: doc is too large', { projectId, docId, docSize: sizeBound } diff --git a/services/document-updater/docker-compose.ci.yml 
b/services/document-updater/docker-compose.ci.yml index ca15f35fef..2fe97bd9b3 100644 --- a/services/document-updater/docker-compose.ci.yml +++ b/services/document-updater/docker-compose.ci.yml @@ -28,15 +28,12 @@ services: MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" - volumes: - - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started redis: condition: service_healthy user: node - entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance @@ -48,7 +45,7 @@ services: command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . user: root redis: - image: redis:7.4.3 + image: redis healthcheck: test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index 3688d21d0b..8a94d1a24c 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -26,7 +26,6 @@ services: - .:/overleaf/services/document-updater - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries - - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/document-updater environment: ELASTIC_SEARCH_DSN: es:9200 @@ -46,11 +45,10 @@ services: condition: service_started redis: condition: service_healthy - entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance redis: - image: redis:7.4.3 + image: redis healthcheck: test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/document-updater/scripts/check_redis_mongo_sync_state.js b/services/document-updater/scripts/check_redis_mongo_sync_state.js index 51db47af4d..08209400aa 100644 --- a/services/document-updater/scripts/check_redis_mongo_sync_state.js +++ 
b/services/document-updater/scripts/check_redis_mongo_sync_state.js @@ -15,7 +15,6 @@ const request = require('requestretry').defaults({ retryDelay: 10, }) -const ONLY_PROJECT_ID = process.env.ONLY_PROJECT_ID const AUTO_FIX_VERSION_MISMATCH = process.env.AUTO_FIX_VERSION_MISMATCH === 'true' const AUTO_FIX_PARTIALLY_DELETED_DOC_METADATA = @@ -320,12 +319,10 @@ async function processProject(projectId) { * @return {Promise<{perIterationOutOfSync: number, done: boolean}>} */ async function scanOnce(processed, outOfSync) { - const projectIds = ONLY_PROJECT_ID - ? [ONLY_PROJECT_ID] - : await ProjectFlusher.promises.flushAllProjects({ - limit: LIMIT, - dryRun: true, - }) + const projectIds = await ProjectFlusher.promises.flushAllProjects({ + limit: LIMIT, + dryRun: true, + }) let perIterationOutOfSync = 0 for (const projectId of projectIds) { diff --git a/services/document-updater/test/acceptance/js/SettingADocumentTests.js b/services/document-updater/test/acceptance/js/SettingADocumentTests.js index e1bc54dc90..fd1851a221 100644 --- a/services/document-updater/test/acceptance/js/SettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/SettingADocumentTests.js @@ -686,285 +686,4 @@ describe('Setting a document', function () { }) }) }) - - describe('with track changes (history-ot)', function () { - const lines = ['one', 'one and a half', 'two', 'three'] - const userId = DocUpdaterClient.randomId() - const ts = new Date().toISOString() - beforeEach(function (done) { - numberOfReceivedUpdates = 0 - this.newLines = ['one', 'two', 'three'] - this.project_id = DocUpdaterClient.randomId() - this.doc_id = DocUpdaterClient.randomId() - this.historyOTUpdate = { - doc: this.doc_id, - op: [ - { - textOperation: [ - 4, - { - r: 'one and a half\n'.length, - tracking: { - type: 'delete', - userId, - ts, - }, - }, - 9, - ], - }, - ], - v: this.version, - meta: { source: 'random-publicId' }, - } - MockWebApi.insertDoc(this.project_id, this.doc_id, { - lines, - version: 
this.version, - otMigrationStage: 1, - }) - DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { - if (error) { - throw error - } - DocUpdaterClient.sendUpdate( - this.project_id, - this.doc_id, - this.historyOTUpdate, - error => { - if (error) { - throw error - } - DocUpdaterClient.waitForPendingUpdates( - this.project_id, - this.doc_id, - done - ) - } - ) - }) - }) - - afterEach(function () { - MockProjectHistoryApi.flushProject.resetHistory() - MockWebApi.setDocument.resetHistory() - }) - it('should record tracked changes', function (done) { - docUpdaterRedis.get( - Keys.docLines({ doc_id: this.doc_id }), - (error, data) => { - if (error) { - throw error - } - expect(JSON.parse(data)).to.deep.equal({ - content: lines.join('\n'), - trackedChanges: [ - { - range: { - pos: 4, - length: 15, - }, - tracking: { - ts, - type: 'delete', - userId, - }, - }, - ], - }) - done() - } - ) - }) - - it('should apply the change', function (done) { - DocUpdaterClient.getDoc( - this.project_id, - this.doc_id, - (error, res, data) => { - if (error) { - throw error - } - expect(data.lines).to.deep.equal(this.newLines) - done() - } - ) - }) - const cases = [ - { - name: 'when resetting the content', - lines, - want: { - content: 'one\none and a half\none and a half\ntwo\nthree', - trackedChanges: [ - { - range: { - pos: 'one and a half\n'.length + 4, - length: 15, - }, - tracking: { - ts, - type: 'delete', - userId, - }, - }, - ], - }, - }, - { - name: 'when adding content before a tracked delete', - lines: ['one', 'INSERT', 'two', 'three'], - want: { - content: 'one\nINSERT\none and a half\ntwo\nthree', - trackedChanges: [ - { - range: { - pos: 'INSERT\n'.length + 4, - length: 15, - }, - tracking: { - ts, - type: 'delete', - userId, - }, - }, - ], - }, - }, - { - name: 'when adding content after a tracked delete', - lines: ['one', 'two', 'INSERT', 'three'], - want: { - content: 'one\none and a half\ntwo\nINSERT\nthree', - trackedChanges: [ - { - range: { - pos: 4, - 
length: 15, - }, - tracking: { - ts, - type: 'delete', - userId, - }, - }, - ], - }, - }, - { - name: 'when deleting content before a tracked delete', - lines: ['two', 'three'], - want: { - content: 'one and a half\ntwo\nthree', - trackedChanges: [ - { - range: { - pos: 0, - length: 15, - }, - tracking: { - ts, - type: 'delete', - userId, - }, - }, - ], - }, - }, - { - name: 'when deleting content after a tracked delete', - lines: ['one', 'two'], - want: { - content: 'one\none and a half\ntwo', - trackedChanges: [ - { - range: { - pos: 4, - length: 15, - }, - tracking: { - ts, - type: 'delete', - userId, - }, - }, - ], - }, - }, - { - name: 'when deleting content immediately after a tracked delete', - lines: ['one', 'three'], - want: { - content: 'one\none and a half\nthree', - trackedChanges: [ - { - range: { - pos: 4, - length: 15, - }, - tracking: { - ts, - type: 'delete', - userId, - }, - }, - ], - }, - }, - { - name: 'when deleting content across a tracked delete', - lines: ['onethree'], - want: { - content: 'oneone and a half\nthree', - trackedChanges: [ - { - range: { - pos: 3, - length: 15, - }, - tracking: { - ts, - type: 'delete', - userId, - }, - }, - ], - }, - }, - ] - - for (const { name, lines, want } of cases) { - describe(name, function () { - beforeEach(function (done) { - DocUpdaterClient.setDocLines( - this.project_id, - this.doc_id, - lines, - this.source, - userId, - false, - (error, res, body) => { - if (error) { - return done(error) - } - this.statusCode = res.statusCode - this.body = body - done() - } - ) - }) - it('should update accordingly', function (done) { - docUpdaterRedis.get( - Keys.docLines({ doc_id: this.doc_id }), - (error, data) => { - if (error) { - throw error - } - expect(JSON.parse(data)).to.deep.equal(want) - done() - } - ) - }) - }) - } - }) }) diff --git a/services/document-updater/test/unit/js/Limits/LimitsTests.js b/services/document-updater/test/unit/js/Limits/LimitsTests.js index 11ca38746a..34a5c13c26 100644 --- 
a/services/document-updater/test/unit/js/Limits/LimitsTests.js +++ b/services/document-updater/test/unit/js/Limits/LimitsTests.js @@ -81,88 +81,4 @@ describe('Limits', function () { }) }) }) - - describe('stringFileDataContentIsTooLarge', function () { - it('should handle small docs', function () { - expect( - this.Limits.stringFileDataContentIsTooLarge({ content: '' }, 123) - ).to.equal(false) - }) - it('should handle docs at the limit', function () { - expect( - this.Limits.stringFileDataContentIsTooLarge( - { content: 'x'.repeat(123) }, - 123 - ) - ).to.equal(false) - }) - it('should handle docs above the limit', function () { - expect( - this.Limits.stringFileDataContentIsTooLarge( - { content: 'x'.repeat(123 + 1) }, - 123 - ) - ).to.equal(true) - }) - it('should handle docs above the limit and below with tracked-deletes removed', function () { - expect( - this.Limits.stringFileDataContentIsTooLarge( - { - content: 'x'.repeat(123 + 1), - trackedChanges: [ - { - range: { pos: 1, length: 1 }, - tracking: { - type: 'delete', - ts: '2025-06-16T14:31:44.910Z', - userId: 'user-id', - }, - }, - ], - }, - 123 - ) - ).to.equal(false) - }) - it('should handle docs above the limit and above with tracked-deletes removed', function () { - expect( - this.Limits.stringFileDataContentIsTooLarge( - { - content: 'x'.repeat(123 + 2), - trackedChanges: [ - { - range: { pos: 1, length: 1 }, - tracking: { - type: 'delete', - ts: '2025-06-16T14:31:44.910Z', - userId: 'user-id', - }, - }, - ], - }, - 123 - ) - ).to.equal(true) - }) - it('should handle docs above the limit and with tracked-inserts', function () { - expect( - this.Limits.stringFileDataContentIsTooLarge( - { - content: 'x'.repeat(123 + 1), - trackedChanges: [ - { - range: { pos: 1, length: 1 }, - tracking: { - type: 'insert', - ts: '2025-06-16T14:31:44.910Z', - userId: 'user-id', - }, - }, - ], - }, - 123 - ) - ).to.equal(true) - }) - }) }) diff --git 
a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js index ad6c121dfb..760385b176 100644 --- a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js +++ b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js @@ -15,7 +15,6 @@ describe('ProjectHistoryRedisManager', function () { this.Limits = { docIsTooLarge: sinon.stub().returns(false), - stringFileDataContentIsTooLarge: sinon.stub().returns(false), } this.ProjectHistoryRedisManager = SandboxedModule.require(modulePath, { @@ -62,18 +61,22 @@ describe('ProjectHistoryRedisManager', function () { }) it('should queue an update', function () { - this.multi.rpush.should.have.been.calledWithExactly( - `ProjectHistory:Ops:${this.project_id}`, - this.ops[0], - this.ops[1] - ) + this.multi.rpush + .calledWithExactly( + `ProjectHistory:Ops:${this.project_id}`, + this.ops[0], + this.ops[1] + ) + .should.equal(true) }) it('should set the queue timestamp if not present', function () { - this.multi.setnx.should.have.been.calledWithExactly( - `ProjectHistory:FirstOpTimestamp:${this.project_id}`, - Date.now() - ) + this.multi.setnx + .calledWithExactly( + `ProjectHistory:FirstOpTimestamp:${this.project_id}`, + Date.now() + ) + .should.equal(true) }) }) @@ -115,10 +118,9 @@ describe('ProjectHistoryRedisManager', function () { file: this.file_id, } - this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( - this.project_id, - JSON.stringify(update) - ) + this.ProjectHistoryRedisManager.promises.queueOps + .calledWithExactly(this.project_id, JSON.stringify(update)) + .should.equal(true) }) }) @@ -164,10 +166,9 @@ describe('ProjectHistoryRedisManager', function () { doc: this.doc_id, } - 
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( - this.project_id, - JSON.stringify(update) - ) + this.ProjectHistoryRedisManager.promises.queueOps + .calledWithExactly(this.project_id, JSON.stringify(update)) + .should.equal(true) }) it('should queue an update with file metadata', async function () { @@ -349,10 +350,9 @@ describe('ProjectHistoryRedisManager', function () { doc: this.doc_id, } - this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( - this.project_id, - JSON.stringify(update) - ) + this.ProjectHistoryRedisManager.promises.queueOps + .calledWithExactly(this.project_id, JSON.stringify(update)) + .should.equal(true) }) it('should not forward ranges if history ranges support is undefined', async function () { @@ -402,10 +402,9 @@ describe('ProjectHistoryRedisManager', function () { doc: this.doc_id, } - this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( - this.project_id, - JSON.stringify(update) - ) + this.ProjectHistoryRedisManager.promises.queueOps + .calledWithExactly(this.project_id, JSON.stringify(update)) + .should.equal(true) }) it('should pass "false" as the createdBlob field if not provided', async function () { @@ -433,10 +432,9 @@ describe('ProjectHistoryRedisManager', function () { doc: this.doc_id, } - this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( - this.project_id, - JSON.stringify(update) - ) + this.ProjectHistoryRedisManager.promises.queueOps + .calledWithExactly(this.project_id, JSON.stringify(update)) + .should.equal(true) }) it('should pass through the value of the createdBlob field', async function () { @@ -465,10 +463,9 @@ describe('ProjectHistoryRedisManager', function () { doc: this.doc_id, } - this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( - this.project_id, - JSON.stringify(update) - ) + this.ProjectHistoryRedisManager.promises.queueOps + 
.calledWithExactly(this.project_id, JSON.stringify(update)) + .should.equal(true) }) }) @@ -496,8 +493,8 @@ describe('ProjectHistoryRedisManager', function () { beforeEach(async function () { this.update = { resyncDocContent: { - version: this.version, content: 'one\ntwo', + version: this.version, }, projectHistoryId: this.projectHistoryId, path: this.pathname, @@ -519,18 +516,19 @@ describe('ProjectHistoryRedisManager', function () { }) it('should check if the doc is too large', function () { - this.Limits.docIsTooLarge.should.have.been.calledWith( - JSON.stringify(this.update).length, - this.lines, - this.settings.max_doc_length - ) + this.Limits.docIsTooLarge + .calledWith( + JSON.stringify(this.update).length, + this.lines, + this.settings.max_doc_length + ) + .should.equal(true) }) it('should queue an update', function () { - this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( - this.project_id, - JSON.stringify(this.update) - ) + this.ProjectHistoryRedisManager.promises.queueOps + .calledWithExactly(this.project_id, JSON.stringify(this.update)) + .should.equal(true) }) }) @@ -553,8 +551,9 @@ describe('ProjectHistoryRedisManager', function () { }) it('should not queue an update if the doc is too large', function () { - this.ProjectHistoryRedisManager.promises.queueOps.should.not.have.been - .called + this.ProjectHistoryRedisManager.promises.queueOps.called.should.equal( + false + ) }) }) @@ -562,10 +561,10 @@ describe('ProjectHistoryRedisManager', function () { beforeEach(async function () { this.update = { resyncDocContent: { + content: 'onedeleted\ntwo', version: this.version, ranges: this.ranges, resolvedCommentIds: this.resolvedCommentIds, - content: 'onedeleted\ntwo', }, projectHistoryId: this.projectHistoryId, path: this.pathname, @@ -602,76 +601,9 @@ describe('ProjectHistoryRedisManager', function () { }) it('should queue an update', function () { - 
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( - this.project_id, - JSON.stringify(this.update) - ) - }) - }) - - describe('history-ot', function () { - beforeEach(async function () { - this.lines = { - content: 'onedeleted\ntwo', - comments: [{ id: 'id1', ranges: [{ pos: 0, length: 3 }] }], - trackedChanges: [ - { - range: { pos: 3, length: 7 }, - tracking: { - type: 'delete', - userId: 'user-id', - ts: '2025-06-16T14:31:44.910Z', - }, - }, - ], - } - this.update = { - resyncDocContent: { - version: this.version, - historyOTRanges: { - comments: this.lines.comments, - trackedChanges: this.lines.trackedChanges, - }, - content: this.lines.content, - }, - projectHistoryId: this.projectHistoryId, - path: this.pathname, - doc: this.doc_id, - meta: { ts: new Date() }, - } - - await this.ProjectHistoryRedisManager.promises.queueResyncDocContent( - this.project_id, - this.projectHistoryId, - this.doc_id, - this.lines, - this.ranges, - this.resolvedCommentIds, - this.version, - this.pathname, - true - ) - }) - - it('should include tracked deletes in the update', function () { - this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( - this.project_id, - JSON.stringify(this.update) - ) - }) - - it('should check the doc length without tracked deletes', function () { - this.Limits.stringFileDataContentIsTooLarge.should.have.been.calledWith( - this.lines, - this.settings.max_doc_length - ) - }) - - it('should queue an update', function () { - this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( - this.project_id, - JSON.stringify(this.update) - ) + this.ProjectHistoryRedisManager.promises.queueOps + .calledWithExactly(this.project_id, JSON.stringify(this.update)) + .should.equal(true) }) }) }) diff --git a/services/history-v1/api/app/rollout.js b/services/history-v1/api/app/rollout.js deleted file mode 100644 index 24ca0409f8..0000000000 --- 
a/services/history-v1/api/app/rollout.js +++ /dev/null @@ -1,76 +0,0 @@ -const crypto = require('node:crypto') - -class Rollout { - constructor(config) { - // The history buffer level is used to determine whether to queue changes - // in Redis or persist them directly to the chunk store. - // If defaults to 0 (no queuing) if not set. - this.historyBufferLevel = config.has('historyBufferLevel') - ? parseInt(config.get('historyBufferLevel'), 10) - : 0 - // The forcePersistBuffer flag will ensure the buffer is fully persisted before - // any persist operation. Set this to true if you want to make the persisted-version - // in Redis match the endVersion of the latest chunk. This should be set to true - // when downgrading from a history buffer level that queues changes in Redis - // without persisting them immediately. - this.forcePersistBuffer = config.has('forcePersistBuffer') - ? config.get('forcePersistBuffer') === 'true' - : false - - // Support gradual rollout of the next history buffer level - // with a percentage of projects using it. - this.nextHistoryBufferLevel = config.has('nextHistoryBufferLevel') - ? parseInt(config.get('nextHistoryBufferLevel'), 10) - : null - this.nextHistoryBufferLevelRolloutPercentage = config.has( - 'nextHistoryBufferLevelRolloutPercentage' - ) - ? parseInt(config.get('nextHistoryBufferLevelRolloutPercentage'), 10) - : 0 - } - - report(logger) { - logger.info( - { - historyBufferLevel: this.historyBufferLevel, - forcePersistBuffer: this.forcePersistBuffer, - nextHistoryBufferLevel: this.nextHistoryBufferLevel, - nextHistoryBufferLevelRolloutPercentage: - this.nextHistoryBufferLevelRolloutPercentage, - }, - this.historyBufferLevel > 0 || this.forcePersistBuffer - ? 'using history buffer' - : 'history buffer disabled' - ) - } - - /** - * Get the history buffer level for a project. - * @param {string} projectId - * @returns {Object} - An object containing the history buffer level and force persist buffer flag. 
- * @property {number} historyBufferLevel - The history buffer level to use for processing changes. - * @property {boolean} forcePersistBuffer - If true, forces the buffer to be persisted before any operation. - */ - getHistoryBufferLevelOptions(projectId) { - if ( - this.nextHistoryBufferLevel > this.historyBufferLevel && - this.nextHistoryBufferLevelRolloutPercentage > 0 - ) { - const hash = crypto.createHash('sha1').update(projectId).digest('hex') - const percentage = parseInt(hash.slice(0, 8), 16) % 100 - // If the project is in the rollout percentage, we use the next history buffer level. - if (percentage < this.nextHistoryBufferLevelRolloutPercentage) { - return { - historyBufferLevel: this.nextHistoryBufferLevel, - forcePersistBuffer: this.forcePersistBuffer, - } - } - } - return { - historyBufferLevel: this.historyBufferLevel, - forcePersistBuffer: this.forcePersistBuffer, - } - } -} - -module.exports = Rollout diff --git a/services/history-v1/api/controllers/project_import.js b/services/history-v1/api/controllers/project_import.js index 638873d105..edffb19a25 100644 --- a/services/history-v1/api/controllers/project_import.js +++ b/services/history-v1/api/controllers/project_import.js @@ -2,7 +2,6 @@ 'use strict' -const config = require('config') const { expressify } = require('@overleaf/promise-utils') const HTTPStatus = require('http-status') @@ -22,15 +21,10 @@ const BatchBlobStore = storage.BatchBlobStore const BlobStore = storage.BlobStore const chunkStore = storage.chunkStore const HashCheckBlobStore = storage.HashCheckBlobStore -const commitChanges = storage.commitChanges -const persistBuffer = storage.persistBuffer +const persistChanges = storage.persistChanges const InvalidChangeError = storage.InvalidChangeError const render = require('./render') -const Rollout = require('../app/rollout') - -const rollout = new Rollout(config) -rollout.report(logger) // display the rollout configuration in the logs async function importSnapshot(req, res) { const 
projectId = req.swagger.params.project_id.value @@ -41,7 +35,6 @@ async function importSnapshot(req, res) { try { snapshot = Snapshot.fromRaw(rawSnapshot) } catch (err) { - logger.warn({ err, projectId }, 'failed to import snapshot') return render.unprocessableEntity(res) } @@ -50,7 +43,6 @@ async function importSnapshot(req, res) { historyId = await chunkStore.initializeProject(projectId, snapshot) } catch (err) { if (err instanceof chunkStore.AlreadyInitialized) { - logger.warn({ err, projectId }, 'already initialized') return render.conflict(res) } else { throw err @@ -116,12 +108,7 @@ async function importChanges(req, res, next) { let result try { - const { historyBufferLevel, forcePersistBuffer } = - rollout.getHistoryBufferLevelOptions(projectId) - result = await commitChanges(projectId, changes, limits, endVersion, { - historyBufferLevel, - forcePersistBuffer, - }) + result = await persistChanges(projectId, changes, limits, endVersion) } catch (err) { if ( err instanceof Chunk.ConflictingEndVersion || @@ -154,29 +141,5 @@ async function importChanges(req, res, next) { } } -async function flushChanges(req, res, next) { - const projectId = req.swagger.params.project_id.value - // Use the same limits importChanges, since these are passed to persistChanges - const farFuture = new Date() - farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000) - const limits = { - maxChanges: 0, - minChangeTimestamp: farFuture, - maxChangeTimestamp: farFuture, - autoResync: true, - } - try { - await persistBuffer(projectId, limits) - res.status(HTTPStatus.OK).end() - } catch (err) { - if (err instanceof Chunk.NotFoundError) { - render.notFound(res) - } else { - throw err - } - } -} - exports.importSnapshot = expressify(importSnapshot) exports.importChanges = expressify(importChanges) -exports.flushChanges = expressify(flushChanges) diff --git a/services/history-v1/api/controllers/projects.js b/services/history-v1/api/controllers/projects.js index 031833688c..47a1d959ad 
100644 --- a/services/history-v1/api/controllers/projects.js +++ b/services/history-v1/api/controllers/projects.js @@ -34,7 +34,6 @@ async function initializeProject(req, res, next) { res.status(HTTPStatus.OK).json({ projectId }) } catch (err) { if (err instanceof chunkStore.AlreadyInitialized) { - logger.warn({ err, projectId }, 'failed to initialize') render.conflict(res) } else { throw err @@ -243,15 +242,11 @@ async function createProjectBlob(req, res, next) { const sizeLimit = new StreamSizeLimit(maxUploadSize) await pipeline(req, sizeLimit, fs.createWriteStream(tmpPath)) if (sizeLimit.sizeLimitExceeded) { - logger.warn( - { projectId, expectedHash, maxUploadSize }, - 'blob exceeds size threshold' - ) return render.requestEntityTooLarge(res) } const hash = await blobHash.fromFile(tmpPath) if (hash !== expectedHash) { - logger.warn({ projectId, hash, expectedHash }, 'Hash mismatch') + logger.debug({ hash, expectedHash }, 'Hash mismatch') return render.conflict(res, 'File hash mismatch') } @@ -348,10 +343,6 @@ async function copyProjectBlob(req, res, next) { targetBlobStore.getBlob(blobHash), ]) if (!sourceBlob) { - logger.warn( - { sourceProjectId, targetProjectId, blobHash }, - 'missing source blob when copying across projects' - ) return render.notFound(res) } // Exit early if the blob exists in the target project. 
diff --git a/services/history-v1/api/swagger/project_import.js b/services/history-v1/api/swagger/project_import.js index 6103eed74b..a93f42d27e 100644 --- a/services/history-v1/api/swagger/project_import.js +++ b/services/history-v1/api/swagger/project_import.js @@ -139,45 +139,9 @@ const getChanges = { ], } -const flushChanges = { - 'x-swagger-router-controller': 'project_import', - operationId: 'flushChanges', - tags: ['ProjectImport'], - description: 'Flush project changes from buffer to the chunk store.', - parameters: [ - { - name: 'project_id', - in: 'path', - description: 'project id', - required: true, - type: 'string', - }, - ], - responses: { - 200: { - description: 'Success', - schema: { - $ref: '#/definitions/Project', - }, - }, - 404: { - description: 'Not Found', - schema: { - $ref: '#/definitions/Error', - }, - }, - }, - security: [ - { - basic: [], - }, - ], -} - exports.paths = { '/projects/{project_id}/import': { post: importSnapshot }, '/projects/{project_id}/legacy_import': { post: importSnapshot }, '/projects/{project_id}/changes': { get: getChanges, post: importChanges }, '/projects/{project_id}/legacy_changes': { post: importChanges }, - '/projects/{project_id}/flush': { post: flushChanges }, } diff --git a/services/history-v1/app.js b/services/history-v1/app.js index dd991c1a6d..261f1001b6 100644 --- a/services/history-v1/app.js +++ b/services/history-v1/app.js @@ -100,13 +100,11 @@ function setupErrorHandling() { }) } if (err.code === 'ENUM_MISMATCH') { - logger.warn({ err, projectId }, err.message) return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({ message: 'invalid enum value: ' + err.paramName, }) } if (err.code === 'REQUIRED') { - logger.warn({ err, projectId }, err.message) return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({ message: err.message, }) diff --git a/services/history-v1/config/custom-environment-variables.json b/services/history-v1/config/custom-environment-variables.json index 686ca25407..d07ae2925a 100644 --- 
a/services/history-v1/config/custom-environment-variables.json +++ b/services/history-v1/config/custom-environment-variables.json @@ -84,10 +84,6 @@ "maxFileUploadSize": "MAX_FILE_UPLOAD_SIZE", "httpsOnly": "HTTPS_ONLY", "httpRequestTimeout": "HTTP_REQUEST_TIMEOUT", - "historyBufferLevel": "HISTORY_BUFFER_LEVEL", - "forcePersistBuffer": "FORCE_PERSIST_BUFFER", - "nextHistoryBufferLevel": "NEXT_HISTORY_BUFFER_LEVEL", - "nextHistoryBufferLevelRolloutPercentage": "NEXT_HISTORY_BUFFER_LEVEL_ROLLOUT_PERCENTAGE", "redis": { "queue": { "host": "QUEUES_REDIS_HOST", @@ -104,9 +100,5 @@ "password": "REDIS_PASSWORD", "port": "REDIS_PORT" } - }, - "projectHistory": { - "host": "PROJECT_HISTORY_HOST", - "port": "PROJECT_HISTORY_PORT" } } diff --git a/services/history-v1/config/default.json b/services/history-v1/config/default.json index e7732fe3f7..5222b84d87 100644 --- a/services/history-v1/config/default.json +++ b/services/history-v1/config/default.json @@ -39,8 +39,5 @@ "databasePoolMin": "2", "databasePoolMax": "10", "httpsOnly": "false", - "httpRequestTimeout": "300000", - "projectHistory": { - "port": "3054" - } + "httpRequestTimeout": "300000" } diff --git a/services/history-v1/docker-compose.ci.yml b/services/history-v1/docker-compose.ci.yml index 9128451c4f..0dfe8b99d3 100644 --- a/services/history-v1/docker-compose.ci.yml +++ b/services/history-v1/docker-compose.ci.yml @@ -39,7 +39,6 @@ services: NODE_OPTIONS: "--unhandled-rejections=strict" volumes: - ./test/acceptance/certs:/certs - - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started @@ -56,7 +55,6 @@ services: gcs: condition: service_healthy user: node - entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance @@ -68,7 +66,7 @@ services: command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . 
user: root redis: - image: redis:7.4.3 + image: redis healthcheck: test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/history-v1/docker-compose.yml b/services/history-v1/docker-compose.yml index cda379fb14..b87d859e1e 100644 --- a/services/history-v1/docker-compose.yml +++ b/services/history-v1/docker-compose.yml @@ -33,7 +33,6 @@ services: - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries - ./test/acceptance/certs:/certs - - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/history-v1 environment: ELASTIC_SEARCH_DSN: es:9200 @@ -72,11 +71,10 @@ services: condition: service_completed_successfully gcs: condition: service_healthy - entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance redis: - image: redis:7.4.3 + image: redis healthcheck: test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/history-v1/package.json b/services/history-v1/package.json index 4796cafd03..1fdfd95c45 100644 --- a/services/history-v1/package.json +++ b/services/history-v1/package.json @@ -7,7 +7,6 @@ "private": true, "dependencies": { "@google-cloud/secret-manager": "^5.6.0", - "@overleaf/fetch-utils": "*", "@overleaf/logger": "*", "@overleaf/metrics": "*", "@overleaf/mongo-utils": "*", @@ -37,7 +36,6 @@ "mongodb": "6.12.0", "overleaf-editor-core": "*", "p-limit": "^6.2.0", - "p-queue": "^8.1.0", "pg": "^8.7.1", "pg-query-stream": "^4.2.4", "swagger-tools": "^0.10.4", diff --git a/services/history-v1/storage/index.js b/services/history-v1/storage/index.js index 82a51583be..2aa492f46e 100644 --- a/services/history-v1/storage/index.js +++ b/services/history-v1/storage/index.js @@ -8,9 +8,6 @@ exports.mongodb = require('./lib/mongodb') exports.redis = require('./lib/redis') exports.persistChanges = require('./lib/persist_changes') exports.persistor = require('./lib/persistor') 
-exports.persistBuffer = require('./lib/persist_buffer') -exports.commitChanges = require('./lib/commit_changes') -exports.queueChanges = require('./lib/queue_changes') exports.ProjectArchive = require('./lib/project_archive') exports.streams = require('./lib/streams') exports.temp = require('./lib/temp') diff --git a/services/history-v1/storage/lib/chunk_store/index.js b/services/history-v1/storage/lib/chunk_store/index.js index 286a8d8764..6dab84f929 100644 --- a/services/history-v1/storage/lib/chunk_store/index.js +++ b/services/history-v1/storage/lib/chunk_store/index.js @@ -151,48 +151,23 @@ async function loadAtVersion(projectId, version, opts = {}) { const backend = getBackend(projectId) const blobStore = new BlobStore(projectId) const batchBlobStore = new BatchBlobStore(blobStore) - const latestChunkMetadata = await getLatestChunkMetadata(projectId) - // When loading a chunk for a version there are three cases to consider: - // 1. If `persistedOnly` is true, we always use the requested version - // to fetch the chunk. - // 2. If `persistedOnly` is false and the requested version is in the - // persisted chunk version range, we use the requested version. - // 3. If `persistedOnly` is false and the requested version is ahead of - // the persisted chunk versions, we fetch the latest chunk and see if - // the non-persisted changes include the requested version. - const targetChunkVersion = opts.persistedOnly - ? 
version - : Math.min(latestChunkMetadata.endVersion, version) - - const chunkRecord = await backend.getChunkForVersion( - projectId, - targetChunkVersion, - { - preferNewer: opts.preferNewer, - } - ) + const chunkRecord = await backend.getChunkForVersion(projectId, version, { + preferNewer: opts.preferNewer, + }) const rawHistory = await historyStore.loadRaw(projectId, chunkRecord.id) const history = History.fromRaw(rawHistory) - const startVersion = chunkRecord.endVersion - history.countChanges() if (!opts.persistedOnly) { - // Try to extend the chunk with any non-persisted changes that - // follow the chunk's end version. const nonPersistedChanges = await getChunkExtension( projectId, chunkRecord.endVersion ) history.pushChanges(nonPersistedChanges) - - // Check that the changes do actually contain the requested version - if (version > chunkRecord.endVersion + nonPersistedChanges.length) { - throw new Chunk.VersionNotFoundError(projectId, version) - } } await lazyLoadHistoryFiles(history, batchBlobStore) - return new Chunk(history, startVersion) + return new Chunk(history, chunkRecord.endVersion - history.countChanges()) } /** @@ -215,7 +190,6 @@ async function loadAtTimestamp(projectId, timestamp, opts = {}) { const chunkRecord = await backend.getChunkForTimestamp(projectId, timestamp) const rawHistory = await historyStore.loadRaw(projectId, chunkRecord.id) const history = History.fromRaw(rawHistory) - const startVersion = chunkRecord.endVersion - history.countChanges() if (!opts.persistedOnly) { const nonPersistedChanges = await getChunkExtension( @@ -226,7 +200,7 @@ async function loadAtTimestamp(projectId, timestamp, opts = {}) { } await lazyLoadHistoryFiles(history, batchBlobStore) - return new Chunk(history, startVersion) + return new Chunk(history, chunkRecord.endVersion - history.countChanges()) } /** diff --git a/services/history-v1/storage/lib/chunk_store/mongo.js b/services/history-v1/storage/lib/chunk_store/mongo.js index 49020c6be4..26c1bc48ec 100644 
--- a/services/history-v1/storage/lib/chunk_store/mongo.js +++ b/services/history-v1/storage/lib/chunk_store/mongo.js @@ -286,27 +286,6 @@ async function updateProjectRecord( ) } -/** - * @param {number} historyId - * @return {Promise} - */ -async function lookupMongoProjectIdFromHistoryId(historyId) { - const project = await mongodb.projects.findOne( - // string for Object ids, number for postgres ids - { 'overleaf.history.id': historyId }, - { projection: { _id: 1 } } - ) - if (!project) { - // should not happen: We flush before allowing a project to be soft-deleted. - throw new OError('mongo project not found by history id', { historyId }) - } - return project._id.toString() -} - -async function resolveHistoryIdToMongoProjectId(projectId) { - return projectId -} - /** * Record that a chunk was replaced by a new one. * @@ -554,6 +533,4 @@ module.exports = { deleteProjectChunks, getOldChunksBatch, deleteOldChunks, - lookupMongoProjectIdFromHistoryId, - resolveHistoryIdToMongoProjectId, } diff --git a/services/history-v1/storage/lib/chunk_store/postgres.js b/services/history-v1/storage/lib/chunk_store/postgres.js index 8906db38e1..bfb5c6954a 100644 --- a/services/history-v1/storage/lib/chunk_store/postgres.js +++ b/services/history-v1/storage/lib/chunk_store/postgres.js @@ -5,10 +5,7 @@ const assert = require('../assert') const knex = require('../knex') const knexReadOnly = require('../knex_read_only') const { ChunkVersionConflictError } = require('./errors') -const { - updateProjectRecord, - lookupMongoProjectIdFromHistoryId, -} = require('./mongo') +const { updateProjectRecord } = require('./mongo') const DUPLICATE_KEY_ERROR_CODE = '23505' @@ -475,10 +472,6 @@ async function generateProjectId() { return record.doc_id.toString() } -async function resolveHistoryIdToMongoProjectId(projectId) { - return await lookupMongoProjectIdFromHistoryId(parseInt(projectId, 10)) -} - module.exports = { getLatestChunk, getFirstChunkBeforeTimestamp, @@ -495,5 +488,4 @@ 
module.exports = { getOldChunksBatch, deleteOldChunks, generateProjectId, - resolveHistoryIdToMongoProjectId, } diff --git a/services/history-v1/storage/lib/chunk_store/redis.js b/services/history-v1/storage/lib/chunk_store/redis.js index 59bfd81e39..0ae7cee2e5 100644 --- a/services/history-v1/storage/lib/chunk_store/redis.js +++ b/services/history-v1/storage/lib/chunk_store/redis.js @@ -480,12 +480,11 @@ async function getNonPersistedChanges(projectId, baseVersion) { } rclient.defineCommand('set_persisted_version', { - numberOfKeys: 4, + numberOfKeys: 3, lua: ` local headVersionKey = KEYS[1] local persistedVersionKey = KEYS[2] - local persistTimeKey = KEYS[3] - local changesKey = KEYS[4] + local changesKey = KEYS[3] local newPersistedVersion = tonumber(ARGV[1]) local maxPersistedChanges = tonumber(ARGV[2]) @@ -502,19 +501,9 @@ rclient.defineCommand('set_persisted_version', { return 'too_low' end - -- Refuse to set a persisted version that is higher than the head version - if newPersistedVersion > headVersion then - return 'too_high' - end - -- Set the persisted version redis.call('SET', persistedVersionKey, newPersistedVersion) - -- Clear the persist time if the persisted version now matches the head version - if newPersistedVersion == headVersion then - redis.call('DEL', persistTimeKey) - end - -- Calculate the starting index, to keep only maxPersistedChanges beyond the persisted version -- Using negative indexing to count backwards from the end of the list local startIndex = newPersistedVersion - headVersion - maxPersistedChanges @@ -541,7 +530,6 @@ async function setPersistedVersion(projectId, persistedVersion) { const keys = [ keySchema.headVersion({ projectId }), keySchema.persistedVersion({ projectId }), - keySchema.persistTime({ projectId }), keySchema.changes({ projectId }), ] @@ -553,13 +541,6 @@ async function setPersistedVersion(projectId, persistedVersion) { status, }) - if (status === 'too_high') { - throw new VersionOutOfBoundsError( - 'Persisted 
version cannot be higher than head version', - { projectId, persistedVersion } - ) - } - return status } catch (err) { metrics.inc('chunk_store.redis.set_persisted_version', 1, { @@ -650,7 +631,6 @@ async function expireProject(projectId) { metrics.inc('chunk_store.redis.set_persisted_version', 1, { status, }) - return status } catch (err) { metrics.inc('chunk_store.redis.set_persisted_version', 1, { status: 'error', diff --git a/services/history-v1/storage/lib/commit_changes.js b/services/history-v1/storage/lib/commit_changes.js deleted file mode 100644 index 5749e5fc0e..0000000000 --- a/services/history-v1/storage/lib/commit_changes.js +++ /dev/null @@ -1,159 +0,0 @@ -// @ts-check - -'use strict' - -const metrics = require('@overleaf/metrics') -const redisBackend = require('./chunk_store/redis') -const logger = require('@overleaf/logger') -const queueChanges = require('./queue_changes') -const persistChanges = require('./persist_changes') -const persistBuffer = require('./persist_buffer') - -/** - * @typedef {import('overleaf-editor-core').Change} Change - */ - -/** - * Handle incoming changes by processing them according to the specified options. - * @param {string} projectId - * @param {Change[]} changes - * @param {Object} limits - * @param {number} endVersion - * @param {Object} options - * @param {number} [options.historyBufferLevel] - The history buffer level to use for processing changes. - * @param {Boolean} [options.forcePersistBuffer] - If true, forces the buffer to be persisted before any operation. - * @return {Promise.} - */ - -async function commitChanges( - projectId, - changes, - limits, - endVersion, - options = {} -) { - const { historyBufferLevel, forcePersistBuffer } = options - - // Force the buffer to be persisted if specified. 
- if (forcePersistBuffer) { - try { - const status = await redisBackend.expireProject(projectId) // clear the project from Redis if it is persisted, returns 'not-persisted' if it was not persisted - if (status === 'not-persisted') { - await persistBuffer(projectId, limits) - await redisBackend.expireProject(projectId) // clear the project from Redis after persisting - metrics.inc('persist_buffer_force', 1, { status: 'persisted' }) - } - } catch (err) { - metrics.inc('persist_buffer_force', 1, { status: 'error' }) - logger.error( - { err, projectId }, - 'failed to persist buffer before committing changes' - ) - } - } - - metrics.inc('commit_changes', 1, { - history_buffer_level: historyBufferLevel || 0, - }) - - // Now handle the changes based on the configured history buffer level. - switch (historyBufferLevel) { - case 4: // Queue changes and only persist them in the background - await queueChanges(projectId, changes, endVersion) - return {} - case 3: // Queue changes and immediately persist with persistBuffer - await queueChanges(projectId, changes, endVersion) - return await persistBuffer(projectId, limits) - case 2: // Equivalent to queueChangesInRedis:true - await queueChangesFake(projectId, changes, endVersion) - return await persistChanges(projectId, changes, limits, endVersion) - case 1: // Queue changes with fake persist only for projects in redis already - await queueChangesFakeOnlyIfExists(projectId, changes, endVersion) - return await persistChanges(projectId, changes, limits, endVersion) - case 0: // Persist changes directly to the chunk store - return await persistChanges(projectId, changes, limits, endVersion) - default: - throw new Error(`Invalid history buffer level: ${historyBufferLevel}`) - } -} - -/** - * Queues a set of changes in redis as if they had been persisted, ignoring any errors. 
- * @param {string} projectId - * @param {Change[]} changes - * @param {number} endVersion - * @param {Object} [options] - * @param {boolean} [options.onlyIfExists] - If true, only queue changes if the project - * already exists in Redis. - */ - -async function queueChangesFake(projectId, changes, endVersion, options = {}) { - try { - await queueChanges(projectId, changes, endVersion) - await fakePersistRedisChanges(projectId, changes, endVersion) - } catch (err) { - logger.error({ err }, 'Chunk buffer verification failed') - } -} - -/** - * Queues changes in Redis, simulating persistence, but only if the project already exists. - * @param {string} projectId - The ID of the project. - * @param {Change[]} changes - An array of changes to be queued. - * @param {number} endVersion - The expected version of the project before these changes are applied. - */ - -async function queueChangesFakeOnlyIfExists(projectId, changes, endVersion) { - await queueChangesFake(projectId, changes, endVersion, { - onlyIfExists: true, - }) -} - -/** - * Simulates the persistence of changes by verifying a given set of changes against - * what is currently stored as non-persisted in Redis, and then updates the - * persisted version number in Redis. - * - * @async - * @param {string} projectId - The ID of the project. - * @param {Change[]} changesToPersist - An array of changes that are expected to be - * persisted. These are used for verification - * against the changes currently in Redis. - * @param {number} baseVersion - The base version number from which to calculate - * the new persisted version. - * @returns {Promise} A promise that resolves when the persisted version - * in Redis has been updated. 
- */ -async function fakePersistRedisChanges( - projectId, - changesToPersist, - baseVersion -) { - const nonPersistedChanges = await redisBackend.getNonPersistedChanges( - projectId, - baseVersion - ) - - if ( - serializeChanges(nonPersistedChanges) === serializeChanges(changesToPersist) - ) { - metrics.inc('persist_redis_changes_verification', 1, { status: 'match' }) - } else { - logger.warn({ projectId }, 'mismatch of non-persisted changes from Redis') - metrics.inc('persist_redis_changes_verification', 1, { - status: 'mismatch', - }) - } - - const persistedVersion = baseVersion + nonPersistedChanges.length - await redisBackend.setPersistedVersion(projectId, persistedVersion) -} - -/** - * @param {Change[]} changes - */ -function serializeChanges(changes) { - return JSON.stringify(changes.map(change => change.toRaw())) -} - -module.exports = commitChanges diff --git a/services/history-v1/storage/lib/persist_buffer.js b/services/history-v1/storage/lib/persist_buffer.js deleted file mode 100644 index d562388f87..0000000000 --- a/services/history-v1/storage/lib/persist_buffer.js +++ /dev/null @@ -1,206 +0,0 @@ -// @ts-check -'use strict' - -const logger = require('@overleaf/logger') -const metrics = require('@overleaf/metrics') -const OError = require('@overleaf/o-error') -const assert = require('./assert') -const chunkStore = require('./chunk_store') -const { BlobStore } = require('./blob_store') -const BatchBlobStore = require('./batch_blob_store') -const persistChanges = require('./persist_changes') -const resyncProject = require('./resync_project') -const redisBackend = require('./chunk_store/redis') - -/** - * Persist the changes from Redis buffer to the main storage - * - * Algorithm Outline: - * 1. Get the latest chunk's endVersion from the database - * 2. Get non-persisted changes from Redis that are after this endVersion. - * 3. If no such changes, exit. - * 4. Load file blobs for these Redis changes. - * 5. 
Run the persistChanges() algorithm to store these changes into a new chunk(s) in GCS. - * - This must not decrease the endVersion. If changes were processed, it must advance. - * 6. Set the new persisted version (endVersion of the latest persisted chunk) in Redis. - * - * @param {string} projectId - * @param {Object} limits - * @throws {Error | OError} If a critical error occurs during persistence. - */ -async function persistBuffer(projectId, limits) { - assert.projectId(projectId) - logger.debug({ projectId }, 'starting persistBuffer operation') - - // 1. Get the latest chunk's endVersion from GCS/main store - let endVersion - const latestChunkMetadata = await chunkStore.getLatestChunkMetadata(projectId) - - if (latestChunkMetadata) { - endVersion = latestChunkMetadata.endVersion - } else { - endVersion = 0 // No chunks found, start from version 0 - logger.debug({ projectId }, 'no existing chunks found in main storage') - } - - logger.debug({ projectId, endVersion }, 'got latest persisted chunk') - - // 2. Get non-persisted changes from Redis - const changesToPersist = await redisBackend.getNonPersistedChanges( - projectId, - endVersion - ) - - if (changesToPersist.length === 0) { - logger.debug( - { projectId, endVersion }, - 'no new changes in Redis buffer to persist' - ) - metrics.inc('persist_buffer', 1, { status: 'no_changes' }) - // No changes to persist, update the persisted version in Redis - // to match the current endVersion. This shouldn't be needed - // unless a worker failed to update the persisted version. - await redisBackend.setPersistedVersion(projectId, endVersion) - const { chunk } = await chunkStore.loadByChunkRecord( - projectId, - latestChunkMetadata - ) - // Return the result in the same format as persistChanges - // so that the caller can handle it uniformly. 
- return { - numberOfChangesPersisted: changesToPersist.length, - originalEndVersion: endVersion, - currentChunk: chunk, - } - } - - logger.debug( - { - projectId, - endVersion, - count: changesToPersist.length, - }, - 'found changes in Redis to persist' - ) - - // 4. Load file blobs for these Redis changes. Errors will propagate. - const blobStore = new BlobStore(projectId) - const batchBlobStore = new BatchBlobStore(blobStore) - - const blobHashes = new Set() - for (const change of changesToPersist) { - change.findBlobHashes(blobHashes) - } - if (blobHashes.size > 0) { - await batchBlobStore.preload(Array.from(blobHashes)) - } - for (const change of changesToPersist) { - await change.loadFiles('lazy', blobStore) - } - - // 5. Run the persistChanges() algorithm. Errors will propagate. - logger.debug( - { - projectId, - endVersion, - changeCount: changesToPersist.length, - }, - 'calling persistChanges' - ) - - const persistResult = await persistChanges( - projectId, - changesToPersist, - limits, - endVersion - ) - - if (!persistResult || !persistResult.currentChunk) { - metrics.inc('persist_buffer', 1, { status: 'no-chunk-error' }) - throw new OError( - 'persistChanges did not produce a new chunk for non-empty changes', - { - projectId, - endVersion, - changeCount: changesToPersist.length, - } - ) - } - - const newPersistedChunk = persistResult.currentChunk - const newEndVersion = newPersistedChunk.getEndVersion() - - if (newEndVersion <= endVersion) { - metrics.inc('persist_buffer', 1, { status: 'chunk-version-mismatch' }) - throw new OError( - 'persisted chunk endVersion must be greater than current persisted chunk end version for non-empty changes', - { - projectId, - newEndVersion, - endVersion, - changeCount: changesToPersist.length, - } - ) - } - - logger.debug( - { - projectId, - oldVersion: endVersion, - newVersion: newEndVersion, - }, - 'successfully persisted changes from Redis to main storage' - ) - - // 6. Set the persisted version in Redis. 
Errors will propagate. - const status = await redisBackend.setPersistedVersion( - projectId, - newEndVersion - ) - - if (status !== 'ok') { - metrics.inc('persist_buffer', 1, { status: 'error-on-persisted-version' }) - throw new OError('failed to update persisted version in Redis', { - projectId, - newEndVersion, - status, - }) - } - - logger.debug( - { projectId, newEndVersion }, - 'updated persisted version in Redis' - ) - - // 7. Resync the project if content hash validation failed - if (limits.autoResync && persistResult.resyncNeeded) { - if ( - changesToPersist.some( - change => change.getOrigin()?.getKind() === 'history-resync' - ) - ) { - // To avoid an infinite loop, do not resync if the current batch of - // changes contains a history resync. - logger.warn( - { projectId }, - 'content hash validation failed while persisting a history resync, skipping additional resync' - ) - } else { - const backend = chunkStore.getBackend(projectId) - const mongoProjectId = - await backend.resolveHistoryIdToMongoProjectId(projectId) - await resyncProject(mongoProjectId) - } - } - - logger.debug( - { projectId, finalPersistedVersion: newEndVersion }, - 'persistBuffer operation completed successfully' - ) - - metrics.inc('persist_buffer', 1, { status: 'persisted' }) - - return persistResult -} - -module.exports = persistBuffer diff --git a/services/history-v1/storage/lib/persist_changes.js b/services/history-v1/storage/lib/persist_changes.js index d2ca00053f..5b80285eb0 100644 --- a/services/history-v1/storage/lib/persist_changes.js +++ b/services/history-v1/storage/lib/persist_changes.js @@ -4,6 +4,7 @@ const _ = require('lodash') const logger = require('@overleaf/logger') +const metrics = require('@overleaf/metrics') const core = require('overleaf-editor-core') const Chunk = core.Chunk @@ -14,6 +15,7 @@ const chunkStore = require('./chunk_store') const { BlobStore } = require('./blob_store') const { InvalidChangeError } = require('./errors') const { getContentHash } = 
require('./content_hash') +const redisBackend = require('./chunk_store/redis') function countChangeBytes(change) { // Note: This is not quite accurate, because the raw change may contain raw @@ -200,6 +202,45 @@ async function persistChanges(projectId, allChanges, limits, clientEndVersion) { currentSnapshot.applyAll(currentChunk.getChanges()) } + async function queueChangesInRedis() { + const hollowSnapshot = currentSnapshot.clone() + // We're transforming a lazy snapshot to a hollow snapshot, so loadFiles() + // doesn't really need a blobStore, but its signature still requires it. + const blobStore = new BlobStore(projectId) + await hollowSnapshot.loadFiles('hollow', blobStore) + hollowSnapshot.applyAll(changesToPersist, { strict: true }) + const baseVersion = currentChunk.getEndVersion() + await redisBackend.queueChanges( + projectId, + hollowSnapshot, + baseVersion, + changesToPersist + ) + } + + async function fakePersistRedisChanges() { + const baseVersion = currentChunk.getEndVersion() + const nonPersistedChanges = await redisBackend.getNonPersistedChanges( + projectId, + baseVersion + ) + + if ( + serializeChanges(nonPersistedChanges) === + serializeChanges(changesToPersist) + ) { + metrics.inc('persist_redis_changes_verification', 1, { status: 'match' }) + } else { + logger.warn({ projectId }, 'mismatch of non-persisted changes from Redis') + metrics.inc('persist_redis_changes_verification', 1, { + status: 'mismatch', + }) + } + + const persistedVersion = baseVersion + nonPersistedChanges.length + await redisBackend.setPersistedVersion(projectId, persistedVersion) + } + async function extendLastChunkIfPossible() { const timer = new Timer() const changesPushed = await fillChunk(currentChunk, changesToPersist) @@ -248,6 +289,12 @@ async function persistChanges(projectId, allChanges, limits, clientEndVersion) { const numberOfChangesToPersist = oldChanges.length await loadLatestChunk() + try { + await queueChangesInRedis() + await fakePersistRedisChanges() + } 
catch (err) { + logger.error({ err }, 'Chunk buffer verification failed') + } await extendLastChunkIfPossible() await createNewChunksAsNeeded() @@ -262,4 +309,11 @@ async function persistChanges(projectId, allChanges, limits, clientEndVersion) { } } +/** + * @param {core.Change[]} changes + */ +function serializeChanges(changes) { + return JSON.stringify(changes.map(change => change.toRaw())) +} + module.exports = persistChanges diff --git a/services/history-v1/storage/lib/queue_changes.js b/services/history-v1/storage/lib/queue_changes.js deleted file mode 100644 index 6b8d4b22b4..0000000000 --- a/services/history-v1/storage/lib/queue_changes.js +++ /dev/null @@ -1,75 +0,0 @@ -// @ts-check - -'use strict' - -const redisBackend = require('./chunk_store/redis') -const { BlobStore } = require('./blob_store') -const chunkStore = require('./chunk_store') -const core = require('overleaf-editor-core') -const Chunk = core.Chunk - -/** - * Queues an incoming set of changes after validating them against the current snapshot. - * - * @async - * @function queueChanges - * @param {string} projectId - The project to queue changes for. - * @param {Array} changesToQueue - An array of change objects to be applied and queued. - * @param {number} endVersion - The expected version of the project before these changes are applied. - * This is used for optimistic concurrency control. - * @param {Object} [opts] - Additional options for queuing changes. - * @throws {Chunk.ConflictingEndVersion} If the provided `endVersion` does not match the - * current version of the project. - * @returns {Promise} A promise that resolves with the status returned by the - * `redisBackend.queueChanges` operation. 
- */ -async function queueChanges(projectId, changesToQueue, endVersion, opts) { - const result = await redisBackend.getHeadSnapshot(projectId) - let currentSnapshot = null - let currentVersion = null - if (result) { - // If we have a snapshot in redis, we can use it to check the current state - // of the project and apply changes to it. - currentSnapshot = result.snapshot - currentVersion = result.version - } else { - // Otherwise, load the latest chunk from the chunk store. - const latestChunk = await chunkStore.loadLatest(projectId, { - persistedOnly: true, - }) - // Throw an error if no latest chunk is found, indicating the project has not been initialised. - if (!latestChunk) { - throw new Chunk.NotFoundError(projectId) - } - currentSnapshot = latestChunk.getSnapshot() - currentSnapshot.applyAll(latestChunk.getChanges()) - currentVersion = latestChunk.getEndVersion() - } - - // Ensure the endVersion matches the current version of the project. - if (endVersion !== currentVersion) { - throw new Chunk.ConflictingEndVersion(endVersion, currentVersion) - } - - // Compute the new hollow snapshot to be saved to redis. - const hollowSnapshot = currentSnapshot - const blobStore = new BlobStore(projectId) - await hollowSnapshot.loadFiles('hollow', blobStore) - // Clone the changes to avoid modifying the original ones when computing the hollow snapshot. 
- const hollowChanges = changesToQueue.map(change => change.clone()) - for (const change of hollowChanges) { - await change.loadFiles('hollow', blobStore) - } - hollowSnapshot.applyAll(hollowChanges, { strict: true }) - const baseVersion = currentVersion - const status = await redisBackend.queueChanges( - projectId, - hollowSnapshot, - baseVersion, - changesToQueue, - opts - ) - return status -} - -module.exports = queueChanges diff --git a/services/history-v1/storage/lib/resync_project.js b/services/history-v1/storage/lib/resync_project.js deleted file mode 100644 index 3ec680bb5b..0000000000 --- a/services/history-v1/storage/lib/resync_project.js +++ /dev/null @@ -1,14 +0,0 @@ -// @ts-check - -const config = require('config') -const { fetchNothing } = require('@overleaf/fetch-utils') - -const PROJECT_HISTORY_URL = `http://${config.projectHistory.host}:${config.projectHistory.port}` - -async function resyncProject(projectId) { - await fetchNothing(`${PROJECT_HISTORY_URL}/project/${projectId}/resync`, { - method: 'POST', - }) -} - -module.exports = resyncProject diff --git a/services/history-v1/storage/lib/scan.js b/services/history-v1/storage/lib/scan.js index d55f5362c1..1f2a335254 100644 --- a/services/history-v1/storage/lib/scan.js +++ b/services/history-v1/storage/lib/scan.js @@ -1,7 +1,3 @@ -// @ts-check - -'use strict' - const logger = require('@overleaf/logger') const { JobNotFoundError, JobNotReadyError } = require('./chunk_store/errors') const BATCH_SIZE = 1000 // Default batch size for SCAN diff --git a/services/history-v1/storage/scripts/expire_redis_chunks.js b/services/history-v1/storage/scripts/expire_redis_chunks.js index 60ce4c66f6..af2be097b6 100644 --- a/services/history-v1/storage/scripts/expire_redis_chunks.js +++ b/services/history-v1/storage/scripts/expire_redis_chunks.js @@ -14,9 +14,12 @@ logger.initialize('expire-redis-chunks') async function expireProjectAction(projectId) { const job = await claimExpireJob(projectId) - await 
expireProject(projectId) - if (job && job.close) { - await job.close() + try { + await expireProject(projectId) + } finally { + if (job && job.close) { + await job.close() + } } } diff --git a/services/history-v1/storage/scripts/persist_and_expire_queues.sh b/services/history-v1/storage/scripts/persist_and_expire_queues.sh deleted file mode 100644 index d5789541da..0000000000 --- a/services/history-v1/storage/scripts/persist_and_expire_queues.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/sh -node storage/scripts/persist_redis_chunks.mjs --queue --max-time 270 -node storage/scripts/expire_redis_chunks.js diff --git a/services/history-v1/storage/scripts/persist_redis_chunks.mjs b/services/history-v1/storage/scripts/persist_redis_chunks.mjs deleted file mode 100644 index dd7e9f3a51..0000000000 --- a/services/history-v1/storage/scripts/persist_redis_chunks.mjs +++ /dev/null @@ -1,181 +0,0 @@ -import config from 'config' -import PQueue from 'p-queue' -import { fetchNothing } from '@overleaf/fetch-utils' -import logger from '@overleaf/logger' -import commandLineArgs from 'command-line-args' -import * as redis from '../lib/redis.js' -import knex from '../lib/knex.js' -import knexReadOnly from '../lib/knex_read_only.js' -import { client } from '../lib/mongodb.js' -import { scanAndProcessDueItems } from '../lib/scan.js' -import persistBuffer from '../lib/persist_buffer.js' -import { claimPersistJob } from '../lib/chunk_store/redis.js' -import { loadGlobalBlobs } from '../lib/blob_store/index.js' -import { EventEmitter } from 'node:events' -import { fileURLToPath } from 'node:url' - -// Something is registering 11 listeners, over the limit of 10, which generates -// a lot of warning noise. 
-EventEmitter.defaultMaxListeners = 11 - -const rclient = redis.rclientHistory - -const optionDefinitions = [ - { name: 'dry-run', alias: 'd', type: Boolean }, - { name: 'queue', type: Boolean }, - { name: 'max-time', type: Number }, - { name: 'min-rate', type: Number, defaultValue: 1 }, -] -const options = commandLineArgs(optionDefinitions) -const DRY_RUN = options['dry-run'] || false -const USE_QUEUE = options.queue || false -const MAX_TIME = options['max-time'] || null -const MIN_RATE = options['min-rate'] -const HISTORY_V1_URL = `http://${process.env.HISTORY_V1_HOST || 'localhost'}:${process.env.PORT || 3100}` -let isShuttingDown = false - -logger.initialize('persist-redis-chunks') - -async function persistProjectAction(projectId) { - const job = await claimPersistJob(projectId) - // Set limits to force us to persist all of the changes. - const farFuture = new Date() - farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000) - const limits = { - maxChanges: 0, - minChangeTimestamp: farFuture, - maxChangeTimestamp: farFuture, - autoResync: true, - } - await persistBuffer(projectId, limits) - if (job && job.close) { - await job.close() - } -} - -async function requestProjectFlush(projectId) { - const job = await claimPersistJob(projectId) - logger.debug({ projectId }, 'sending project flush request') - const url = `${HISTORY_V1_URL}/api/projects/${projectId}/flush` - const credentials = Buffer.from( - `staging:${config.get('basicHttpAuth.password')}` - ).toString('base64') - await fetchNothing(url, { - method: 'POST', - headers: { - Authorization: `Basic ${credentials}`, - }, - }) - if (job && job.close) { - await job.close() - } -} - -async function persistQueuedProjects(queuedProjects) { - const totalCount = queuedProjects.size - // Compute the rate at which we need to dispatch requests - const targetRate = MAX_TIME > 0 ? Math.ceil(totalCount / MAX_TIME) : 0 - // Rate limit to spread the requests over the interval. 
- const queue = new PQueue({ - intervalCap: Math.max(MIN_RATE, targetRate), - interval: 1000, // use a 1 second interval - }) - logger.info( - { totalCount, targetRate, minRate: MIN_RATE, maxTime: MAX_TIME }, - 'dispatching project flush requests' - ) - const startTime = Date.now() - let dispatchedCount = 0 - for (const projectId of queuedProjects) { - if (isShuttingDown) { - logger.info('Shutting down, stopping project flush requests') - queue.clear() - break - } - queue.add(async () => { - try { - await requestProjectFlush(projectId) - } catch (err) { - logger.error({ err, projectId }, 'error while flushing project') - } - }) - dispatchedCount++ - if (dispatchedCount % 1000 === 0) { - logger.info( - { count: dispatchedCount }, - 'dispatched project flush requests' - ) - } - await queue.onEmpty() - } - const elapsedTime = Math.floor((Date.now() - startTime) / 1000) - logger.info( - { count: totalCount, elapsedTime }, - 'dispatched project flush requests' - ) - await queue.onIdle() -} - -async function runPersistChunks() { - const queuedProjects = new Set() - - async function queueProjectAction(projectId) { - queuedProjects.add(projectId) - } - - await loadGlobalBlobs() - await scanAndProcessDueItems( - rclient, - 'persistChunks', - 'persist-time', - USE_QUEUE ? 
queueProjectAction : persistProjectAction, - DRY_RUN - ) - - if (USE_QUEUE) { - if (isShuttingDown) { - logger.info('Shutting down, skipping queued project persistence') - return - } - logger.info( - { count: queuedProjects.size }, - 'queued projects for persistence' - ) - await persistQueuedProjects(queuedProjects) - } -} - -async function main() { - try { - await runPersistChunks() - } catch (err) { - logger.fatal( - { err, taskName: 'persistChunks' }, - 'Unhandled error in runPersistChunks' - ) - process.exit(1) - } finally { - await redis.disconnect() - await client.close() - await knex.destroy() - await knexReadOnly.destroy() - } -} - -function gracefulShutdown() { - if (isShuttingDown) { - return - } - isShuttingDown = true - logger.info({ isShuttingDown }, 'received shutdown signal, cleaning up...') -} - -// Check if the module is being run directly -const currentScriptPath = fileURLToPath(import.meta.url) -if (process.argv[1] === currentScriptPath) { - process.on('SIGINT', gracefulShutdown) - process.on('SIGTERM', gracefulShutdown) - main() -} - -export { runPersistChunks } diff --git a/services/history-v1/storage/scripts/show_buffer.js b/services/history-v1/storage/scripts/show_buffer.js deleted file mode 100644 index 1d80ee227d..0000000000 --- a/services/history-v1/storage/scripts/show_buffer.js +++ /dev/null @@ -1,117 +0,0 @@ -#!/usr/bin/env node -// @ts-check - -const { rclientHistory: rclient } = require('../lib/redis') -const { keySchema } = require('../lib/chunk_store/redis') -const commandLineArgs = require('command-line-args') - -const optionDefinitions = [ - { name: 'historyId', type: String, defaultOption: true }, -] - -// Column width for key display alignment; can be overridden with COL_WIDTH env variable -const COLUMN_WIDTH = process.env.COL_WIDTH - ? 
parseInt(process.env.COL_WIDTH, 10) - : 45 - -let options -try { - options = commandLineArgs(optionDefinitions) -} catch (e) { - console.error( - 'Error parsing command line arguments:', - e instanceof Error ? e.message : String(e) - ) - console.error('Usage: ./show_buffer.js ') - process.exit(1) -} - -const { historyId } = options - -if (!historyId) { - console.error('Usage: ./show_buffer.js ') - process.exit(1) -} - -function format(str, indent = COLUMN_WIDTH + 2) { - const lines = str.split('\n') - for (let i = 1; i < lines.length; i++) { - lines[i] = ' '.repeat(indent) + lines[i] - } - return lines.join('\n') -} - -async function displayKeyValue( - rclient, - key, - { parseJson = false, formatDate = false } = {} -) { - const value = await rclient.get(key) - let displayValue = '(nil)' - if (value) { - if (parseJson) { - try { - displayValue = format(JSON.stringify(JSON.parse(value), null, 2)) - } catch (e) { - displayValue = ` Raw value: ${value}` - } - } else if (formatDate) { - const ts = parseInt(value, 10) - displayValue = `${new Date(ts).toISOString()} (${value})` - } else { - displayValue = value - } - } - console.log(`${key.padStart(COLUMN_WIDTH)}: ${displayValue}`) -} - -async function displayBuffer(projectId) { - console.log(`Buffer for history ID: ${projectId}`) - console.log('--------------------------------------------------') - - try { - const headKey = keySchema.head({ projectId }) - const headVersionKey = keySchema.headVersion({ projectId }) - const persistedVersionKey = keySchema.persistedVersion({ projectId }) - const expireTimeKey = keySchema.expireTime({ projectId }) - const persistTimeKey = keySchema.persistTime({ projectId }) - const changesKey = keySchema.changes({ projectId }) - - await displayKeyValue(rclient, headKey, { parseJson: true }) - await displayKeyValue(rclient, headVersionKey) - await displayKeyValue(rclient, persistedVersionKey) - await displayKeyValue(rclient, expireTimeKey, { formatDate: true }) - await 
displayKeyValue(rclient, persistTimeKey, { formatDate: true }) - - const changesList = await rclient.lrange(changesKey, 0, -1) - - // 6. changes - let changesListDisplay = '(nil)' - if (changesList) { - changesListDisplay = changesList.length - ? format( - changesList - .map((change, index) => `[${index}]: ${change}`) - .join('\n') - ) - : '(empty list)' - } - console.log(`${changesKey.padStart(COLUMN_WIDTH)}: ${changesListDisplay}`) - } catch (error) { - console.error('Error fetching data from Redis:', error) - throw error - } -} - -;(async () => { - let errorOccurred = false - try { - await displayBuffer(historyId) - } catch (error) { - errorOccurred = true - } finally { - rclient.quit(() => { - process.exit(errorOccurred ? 1 : 0) - }) - } -})() diff --git a/services/history-v1/test/acceptance/js/api/project_flush.test.js b/services/history-v1/test/acceptance/js/api/project_flush.test.js deleted file mode 100644 index f8d0b23d8e..0000000000 --- a/services/history-v1/test/acceptance/js/api/project_flush.test.js +++ /dev/null @@ -1,66 +0,0 @@ -'use strict' - -const BPromise = require('bluebird') -const { expect } = require('chai') -const HTTPStatus = require('http-status') -const fetch = require('node-fetch') -const fs = BPromise.promisifyAll(require('node:fs')) - -const cleanup = require('../storage/support/cleanup') -const fixtures = require('../storage/support/fixtures') -const testFiles = require('../storage/support/test_files') -const testProjects = require('./support/test_projects') -const testServer = require('./support/test_server') - -const { Change, File, Operation } = require('overleaf-editor-core') -const queueChanges = require('../../../../storage/lib/queue_changes') -const { getState } = require('../../../../storage/lib/chunk_store/redis') - -describe('project flush', function () { - beforeEach(cleanup.everything) - beforeEach(fixtures.create) - - it('persists queued changes to the chunk store', async function () { - const basicAuthClient = 
testServer.basicAuthClient - const projectId = await testProjects.createEmptyProject() - - // upload an empty file - const response = await fetch( - testServer.url( - `/api/projects/${projectId}/blobs/${File.EMPTY_FILE_HASH}`, - { qs: { pathname: 'main.tex' } } - ), - { - method: 'PUT', - body: fs.createReadStream(testFiles.path('empty.tex')), - headers: { - Authorization: testServer.basicAuthHeader, - }, - } - ) - expect(response.ok).to.be.true - - const testFile = File.fromHash(File.EMPTY_FILE_HASH) - const testChange = new Change( - [Operation.addFile('main.tex', testFile)], - new Date() - ) - await queueChanges(projectId, [testChange], 0) - - // Verify that the changes are queued and not yet persisted - const initialState = await getState(projectId) - expect(initialState.persistedVersion).to.be.null - expect(initialState.changes).to.have.lengthOf(1) - - const importResponse = - await basicAuthClient.apis.ProjectImport.flushChanges({ - project_id: projectId, - }) - - expect(importResponse.status).to.equal(HTTPStatus.OK) - - // Verify that the changes were persisted to the chunk store - const finalState = await getState(projectId) - expect(finalState.persistedVersion).to.equal(1) - }) -}) diff --git a/services/history-v1/test/acceptance/js/api/rollout.test.js b/services/history-v1/test/acceptance/js/api/rollout.test.js deleted file mode 100644 index f1a65e5aff..0000000000 --- a/services/history-v1/test/acceptance/js/api/rollout.test.js +++ /dev/null @@ -1,115 +0,0 @@ -const config = require('config') -const sinon = require('sinon') -const { expect } = require('chai') - -const cleanup = require('../storage/support/cleanup') -const Rollout = require('../../../../api/app/rollout') - -describe('rollout', function () { - beforeEach(cleanup.everything) - beforeEach('Set up stubs', function () { - sinon.stub(config, 'has').callThrough() - sinon.stub(config, 'get').callThrough() - }) - afterEach(sinon.restore) - - it('should return a valid history buffer level', function 
() { - setMockConfig('historyBufferLevel', '2') - setMockConfig('forcePersistBuffer', 'false') - - const rollout = new Rollout(config) - const { historyBufferLevel, forcePersistBuffer } = - rollout.getHistoryBufferLevelOptions('test-project-id') - expect(historyBufferLevel).to.equal(2) - expect(forcePersistBuffer).to.be.false - }) - - it('should return a valid history buffer level and force persist buffer options', function () { - setMockConfig('historyBufferLevel', '1') - setMockConfig('forcePersistBuffer', 'true') - const rollout = new Rollout(config) - const { historyBufferLevel, forcePersistBuffer } = - rollout.getHistoryBufferLevelOptions('test-project-id') - expect(historyBufferLevel).to.equal(1) - expect(forcePersistBuffer).to.be.true - }) - - describe('with a higher next history buffer level rollout', function () { - beforeEach(function () { - setMockConfig('historyBufferLevel', '2') - setMockConfig('forcePersistBuffer', 'false') - setMockConfig('nextHistoryBufferLevel', '3') - }) - it('should return the expected history buffer level when the rollout percentage is zero', function () { - setMockConfig('nextHistoryBufferLevelRolloutPercentage', '0') - const rollout = new Rollout(config) - for (let i = 0; i < 1000; i++) { - const { historyBufferLevel, forcePersistBuffer } = - rollout.getHistoryBufferLevelOptions(`test-project-id-${i}`) - expect(historyBufferLevel).to.equal(2) - expect(forcePersistBuffer).to.be.false - } - }) - - it('should return the expected distribution of levels when the rollout percentage is 10%', function () { - setMockConfig('nextHistoryBufferLevelRolloutPercentage', '10') - const rollout = new Rollout(config) - let currentLevel = 0 - let nextLevel = 0 - for (let i = 0; i < 1000; i++) { - const { historyBufferLevel } = rollout.getHistoryBufferLevelOptions( - `test-project-id-${i}` - ) - switch (historyBufferLevel) { - case 2: - currentLevel++ - break - case 3: - nextLevel++ - break - default: - expect.fail( - `Unexpected history buffer 
level: ${historyBufferLevel}` - ) - } - } - const twoPercentage = (currentLevel / 1000) * 100 - const threePercentage = (nextLevel / 1000) * 100 - expect(twoPercentage).to.be.closeTo(90, 5) // 90% for level 2 - expect(threePercentage).to.be.closeTo(10, 5) // 10% for level 3 - }) - }) - describe('with a next history buffer level lower than the current level', function () { - beforeEach(function () { - setMockConfig('historyBufferLevel', '3') - setMockConfig('forcePersistBuffer', 'false') - setMockConfig('nextHistoryBufferLevel', '2') - }) - it('should always return the current level when the rollout percentage is zero', function () { - setMockConfig('nextHistoryBufferLevelRolloutPercentage', '0') - const rollout = new Rollout(config) - for (let i = 0; i < 1000; i++) { - const { historyBufferLevel, forcePersistBuffer } = - rollout.getHistoryBufferLevelOptions(`test-project-id-${i}`) - expect(historyBufferLevel).to.equal(3) - expect(forcePersistBuffer).to.be.false - } - }) - - it('should always return the current level regardless of the rollout percentage', function () { - setMockConfig('nextHistoryBufferLevelRolloutPercentage', '10') - const rollout = new Rollout(config) - for (let i = 0; i < 1000; i++) { - const { historyBufferLevel } = rollout.getHistoryBufferLevelOptions( - `test-project-id-${i}` - ) - expect(historyBufferLevel).to.equal(3) - } - }) - }) -}) - -function setMockConfig(path, value) { - config.has.withArgs(path).returns(true) - config.get.withArgs(path).returns(value) -} diff --git a/services/history-v1/test/acceptance/js/storage/chunk_store.test.js b/services/history-v1/test/acceptance/js/storage/chunk_store.test.js index 8b06b8e412..da70467934 100644 --- a/services/history-v1/test/acceptance/js/storage/chunk_store.test.js +++ b/services/history-v1/test/acceptance/js/storage/chunk_store.test.js @@ -470,8 +470,6 @@ describe('chunkStore', function () { describe('with changes queued in the Redis buffer', function () { let queuedChanges - const 
firstQueuedChangeTimestamp = new Date('2017-01-01T00:01:00') - const lastQueuedChangeTimestamp = new Date('2017-01-01T00:02:00') beforeEach(async function () { const snapshot = thirdChunk.getSnapshot() @@ -483,15 +481,7 @@ describe('chunkStore', function () { 'in-redis.tex', File.createLazyFromBlobs(blob) ), - firstQueuedChangeTimestamp - ), - makeChange( - // Add a second change to make the buffer more interesting - Operation.editFile( - 'in-redis.tex', - TextOperation.fromJSON({ textOperation: ['hello'] }) - ), - lastQueuedChangeTimestamp + new Date() ), ] await redisBackend.queueChanges( @@ -508,15 +498,6 @@ describe('chunkStore', function () { .getChanges() .concat(queuedChanges) expect(chunk.getChanges()).to.deep.equal(expectedChanges) - expect(chunk.getStartVersion()).to.equal( - thirdChunk.getStartVersion() - ) - expect(chunk.getEndVersion()).to.equal( - thirdChunk.getEndVersion() + queuedChanges.length - ) - expect(chunk.getEndTimestamp()).to.deep.equal( - lastQueuedChangeTimestamp - ) }) it('includes the queued changes when getting the latest chunk by timestamp', async function () { @@ -528,12 +509,6 @@ describe('chunkStore', function () { .getChanges() .concat(queuedChanges) expect(chunk.getChanges()).to.deep.equal(expectedChanges) - expect(chunk.getStartVersion()).to.equal( - thirdChunk.getStartVersion() - ) - expect(chunk.getEndVersion()).to.equal( - thirdChunk.getEndVersion() + queuedChanges.length - ) }) it("doesn't include the queued changes when getting another chunk by timestamp", async function () { @@ -543,11 +518,6 @@ describe('chunkStore', function () { ) const expectedChanges = secondChunk.getChanges() expect(chunk.getChanges()).to.deep.equal(expectedChanges) - expect(chunk.getStartVersion()).to.equal( - secondChunk.getStartVersion() - ) - expect(chunk.getEndVersion()).to.equal(secondChunk.getEndVersion()) - expect(chunk.getEndTimestamp()).to.deep.equal(secondChunkTimestamp) }) it('includes the queued changes when getting the latest chunk by 
version', async function () { @@ -559,15 +529,6 @@ describe('chunkStore', function () { .getChanges() .concat(queuedChanges) expect(chunk.getChanges()).to.deep.equal(expectedChanges) - expect(chunk.getStartVersion()).to.equal( - thirdChunk.getStartVersion() - ) - expect(chunk.getEndVersion()).to.equal( - thirdChunk.getEndVersion() + queuedChanges.length - ) - expect(chunk.getEndTimestamp()).to.deep.equal( - lastQueuedChangeTimestamp - ) }) it("doesn't include the queued changes when getting another chunk by version", async function () { @@ -577,47 +538,6 @@ describe('chunkStore', function () { ) const expectedChanges = secondChunk.getChanges() expect(chunk.getChanges()).to.deep.equal(expectedChanges) - expect(chunk.getStartVersion()).to.equal( - secondChunk.getStartVersion() - ) - expect(chunk.getEndVersion()).to.equal(secondChunk.getEndVersion()) - expect(chunk.getEndTimestamp()).to.deep.equal(secondChunkTimestamp) - }) - - it('loads a version that is only in the Redis buffer', async function () { - const versionInRedis = thirdChunk.getEndVersion() + 1 // the first change in Redis - const chunk = await chunkStore.loadAtVersion( - projectId, - versionInRedis - ) - // The chunk should contain changes from the thirdChunk and the queuedChanges - const expectedChanges = thirdChunk - .getChanges() - .concat(queuedChanges) - expect(chunk.getChanges()).to.deep.equal(expectedChanges) - expect(chunk.getStartVersion()).to.equal( - thirdChunk.getStartVersion() - ) - expect(chunk.getEndVersion()).to.equal( - thirdChunk.getEndVersion() + queuedChanges.length - ) - expect(chunk.getEndTimestamp()).to.deep.equal( - lastQueuedChangeTimestamp - ) - }) - - it('throws an error when loading a version beyond the Redis buffer', async function () { - const versionBeyondRedis = - thirdChunk.getEndVersion() + queuedChanges.length + 1 - await expect( - chunkStore.loadAtVersion(projectId, versionBeyondRedis) - ) - .to.be.rejectedWith(chunkStore.VersionOutOfBoundsError) - 
.and.eventually.satisfy(err => { - expect(err.info).to.have.property('projectId', projectId) - expect(err.info).to.have.property('version', versionBeyondRedis) - return true - }) }) }) diff --git a/services/history-v1/test/acceptance/js/storage/chunk_store_redis_backend.test.js b/services/history-v1/test/acceptance/js/storage/chunk_store_redis_backend.test.js index d34cd701d0..2b13343fc4 100644 --- a/services/history-v1/test/acceptance/js/storage/chunk_store_redis_backend.test.js +++ b/services/history-v1/test/acceptance/js/storage/chunk_store_redis_backend.test.js @@ -699,8 +699,6 @@ describe('chunk buffer Redis backend', function () { }) describe('setPersistedVersion', function () { - const persistTime = Date.now() + 60 * 1000 // 1 minute from now - it('should return not_found when project does not exist', async function () { const result = await redisBackend.setPersistedVersion(projectId, 5) expect(result).to.equal('not_found') @@ -711,41 +709,15 @@ describe('chunk buffer Redis backend', function () { await setupState(projectId, { headVersion: 5, persistedVersion: null, - persistTime, changes: 5, }) }) it('should set the persisted version', async function () { - const status = await redisBackend.setPersistedVersion(projectId, 3) - expect(status).to.equal('ok') + await redisBackend.setPersistedVersion(projectId, 3) const state = await redisBackend.getState(projectId) expect(state.persistedVersion).to.equal(3) }) - - it('should leave the persist time if the persisted version is not current', async function () { - const status = await redisBackend.setPersistedVersion(projectId, 3) - expect(status).to.equal('ok') - const state = await redisBackend.getState(projectId) - expect(state.persistTime).to.deep.equal(persistTime) // Persist time should remain unchanged - }) - - it('should refuse to set a persisted version greater than the head version', async function () { - await expect( - redisBackend.setPersistedVersion(projectId, 10) - 
).to.be.rejectedWith(VersionOutOfBoundsError) - // Ensure persisted version remains unchanged - const state = await redisBackend.getState(projectId) - expect(state.persistedVersion).to.be.null - }) - - it('should clear the persist time when the persisted version is current', async function () { - const status = await redisBackend.setPersistedVersion(projectId, 5) - expect(status).to.equal('ok') - const state = await redisBackend.getState(projectId) - expect(state.persistedVersion).to.equal(5) - expect(state.persistTime).to.be.null // Persist time should be cleared - }) }) describe('when the persisted version is set', function () { @@ -753,46 +725,18 @@ describe('chunk buffer Redis backend', function () { await setupState(projectId, { headVersion: 5, persistedVersion: 3, - persistTime, changes: 5, }) }) it('should set the persisted version', async function () { - const status = await redisBackend.setPersistedVersion(projectId, 5) - expect(status).to.equal('ok') + await redisBackend.setPersistedVersion(projectId, 5) const state = await redisBackend.getState(projectId) expect(state.persistedVersion).to.equal(5) }) - it('should clear the persist time when the persisted version is current', async function () { - const status = await redisBackend.setPersistedVersion(projectId, 5) - expect(status).to.equal('ok') - const state = await redisBackend.getState(projectId) - expect(state.persistedVersion).to.equal(5) - expect(state.persistTime).to.be.null // Persist time should be cleared - }) - - it('should leave the persist time if the persisted version is not current', async function () { - const status = await redisBackend.setPersistedVersion(projectId, 4) - expect(status).to.equal('ok') - const state = await redisBackend.getState(projectId) - expect(state.persistedVersion).to.equal(4) - expect(state.persistTime).to.deep.equal(persistTime) // Persist time should remain unchanged - }) - it('should not decrease the persisted version', async function () { - const status = await 
redisBackend.setPersistedVersion(projectId, 2) - expect(status).to.equal('too_low') - const state = await redisBackend.getState(projectId) - expect(state.persistedVersion).to.equal(3) - }) - - it('should refuse to set a persisted version greater than the head version', async function () { - await expect( - redisBackend.setPersistedVersion(projectId, 10) - ).to.be.rejectedWith(VersionOutOfBoundsError) - // Ensure persisted version remains unchanged + await redisBackend.setPersistedVersion(projectId, 2) const state = await redisBackend.getState(projectId) expect(state.persistedVersion).to.equal(3) }) @@ -1218,8 +1162,6 @@ function makeChange() { * @param {object} params * @param {number} params.headVersion * @param {number | null} params.persistedVersion - * @param {number | null} params.persistTime - time when the project should be persisted - * @param {number | null} params.expireTime - time when the project should expire * @param {number} params.changes - number of changes to create * @return {Promise} dummy changes that have been created */ @@ -1231,12 +1173,7 @@ async function setupState(projectId, params) { params.persistedVersion ) } - if (params.persistTime) { - await rclient.set(keySchema.persistTime({ projectId }), params.persistTime) - } - if (params.expireTime) { - await rclient.set(keySchema.expireTime({ projectId }), params.expireTime) - } + const changes = [] for (let i = 1; i <= params.changes; i++) { const change = new Change( diff --git a/services/history-v1/test/acceptance/js/storage/expire_redis_chunks.test.js b/services/history-v1/test/acceptance/js/storage/expire_redis_chunks.test.js index f8a5943c43..b657991dda 100644 --- a/services/history-v1/test/acceptance/js/storage/expire_redis_chunks.test.js +++ b/services/history-v1/test/acceptance/js/storage/expire_redis_chunks.test.js @@ -1,13 +1,91 @@ 'use strict' const { expect } = require('chai') -const { Author, Change } = require('overleaf-editor-core') +const { promisify } = require('node:util') 
+const { execFile } = require('node:child_process') +const { Snapshot, Author, Change } = require('overleaf-editor-core') const cleanup = require('./support/cleanup') -const { setupProjectState, rclient, keySchema } = require('./support/redis') -const { runScript } = require('./support/runscript') +const redisBackend = require('../../../../storage/lib/chunk_store/redis') +const redis = require('../../../../storage/lib/redis') +const rclient = redis.rclientHistory +const keySchema = redisBackend.keySchema const SCRIPT_PATH = 'storage/scripts/expire_redis_chunks.js' +async function runExpireScript() { + const TIMEOUT = 10 * 1000 // 10 seconds + let result + try { + result = await promisify(execFile)('node', [SCRIPT_PATH], { + encoding: 'utf-8', + timeout: TIMEOUT, + env: { + ...process.env, + LOG_LEVEL: 'debug', // Override LOG_LEVEL for script output + }, + }) + result.status = 0 + } catch (err) { + const { stdout, stderr, code } = err + if (typeof code !== 'number') { + console.error('Error running expire script:', err) + throw err + } + result = { stdout, stderr, status: code } + } + // The script might exit with status 1 if it finds no keys to process, which is ok + if (result.status !== 0 && result.status !== 1) { + console.error('Expire script failed:', result.stderr) + throw new Error(`expire script failed with status ${result.status}`) + } + return result +} + +// Helper to set up a basic project state in Redis +async function setupProjectState( + projectId, + { + headVersion = 0, + persistedVersion = null, + expireTime = null, + persistTime = null, + changes = [], + } +) { + const headSnapshot = new Snapshot() + await rclient.set( + keySchema.head({ projectId }), + JSON.stringify(headSnapshot.toRaw()) + ) + await rclient.set( + keySchema.headVersion({ projectId }), + headVersion.toString() + ) + + if (persistedVersion !== null) { + await rclient.set( + keySchema.persistedVersion({ projectId }), + persistedVersion.toString() + ) + } + if (expireTime !== null) 
{ + await rclient.set( + keySchema.expireTime({ projectId }), + expireTime.toString() + ) + } + if (persistTime !== null) { + await rclient.set( + keySchema.persistTime({ projectId }), + persistTime.toString() + ) + } + if (changes.length > 0) { + const rawChanges = changes.map(c => JSON.stringify(c.toRaw())) + await rclient.rpush(keySchema.changes({ projectId }), ...rawChanges) + } +} + function makeChange() { const timestamp = new Date() const author = new Author(123, 'test@example.com', 'Test User') @@ -72,7 +150,7 @@ describe('expire_redis_chunks script', function () { }) // Run the expire script once after all projects are set up - await runScript(SCRIPT_PATH) + await runExpireScript() }) async function checkProjectStatus(projectId) { diff --git a/services/history-v1/test/acceptance/js/storage/persist_buffer.test.mjs b/services/history-v1/test/acceptance/js/storage/persist_buffer.test.mjs deleted file mode 100644 index 138a70e626..0000000000 --- a/services/history-v1/test/acceptance/js/storage/persist_buffer.test.mjs +++ /dev/null @@ -1,519 +0,0 @@ -'use strict' - -import fs from 'node:fs' -import { expect } from 'chai' -import { - Change, - Snapshot, - File, - TextOperation, - AddFileOperation, - EditFileOperation, // Added EditFileOperation -} from 'overleaf-editor-core' -import persistBuffer from '../../../../storage/lib/persist_buffer.js' -import chunkStore from '../../../../storage/lib/chunk_store/index.js' -import redisBackend from '../../../../storage/lib/chunk_store/redis.js' -import persistChanges from '../../../../storage/lib/persist_changes.js' -import cleanup from './support/cleanup.js' -import fixtures from './support/fixtures.js' -import testFiles from './support/test_files.js' - -describe('persistBuffer', function () { - let projectId - const initialVersion = 0 - let limitsToPersistImmediately - - before(function () { - const farFuture = new Date() - farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000) - limitsToPersistImmediately = { - 
minChangeTimestamp: farFuture, - maxChangeTimestamp: farFuture, - maxChunkChanges: 10, - } - }) - - beforeEach(cleanup.everything) - beforeEach(fixtures.create) - - beforeEach(async function () { - projectId = fixtures.docs.uninitializedProject.id - await chunkStore.initializeProject(projectId) - }) - - describe('with an empty initial chunk (new project)', function () { - it('should persist changes from Redis to a new chunk', async function () { - // create an initial snapshot and add the empty file `main.tex` - const HELLO_TXT = fs.readFileSync(testFiles.path('hello.txt')).toString() - - const createFile = new Change( - [new AddFileOperation('main.tex', File.fromString(HELLO_TXT))], - new Date(), - [] - ) - - await persistChanges( - projectId, - [createFile], - limitsToPersistImmediately, - 0 - ) - // Now queue some changes in Redis - const op1 = new TextOperation().insert('Hello').retain(HELLO_TXT.length) - const change1 = new Change( - [new EditFileOperation('main.tex', op1)], - new Date() - ) - - const op2 = new TextOperation() - .retain('Hello'.length) - .insert(' World') - .retain(HELLO_TXT.length) - const change2 = new Change( - [new EditFileOperation('main.tex', op2)], - new Date() - ) - - const changesToQueue = [change1, change2] - - const finalHeadVersion = initialVersion + 1 + changesToQueue.length - - const now = Date.now() - await redisBackend.queueChanges( - projectId, - new Snapshot(), // dummy snapshot - 1, - changesToQueue, - { - persistTime: now + redisBackend.MAX_PERSIST_DELAY_MS, - expireTime: now + redisBackend.PROJECT_TTL_MS, - } - ) - await redisBackend.setPersistedVersion(projectId, initialVersion) - - // Persist the changes from Redis to the chunk store - const persistResult = await persistBuffer( - projectId, - limitsToPersistImmediately - ) - - // Check the return value of persistBuffer - expect(persistResult).to.exist - expect(persistResult).to.have.property('numberOfChangesPersisted') - 
expect(persistResult).to.have.property('originalEndVersion') - expect(persistResult).to.have.property('currentChunk') - expect(persistResult).to.have.property('resyncNeeded') - expect(persistResult.numberOfChangesPersisted).to.equal( - changesToQueue.length - ) - expect(persistResult.originalEndVersion).to.equal(initialVersion + 1) - expect(persistResult.resyncNeeded).to.be.false - - const latestChunk = await chunkStore.loadLatest(projectId, { - persistedOnly: true, - }) - expect(latestChunk).to.exist - expect(latestChunk.getStartVersion()).to.equal(initialVersion) - expect(latestChunk.getEndVersion()).to.equal(finalHeadVersion) - expect(latestChunk.getChanges().length).to.equal( - changesToQueue.length + 1 - ) - // Check that chunk returned by persistBuffer matches the latest chunk - expect(latestChunk).to.deep.equal(persistResult.currentChunk) - - const chunkSnapshot = latestChunk.getSnapshot() - expect(Object.keys(chunkSnapshot.getFileMap()).length).to.equal(1) - - const persistedVersionInRedis = (await redisBackend.getState(projectId)) - .persistedVersion - expect(persistedVersionInRedis).to.equal(finalHeadVersion) - - const nonPersisted = await redisBackend.getNonPersistedChanges( - projectId, - finalHeadVersion - ) - expect(nonPersisted).to.be.an('array').that.is.empty - }) - }) - - describe('with an existing chunk and new changes in Redis', function () { - it('should persist new changes from Redis, appending to existing history', async function () { - const initialContent = 'Initial document content.\n' - - const addInitialFileChange = new Change( - [new AddFileOperation('main.tex', File.fromString(initialContent))], - new Date(), - [] - ) - - await persistChanges( - projectId, - [addInitialFileChange], - limitsToPersistImmediately, - initialVersion - ) - const versionAfterInitialSetup = initialVersion + 1 // Now version is 1 - - const opForChunk1 = new TextOperation() - .retain(initialContent.length) - .insert(' First addition.') - const changesForChunk1 = 
[ - new Change( - [new EditFileOperation('main.tex', opForChunk1)], - new Date(), - [] - ), - ] - - await persistChanges( - projectId, - changesForChunk1, - limitsToPersistImmediately, // Original limits for this step - versionAfterInitialSetup // Correct clientEndVersion - ) - // Update persistedChunkEndVersion: 1 (from setup) + 1 (from changesForChunk1) = 2 - const persistedChunkEndVersion = - versionAfterInitialSetup + changesForChunk1.length - const contentAfterChunk1 = initialContent + ' First addition.' - - const opVersion2 = new TextOperation() - .retain(contentAfterChunk1.length) - .insert(' Second addition.') - const changeVersion2 = new Change( - [new EditFileOperation('main.tex', opVersion2)], - new Date(), - [] - ) - - const contentAfterChange2 = contentAfterChunk1 + ' Second addition.' - const opVersion3 = new TextOperation() - .retain(contentAfterChange2.length) - .insert(' Third addition.') - const changeVersion3 = new Change( - [new EditFileOperation('main.tex', opVersion3)], - new Date(), - [] - ) - - const redisChangesToPush = [changeVersion2, changeVersion3] - const finalHeadVersionAfterRedisPush = - persistedChunkEndVersion + redisChangesToPush.length - const now = Date.now() - - await redisBackend.queueChanges( - projectId, - new Snapshot(), // Use new Snapshot() like in the first test - persistedChunkEndVersion, - redisChangesToPush, - { - persistTime: now + redisBackend.MAX_PERSIST_DELAY_MS, - expireTime: now + redisBackend.PROJECT_TTL_MS, - } - ) - await redisBackend.setPersistedVersion( - projectId, - persistedChunkEndVersion - ) - - const persistResult = await persistBuffer( - projectId, - limitsToPersistImmediately - ) - - // Check the return value of persistBuffer - expect(persistResult).to.exist - expect(persistResult).to.have.property('numberOfChangesPersisted') - expect(persistResult).to.have.property('originalEndVersion') - expect(persistResult).to.have.property('currentChunk') - expect(persistResult).to.have.property('resyncNeeded') 
- expect(persistResult.numberOfChangesPersisted).to.equal( - redisChangesToPush.length - ) - expect(persistResult.originalEndVersion).to.equal( - persistedChunkEndVersion - ) - expect(persistResult.resyncNeeded).to.be.false - - const latestChunk = await chunkStore.loadLatest(projectId, { - persistedOnly: true, - }) - expect(latestChunk).to.exist - expect(latestChunk.getStartVersion()).to.equal(0) - expect(latestChunk.getEndVersion()).to.equal( - finalHeadVersionAfterRedisPush - ) - expect(latestChunk.getChanges().length).to.equal( - persistedChunkEndVersion + redisChangesToPush.length - ) - - const persistedVersionInRedisAfter = ( - await redisBackend.getState(projectId) - ).persistedVersion - expect(persistedVersionInRedisAfter).to.equal( - finalHeadVersionAfterRedisPush - ) - - // Check that chunk returned by persistBuffer matches the latest chunk - expect(persistResult.currentChunk).to.deep.equal(latestChunk) - - const nonPersisted = await redisBackend.getNonPersistedChanges( - projectId, - finalHeadVersionAfterRedisPush - ) - expect(nonPersisted).to.be.an('array').that.is.empty - }) - }) - - describe('when Redis has no new changes', function () { - let persistedChunkEndVersion - let changesForChunk1 - - beforeEach(async function () { - const initialContent = 'Content.' - - const addInitialFileChange = new Change( - [new AddFileOperation('main.tex', File.fromString(initialContent))], - new Date(), - [] - ) - - // Replace chunkStore.create with persistChanges - // clientEndVersion is initialVersion (0). This advances version to 1. 
- await persistChanges( - projectId, - [addInitialFileChange], - limitsToPersistImmediately, - initialVersion - ) - const versionAfterInitialSetup = initialVersion + 1 // Now version is 1 - - const opForChunk1 = new TextOperation() - .retain(initialContent.length) - .insert(' More.') - changesForChunk1 = [ - new Change( - [new EditFileOperation('main.tex', opForChunk1)], - new Date(), - [] - ), - ] - // Corrected persistChanges call: clientEndVersion is versionAfterInitialSetup (1) - await persistChanges( - projectId, - changesForChunk1, - limitsToPersistImmediately, // Original limits for this step - versionAfterInitialSetup // Correct clientEndVersion - ) - // Update persistedChunkEndVersion: 1 (from setup) + 1 (from changesForChunk1) = 2 - persistedChunkEndVersion = - versionAfterInitialSetup + changesForChunk1.length - }) - - it('should leave the persisted version and stored chunks unchanged', async function () { - const now = Date.now() - await redisBackend.queueChanges( - projectId, - new Snapshot(), - persistedChunkEndVersion - 1, - changesForChunk1, - { - persistTime: now + redisBackend.MAX_PERSIST_DELAY_MS, - expireTime: now + redisBackend.PROJECT_TTL_MS, - } - ) - await redisBackend.setPersistedVersion( - projectId, - persistedChunkEndVersion - ) - - const chunksBefore = await chunkStore.getProjectChunks(projectId) - - const persistResult = await persistBuffer( - projectId, - limitsToPersistImmediately - ) - - const currentChunk = await chunkStore.loadLatest(projectId, { - persistedOnly: true, - }) - expect(persistResult).to.deep.equal({ - numberOfChangesPersisted: 0, - originalEndVersion: persistedChunkEndVersion, - currentChunk, - }) - - const chunksAfter = await chunkStore.getProjectChunks(projectId) - expect(chunksAfter.length).to.equal(chunksBefore.length) - expect(chunksAfter).to.deep.equal(chunksBefore) - - const finalPersistedVersionInRedis = ( - await redisBackend.getState(projectId) - ).persistedVersion - 
expect(finalPersistedVersionInRedis).to.equal(persistedChunkEndVersion) - }) - - it('should update the persisted version if it is behind the chunk store end version', async function () { - const now = Date.now() - - await redisBackend.queueChanges( - projectId, - new Snapshot(), - persistedChunkEndVersion - 1, - changesForChunk1, - { - persistTime: now + redisBackend.MAX_PERSIST_DELAY_MS, - expireTime: now + redisBackend.PROJECT_TTL_MS, - } - ) - // Force the persisted version in Redis to lag behind the chunk store, - // simulating the situation where a worker has persisted changes to the - // chunk store but failed to update the version in redis. - await redisBackend.setPersistedVersion( - projectId, - persistedChunkEndVersion - 1 - ) - - const chunksBefore = await chunkStore.getProjectChunks(projectId) - - // Persist buffer (which should do nothing as there are no new changes) - const persistResult = await persistBuffer( - projectId, - limitsToPersistImmediately - ) - - // Check the return value - const currentChunk = await chunkStore.loadLatest(projectId, { - persistedOnly: true, - }) - expect(persistResult).to.deep.equal({ - numberOfChangesPersisted: 0, - originalEndVersion: persistedChunkEndVersion, - currentChunk, - }) - - const chunksAfter = await chunkStore.getProjectChunks(projectId) - expect(chunksAfter.length).to.equal(chunksBefore.length) - expect(chunksAfter).to.deep.equal(chunksBefore) - - const finalPersistedVersionInRedis = ( - await redisBackend.getState(projectId) - ).persistedVersion - expect(finalPersistedVersionInRedis).to.equal(persistedChunkEndVersion) - }) - }) - - describe('when limits restrict the number of changes to persist', function () { - it('should persist only a subset of changes and update persistedVersion accordingly', async function () { - const now = Date.now() - const oneDayAgo = now - 1000 * 60 * 60 * 24 - const oneHourAgo = now - 1000 * 60 * 60 - const twoHoursAgo = now - 1000 * 60 * 60 * 2 - const threeHoursAgo = now - 1000 
* 60 * 60 * 3 - - // Create an initial file with some content - const initialContent = 'Initial content.' - const addInitialFileChange = new Change( - [new AddFileOperation('main.tex', File.fromString(initialContent))], - new Date(oneDayAgo), - [] - ) - - await persistChanges( - projectId, - [addInitialFileChange], - limitsToPersistImmediately, - initialVersion - ) - const versionAfterInitialSetup = initialVersion + 1 // Version is 1 - - // Queue three additional changes in Redis - const op1 = new TextOperation() - .retain(initialContent.length) - .insert(' Change 1.') - const change1 = new Change( - [new EditFileOperation('main.tex', op1)], - new Date(threeHoursAgo) - ) - const contentAfterC1 = initialContent + ' Change 1.' - - const op2 = new TextOperation() - .retain(contentAfterC1.length) - .insert(' Change 2.') - const change2 = new Change( - [new EditFileOperation('main.tex', op2)], - new Date(twoHoursAgo) - ) - const contentAfterC2 = contentAfterC1 + ' Change 2.' - - const op3 = new TextOperation() - .retain(contentAfterC2.length) - .insert(' Change 3.') - const change3 = new Change( - [new EditFileOperation('main.tex', op3)], - new Date(oneHourAgo) - ) - - const changesToQueue = [change1, change2, change3] - await redisBackend.queueChanges( - projectId, - new Snapshot(), // dummy snapshot - versionAfterInitialSetup, // startVersion for queued changes - changesToQueue, - { - persistTime: now + redisBackend.MAX_PERSIST_DELAY_MS, - expireTime: now + redisBackend.PROJECT_TTL_MS, - } - ) - await redisBackend.setPersistedVersion( - projectId, - versionAfterInitialSetup - ) - - // Define limits to only persist 2 additional changes (on top of the initial file creation), - // which should leave the final change (change3) in the redis buffer. 
- const restrictiveLimits = { - minChangeTimestamp: new Date(oneHourAgo), // only changes more than 1 hour old are considered - maxChangeTimestamp: new Date(twoHoursAgo), // they will be persisted if any change is older than 2 hours - } - - const persistResult = await persistBuffer(projectId, restrictiveLimits) - - // Check the return value of persistBuffer - expect(persistResult).to.exist - expect(persistResult).to.have.property('numberOfChangesPersisted') - expect(persistResult).to.have.property('originalEndVersion') - expect(persistResult).to.have.property('currentChunk') - expect(persistResult).to.have.property('resyncNeeded') - expect(persistResult.numberOfChangesPersisted).to.equal(2) // change1 + change2 - expect(persistResult.originalEndVersion).to.equal( - versionAfterInitialSetup - ) - expect(persistResult.resyncNeeded).to.be.false - - // Check the latest persisted chunk, it should only have the initial file and the first two changes - const latestChunk = await chunkStore.loadLatest(projectId, { - persistedOnly: true, - }) - expect(latestChunk).to.exist - expect(latestChunk.getChanges().length).to.equal(3) // addInitialFileChange + change1 + change2 - expect(latestChunk.getStartVersion()).to.equal(initialVersion) - const expectedEndVersion = versionAfterInitialSetup + 2 // Persisted two changes from the queue - expect(latestChunk.getEndVersion()).to.equal(expectedEndVersion) - - // Check that chunk returned by persistBuffer matches the latest chunk - expect(persistResult.currentChunk).to.deep.equal(latestChunk) - - // Check persisted version in Redis - const state = await redisBackend.getState(projectId) - expect(state.persistedVersion).to.equal(expectedEndVersion) - - // Check non-persisted changes in Redis - const nonPersisted = await redisBackend.getNonPersistedChanges( - projectId, - expectedEndVersion - ) - expect(nonPersisted).to.be.an('array').with.lengthOf(1) // change3 should remain - expect(nonPersisted).to.deep.equal([change3]) - }) - }) -}) 
diff --git a/services/history-v1/test/acceptance/js/storage/persist_redis_chunks.test.js b/services/history-v1/test/acceptance/js/storage/persist_redis_chunks.test.js deleted file mode 100644 index 58261703bb..0000000000 --- a/services/history-v1/test/acceptance/js/storage/persist_redis_chunks.test.js +++ /dev/null @@ -1,262 +0,0 @@ -'use strict' - -const { expect } = require('chai') -const { - Change, - AddFileOperation, - EditFileOperation, - TextOperation, - File, -} = require('overleaf-editor-core') -const cleanup = require('./support/cleanup') -const fixtures = require('./support/fixtures') -const chunkStore = require('../../../../storage/lib/chunk_store') -const { getState } = require('../../../../storage/lib/chunk_store/redis') -const { setupProjectState } = require('./support/redis') -const { runScript } = require('./support/runscript') -const persistChanges = require('../../../../storage/lib/persist_changes') - -const SCRIPT_PATH = 'storage/scripts/persist_redis_chunks.mjs' - -describe('persist_redis_chunks script', function () { - before(cleanup.everything) - - let now, past, future - let projectIdsStore // To store the generated project IDs, keyed by scenario name - let limitsToPersistImmediately - - before(async function () { - const farFuture = new Date() - farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000) - limitsToPersistImmediately = { - minChangeTimestamp: farFuture, - maxChangeTimestamp: farFuture, - maxChunkChanges: 100, // Allow enough changes for setup - } - - await fixtures.create() - - now = Date.now() - past = now - 10000 // 10 seconds ago - future = now + 60000 // 1 minute in the future - - projectIdsStore = {} - - // Scenario 1: project_due_for_persistence - // Goal: Has initial persisted content (v1), Redis has new changes (v1->v2) due for persistence. - // Expected: Script persists Redis changes, persistedVersion becomes 2. 
- { - const dueProjectId = await chunkStore.initializeProject() - projectIdsStore.project_due_for_persistence = dueProjectId - const initialContent = 'Initial content for due project.' - const initialChange = new Change( - [new AddFileOperation('main.tex', File.fromString(initialContent))], - new Date(now - 30000), // 30 seconds ago - [] - ) - await persistChanges( - dueProjectId, - [initialChange], - limitsToPersistImmediately, - 0 - ) - const secondChangeDue = new Change( - [ - new EditFileOperation( - 'main.tex', - new TextOperation() - .retain(initialContent.length) - .insert(' More content.') - ), - ], - new Date(now - 20000), // 20 seconds ago - [] - ) - await setupProjectState(dueProjectId, { - persistTime: past, - headVersion: 2, // After secondChangeDue - persistedVersion: 1, // Initial content is at v1 - changes: [secondChangeDue], // New changes in Redis (v1->v2) - expireTimeFuture: true, - }) - } - - // Scenario 2: project_not_due_for_persistence - // Goal: Has initial persisted content (v1), Redis has no new changes, not due. - // Expected: Script does nothing, persistedVersion remains 1. - { - const notDueProjectId = await chunkStore.initializeProject() - projectIdsStore.project_not_due_for_persistence = notDueProjectId - const initialContent = 'Initial content for not_due project.' - const initialChange = new Change( - [new AddFileOperation('main.tex', File.fromString(initialContent))], - new Date(now - 30000), // 30 seconds ago - [] - ) - await persistChanges( - notDueProjectId, - [initialChange], - limitsToPersistImmediately, - 0 - ) // Persisted: v0 -> v1 - await setupProjectState(notDueProjectId, { - persistTime: future, - headVersion: 1, // Matches persisted version - persistedVersion: 1, - changes: [], // No new changes in Redis - expireTimeFuture: true, - }) - } - - // Scenario 3: project_no_persist_time - // Goal: Has initial persisted content (v1), Redis has no new changes, no persistTime. 
- // Expected: Script does nothing, persistedVersion remains 1. - { - const noPersistTimeProjectId = await chunkStore.initializeProject() - projectIdsStore.project_no_persist_time = noPersistTimeProjectId - const initialContent = 'Initial content for no_persist_time project.' - const initialChange = new Change( - [new AddFileOperation('main.tex', File.fromString(initialContent))], - new Date(now - 30000), // 30 seconds ago - [] - ) - await persistChanges( - noPersistTimeProjectId, - [initialChange], - limitsToPersistImmediately, - 0 - ) // Persisted: v0 -> v1 - await setupProjectState(noPersistTimeProjectId, { - persistTime: null, - headVersion: 1, // Matches persisted version - persistedVersion: 1, - changes: [], // No new changes in Redis - expireTimeFuture: true, - }) - } - - // Scenario 4: project_due_fully_persisted - // Goal: Has content persisted up to v2, Redis reflects this (head=2, persisted=2), due for check. - // Expected: Script clears persistTime, persistedVersion remains 2. - { - const dueFullyPersistedId = await chunkStore.initializeProject() - projectIdsStore.project_due_fully_persisted = dueFullyPersistedId - const initialContent = 'Content part 1 for fully persisted.' 
- const change1 = new Change( - [new AddFileOperation('main.tex', File.fromString(initialContent))], - new Date(now - 40000), // 40 seconds ago - [] - ) - const change2 = new Change( - [ - new EditFileOperation( - 'main.tex', - new TextOperation() - .retain(initialContent.length) - .insert(' Content part 2.') - ), - ], - new Date(now - 30000), // 30 seconds ago - [] - ) - await persistChanges( - dueFullyPersistedId, - [change1, change2], - limitsToPersistImmediately, - 0 - ) - await setupProjectState(dueFullyPersistedId, { - persistTime: past, - headVersion: 2, - persistedVersion: 2, - changes: [], // No new unpersisted changes in Redis - expireTimeFuture: true, - }) - } - - // Scenario 5: project_fails_to_persist - // Goal: Has initial persisted content (v1), Redis has new changes (v1->v2) due for persistence, but these changes will cause an error. - // Expected: Script attempts to persist, fails, and persistTime is NOT cleared. - { - const failsToPersistProjectId = await chunkStore.initializeProject() - projectIdsStore.project_fails_to_persist = failsToPersistProjectId - const initialContent = 'Initial content for failure case.' - const initialChange = new Change( - [new AddFileOperation('main.tex', File.fromString(initialContent))], - new Date(now - 30000), // 30 seconds ago - [] - ) - await persistChanges( - failsToPersistProjectId, - [initialChange], - limitsToPersistImmediately, - 0 - ) - // This change will fail because it tries to insert at a non-existent offset - // assuming the initial content is shorter than 1000 characters. 
- const conflictingChange = new Change( - [ - new EditFileOperation( - 'main.tex', - new TextOperation().retain(1000).insert('This will fail.') - ), - ], - new Date(now - 20000), // 20 seconds ago - [] - ) - await setupProjectState(failsToPersistProjectId, { - persistTime: past, // Due for persistence - headVersion: 2, // After conflictingChange - persistedVersion: 1, // Initial content is at v1 - changes: [conflictingChange], // New changes in Redis (v1->v2) - expireTimeFuture: true, - }) - } - - await runScript(SCRIPT_PATH) - }) - - describe('when the buffer has new changes', function () { - it('should update persisted-version when the persist-time is in the past', async function () { - const projectId = projectIdsStore.project_due_for_persistence - const state = await getState(projectId) - // console.log('State after running script (project_due_for_persistence):', state) - expect(state.persistTime).to.be.null - expect(state.persistedVersion).to.equal(2) - }) - - it('should not perform any operations when the persist-time is in the future', async function () { - const projectId = projectIdsStore.project_not_due_for_persistence - const state = await getState(projectId) - expect(state.persistTime).to.equal(future) - expect(state.persistedVersion).to.equal(1) - }) - }) - - describe('when the changes in the buffer are already persisted', function () { - it('should delete persist-time for a project when the persist-time is in the past', async function () { - const projectId = projectIdsStore.project_due_fully_persisted - const state = await getState(projectId) - expect(state.persistTime).to.be.null - expect(state.persistedVersion).to.equal(2) - }) - }) - - describe('when there is no persist-time set', function () { - it('should not change redis when there is no persist-time set initially', async function () { - const projectId = projectIdsStore.project_no_persist_time - const state = await getState(projectId) - expect(state.persistTime).to.be.null - 
expect(state.persistedVersion).to.equal(1) - }) - }) - - describe('when persistence fails due to conflicting changes', function () { - it('should not clear persist-time and not update persisted-version', async function () { - const projectId = projectIdsStore.project_fails_to_persist - const state = await getState(projectId) - expect(state.persistTime).to.be.greaterThan(now) // persistTime should be pushed to the future by RETRY_DELAY_MS - expect(state.persistedVersion).to.equal(1) // persistedVersion should not change - }) - }) -}) diff --git a/services/history-v1/test/acceptance/js/storage/queue_changes.test.js b/services/history-v1/test/acceptance/js/storage/queue_changes.test.js deleted file mode 100644 index dbfe8c7e56..0000000000 --- a/services/history-v1/test/acceptance/js/storage/queue_changes.test.js +++ /dev/null @@ -1,416 +0,0 @@ -'use strict' - -const { expect } = require('chai') -const sinon = require('sinon') - -const cleanup = require('./support/cleanup') -const fixtures = require('./support/fixtures') -const testFiles = require('./support/test_files.js') -const storage = require('../../../../storage') -const chunkStore = storage.chunkStore -const queueChanges = storage.queueChanges -const redisBackend = require('../../../../storage/lib/chunk_store/redis') - -const core = require('overleaf-editor-core') -const AddFileOperation = core.AddFileOperation -const EditFileOperation = core.EditFileOperation -const TextOperation = core.TextOperation -const Change = core.Change -const Chunk = core.Chunk -const File = core.File -const Snapshot = core.Snapshot -const BlobStore = storage.BlobStore -const persistChanges = storage.persistChanges - -describe('queueChanges', function () { - let limitsToPersistImmediately - before(function () { - // Used to provide a limit which forces us to persist all of the changes - const farFuture = new Date() - farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000) - limitsToPersistImmediately = { - minChangeTimestamp: 
farFuture, - maxChangeTimestamp: farFuture, - maxChanges: 10, - maxChunkChanges: 10, - } - }) - - beforeEach(cleanup.everything) - beforeEach(fixtures.create) - afterEach(function () { - sinon.restore() - }) - - it('queues changes when redis has no snapshot (falls back to chunkStore with an empty chunk)', async function () { - // Start with an empty chunk store for the project - const projectId = fixtures.docs.uninitializedProject.id - await chunkStore.initializeProject(projectId) - - // Ensure that the initial state in redis is empty - const initialRedisState = await redisBackend.getState(projectId) - expect(initialRedisState.headVersion).to.be.null - expect(initialRedisState.headSnapshot).to.be.null - expect(initialRedisState.changes).to.be.an('array').that.is.empty - - // Add a test file to the blob store - const blobStore = new BlobStore(projectId) - await blobStore.putFile(testFiles.path('hello.txt')) - - // Prepare an initial change to add a single file to an empty project - const change = new Change( - [ - new AddFileOperation( - 'test.tex', - File.fromHash(testFiles.HELLO_TXT_HASH) - ), - ], - new Date(), - [] - ) - const changesToQueue = [change] - const endVersion = 0 - - // Queue the changes to add the test file - const status = await queueChanges(projectId, changesToQueue, endVersion) - expect(status).to.equal('ok') - - // Verify that we now have some state in redis - const redisState = await redisBackend.getState(projectId) - expect(redisState).to.not.be.null - - // Compute the expected snapshot after applying the changes - const expectedSnapshot = new Snapshot() - await expectedSnapshot.loadFiles('hollow', blobStore) - for (const change of changesToQueue) { - const hollowChange = change.clone() - await hollowChange.loadFiles('hollow', blobStore) - hollowChange.applyTo(expectedSnapshot, { strict: true }) - } - - // Confirm that state in redis matches the expected snapshot and changes queue - const expectedVersionInRedis = endVersion + 
changesToQueue.length - expect(redisState.headVersion).to.equal(expectedVersionInRedis) - expect(redisState.headSnapshot).to.deep.equal(expectedSnapshot.toRaw()) - expect(redisState.changes).to.deep.equal(changesToQueue.map(c => c.toRaw())) - }) - - it('queues changes when redis has no snapshot (falls back to chunkStore with an existing chunk)', async function () { - const projectId = fixtures.docs.uninitializedProject.id - - // Initialise the project in the chunk store using the "Hello World" test file - await chunkStore.initializeProject(projectId) - const blobStore = new BlobStore(projectId) - await blobStore.putFile(testFiles.path('hello.txt')) - const change = new Change( - [ - new AddFileOperation( - 'hello.tex', - File.fromHash(testFiles.HELLO_TXT_HASH) - ), - ], - new Date(), - [] - ) - const initialChanges = [change] - const initialVersion = 0 - - const result = await persistChanges( - projectId, - initialChanges, - limitsToPersistImmediately, - initialVersion - ) - // Compute the state after the initial changes are persisted for later comparison - const endVersion = initialVersion + initialChanges.length - const { currentChunk } = result - const originalSnapshot = result.currentChunk.getSnapshot() - await originalSnapshot.loadFiles('hollow', blobStore) - originalSnapshot.applyAll(currentChunk.getChanges()) - - // Ensure that the initial state in redis is empty - const initialRedisState = await redisBackend.getState(projectId) - expect(initialRedisState.headVersion).to.be.null - expect(initialRedisState.headSnapshot).to.be.null - expect(initialRedisState.changes).to.be.an('array').that.is.empty - - // Prepare a change to edit the existing file - const editFileOp = new EditFileOperation( - 'hello.tex', - new TextOperation() - .insert('Hello') - .retain(testFiles.HELLO_TXT_UTF8_LENGTH) - ) - const editFileChange = new Change([editFileOp], new Date(), []) - const changesToQueue = [editFileChange] - - // Queue the changes to edit the existing file - const 
status = await queueChanges(projectId, changesToQueue, endVersion) - expect(status).to.equal('ok') - - // Verify that we now have some state in redis - const redisState = await redisBackend.getState(projectId) - expect(redisState).to.not.be.null - - // Compute the expected snapshot after applying the changes - const expectedSnapshot = originalSnapshot.clone() - await expectedSnapshot.loadFiles('hollow', blobStore) - expectedSnapshot.applyAll(changesToQueue) - - // Confirm that state in redis matches the expected snapshot and changes queue - const expectedVersionInRedis = endVersion + changesToQueue.length - expect(redisState.headVersion).to.equal(expectedVersionInRedis) - expect(redisState.headSnapshot).to.deep.equal(expectedSnapshot.toRaw()) - expect(redisState.changes).to.deep.equal(changesToQueue.map(c => c.toRaw())) - }) - - it('queues changes when redis has a snapshot with existing changes', async function () { - const projectId = fixtures.docs.uninitializedProject.id - - // Initialise the project in redis using the "Hello World" test file - await chunkStore.initializeProject(projectId) - const blobStore = new BlobStore(projectId) - await blobStore.putFile(testFiles.path('hello.txt')) - const initialChangeOp = new AddFileOperation( - 'existing.tex', - File.fromHash(testFiles.HELLO_TXT_HASH) - ) - const initialChange = new Change([initialChangeOp], new Date(), []) - const initialChangesToQueue = [initialChange] - const versionBeforeInitialQueue = 0 - - // Queue the initial changes - const status = await queueChanges( - projectId, - initialChangesToQueue, - versionBeforeInitialQueue - ) - // Confirm that the initial changes were queued successfully - expect(status).to.equal('ok') - const versionAfterInitialQueue = - versionBeforeInitialQueue + initialChangesToQueue.length - - // Compute the snapshot after the initial changes for later use - const initialSnapshot = new Snapshot() - await initialSnapshot.loadFiles('hollow', blobStore) - for (const change of 
initialChangesToQueue) { - const hollowChange = change.clone() - await hollowChange.loadFiles('hollow', blobStore) - hollowChange.applyTo(initialSnapshot, { strict: true }) - } - - // Now prepare some subsequent changes for the queue - await blobStore.putFile(testFiles.path('graph.png')) - const addFileOp = new AddFileOperation( - 'graph.png', - File.fromHash(testFiles.GRAPH_PNG_HASH) - ) - const addFileChange = new Change([addFileOp], new Date(), []) - const editFileOp = new EditFileOperation( - 'existing.tex', - new TextOperation() - .insert('Hello') - .retain(testFiles.HELLO_TXT_UTF8_LENGTH) - ) - const editFileChange = new Change([editFileOp], new Date(), []) - - const subsequentChangesToQueue = [addFileChange, editFileChange] - const versionBeforeSubsequentQueue = versionAfterInitialQueue - - // Queue the subsequent changes - const subsequentStatus = await queueChanges( - projectId, - subsequentChangesToQueue, - versionBeforeSubsequentQueue - ) - expect(subsequentStatus).to.equal('ok') - - // Compute the expected snapshot after applying all changes - const expectedSnapshot = initialSnapshot.clone() - await expectedSnapshot.loadFiles('hollow', blobStore) - for (const change of subsequentChangesToQueue) { - const hollowChange = change.clone() - await hollowChange.loadFiles('hollow', blobStore) - hollowChange.applyTo(expectedSnapshot, { strict: true }) - } - - // Confirm that state in redis matches the expected snapshot and changes queue - const finalRedisState = await redisBackend.getState(projectId) - expect(finalRedisState).to.not.be.null - const expectedFinalVersion = - versionBeforeSubsequentQueue + subsequentChangesToQueue.length - expect(finalRedisState.headVersion).to.equal(expectedFinalVersion) - expect(finalRedisState.headSnapshot).to.deep.equal(expectedSnapshot.toRaw()) - const allQueuedChangesRaw = initialChangesToQueue - .concat(subsequentChangesToQueue) - .map(c => c.toRaw()) - expect(finalRedisState.changes).to.deep.equal(allQueuedChangesRaw) - }) 
- - it('skips queuing changes when there is no snapshot and the onlyIfExists flag is set', async function () { - // Start with an empty chunk store for the project - const projectId = fixtures.docs.uninitializedProject.id - await chunkStore.initializeProject(projectId) - - // Ensure that the initial state in redis is empty - const initialRedisState = await redisBackend.getState(projectId) - expect(initialRedisState.headVersion).to.be.null - expect(initialRedisState.headSnapshot).to.be.null - expect(initialRedisState.changes).to.be.an('array').that.is.empty - - // Add a test file to the blob store - const blobStore = new BlobStore(projectId) - await blobStore.putFile(testFiles.path('hello.txt')) - - // Prepare an initial change to add a single file to an empty project - const change = new Change( - [ - new AddFileOperation( - 'test.tex', - File.fromHash(testFiles.HELLO_TXT_HASH) - ), - ], - new Date(), - [] - ) - const changesToQueue = [change] - const endVersion = 0 - - // Queue the changes to add the test file - const status = await queueChanges(projectId, changesToQueue, endVersion, { - onlyIfExists: true, - }) - expect(status).to.equal('ignore') - - // Verify that the state in redis has not changed - const redisState = await redisBackend.getState(projectId) - expect(redisState).to.deep.equal(initialRedisState) - }) - - it('creates an initial hollow snapshot when redis has no snapshot (falls back to chunkStore with an empty chunk)', async function () { - // Start with an empty chunk store for the project - const projectId = fixtures.docs.uninitializedProject.id - await chunkStore.initializeProject(projectId) - const blobStore = new BlobStore(projectId) - await blobStore.putFile(testFiles.path('hello.txt')) - - // Prepare an initial change to add a single file to an empty project - const change = new Change( - [ - new AddFileOperation( - 'test.tex', - File.fromHash(testFiles.HELLO_TXT_HASH) - ), - ], - new Date(), - [] - ) - const changesToQueue = [change] - const 
endVersion = 0 - - // Queue the changes to add the test file - const status = await queueChanges(projectId, changesToQueue, endVersion) - expect(status).to.equal('ok') - - // Verify that we now have some state in redis - const redisState = await redisBackend.getState(projectId) - expect(redisState).to.not.be.null - expect(redisState.headSnapshot.files['test.tex']).to.deep.equal({ - stringLength: testFiles.HELLO_TXT_UTF8_LENGTH, - }) - }) - - it('throws ConflictingEndVersion if endVersion does not match current version (from chunkStore)', async function () { - const projectId = fixtures.docs.uninitializedProject.id - // Initialise an empty project in the chunk store - await chunkStore.initializeProject(projectId) - - // Ensure that the initial state in redis is empty - const initialRedisState = await redisBackend.getState(projectId) - expect(initialRedisState.headVersion).to.be.null - - // Prepare a change to add a file - const change = new Change( - [new AddFileOperation('test.tex', File.fromString(''))], - new Date(), - [] - ) - const changesToQueue = [change] - const incorrectEndVersion = 1 - - // Attempt to queue the changes with an incorrect endVersion (1 instead of 0) - await expect(queueChanges(projectId, changesToQueue, incorrectEndVersion)) - .to.be.rejectedWith(Chunk.ConflictingEndVersion) - .and.eventually.satisfies(err => { - expect(err.info).to.have.property( - 'clientEndVersion', - incorrectEndVersion - ) - expect(err.info).to.have.property('latestEndVersion', 0) - return true - }) - - // Verify that the state in redis has not changed - const redisStateAfterError = await redisBackend.getState(projectId) - expect(redisStateAfterError).to.deep.equal(initialRedisState) - }) - - it('throws ConflictingEndVersion if endVersion does not match current version (from redis snapshot)', async function () { - const projectId = fixtures.docs.uninitializedProject.id - - // Initialise the project in the redis with a test file - await 
chunkStore.initializeProject(projectId) - const initialChange = new Change( - [new AddFileOperation('initial.tex', File.fromString('content'))], - new Date(), - [] - ) - const initialChangesToQueue = [initialChange] - const versionBeforeInitialQueue = 0 - - // Queue the initial changes - await queueChanges( - projectId, - initialChangesToQueue, - versionBeforeInitialQueue - ) - const versionInRedisAfterSetup = - versionBeforeInitialQueue + initialChangesToQueue.length - - // Confirm that the initial changes were queued successfully - const initialRedisState = await redisBackend.getState(projectId) - expect(initialRedisState).to.not.be.null - expect(initialRedisState.headVersion).to.equal(versionInRedisAfterSetup) - - // Now prepare a subsequent change for the queue - const subsequentChange = new Change( - [new AddFileOperation('another.tex', File.fromString(''))], - new Date(), - [] - ) - const subsequentChangesToQueue = [subsequentChange] - const incorrectEndVersion = 0 - - // Attempt to queue the changes with an incorrect endVersion (0 instead of 1) - await expect( - queueChanges(projectId, subsequentChangesToQueue, incorrectEndVersion) - ) - .to.be.rejectedWith(Chunk.ConflictingEndVersion) - .and.eventually.satisfies(err => { - expect(err.info).to.have.property( - 'clientEndVersion', - incorrectEndVersion - ) - expect(err.info).to.have.property( - 'latestEndVersion', - versionInRedisAfterSetup - ) - return true - }) - - // Verify that the state in redis has not changed - const redisStateAfterError = await redisBackend.getState(projectId) - expect(redisStateAfterError).to.not.be.null - expect(redisStateAfterError).to.deep.equal(initialRedisState) - }) -}) diff --git a/services/history-v1/test/acceptance/js/storage/support/redis.js b/services/history-v1/test/acceptance/js/storage/support/redis.js deleted file mode 100644 index 3f5b9cda27..0000000000 --- a/services/history-v1/test/acceptance/js/storage/support/redis.js +++ /dev/null @@ -1,75 +0,0 @@ -'use strict' - 
-const { Snapshot } = require('overleaf-editor-core') -const redis = require('../../../../../storage/lib/redis') -const redisBackend = require('../../../../../storage/lib/chunk_store/redis') -const rclient = redis.rclientHistory -const keySchema = redisBackend.keySchema - -// Helper to set up a basic project state in Redis -async function setupProjectState( - projectId, - { - headVersion = 0, - persistedVersion = null, - expireTime = null, - persistTime = null, - changes = [], - expireTimeFuture = false, // Default to not setting future expire time unless specified - } -) { - const headSnapshot = new Snapshot() - await rclient.set( - keySchema.head({ projectId }), - JSON.stringify(headSnapshot.toRaw()) - ) - await rclient.set( - keySchema.headVersion({ projectId }), - headVersion.toString() - ) - - if (persistedVersion !== null) { - await rclient.set( - keySchema.persistedVersion({ projectId }), - persistedVersion.toString() - ) - } else { - await rclient.del(keySchema.persistedVersion({ projectId })) - } - - if (expireTime !== null) { - await rclient.set( - keySchema.expireTime({ projectId }), - expireTime.toString() - ) - } else { - // If expireTimeFuture is true, set it to a future time, otherwise delete it if null - if (expireTimeFuture) { - const futureExpireTime = Date.now() + 5 * 60 * 1000 // 5 minutes in the future - await rclient.set( - keySchema.expireTime({ projectId }), - futureExpireTime.toString() - ) - } else { - await rclient.del(keySchema.expireTime({ projectId })) - } - } - - if (persistTime !== null) { - await rclient.set( - keySchema.persistTime({ projectId }), - persistTime.toString() - ) - } else { - await rclient.del(keySchema.persistTime({ projectId })) - } - - if (changes.length > 0) { - const rawChanges = changes.map(c => JSON.stringify(c.toRaw())) - await rclient.rpush(keySchema.changes({ projectId }), ...rawChanges) - } else { - await rclient.del(keySchema.changes({ projectId })) - } -} - -module.exports = { setupProjectState, rclient, 
keySchema } diff --git a/services/history-v1/test/acceptance/js/storage/support/runscript.js b/services/history-v1/test/acceptance/js/storage/support/runscript.js deleted file mode 100644 index 7ff8355566..0000000000 --- a/services/history-v1/test/acceptance/js/storage/support/runscript.js +++ /dev/null @@ -1,35 +0,0 @@ -'use strict' - -const { promisify } = require('node:util') -const { execFile } = require('node:child_process') - -async function runScript(scriptPath, options = {}) { - const TIMEOUT = options.timeout || 10 * 1000 // 10 seconds default - let result - try { - result = await promisify(execFile)('node', [scriptPath], { - encoding: 'utf-8', - timeout: TIMEOUT, - env: { - ...process.env, - LOG_LEVEL: 'debug', // Override LOG_LEVEL for script output - }, - }) - result.status = 0 - } catch (err) { - const { stdout, stderr, code } = err - if (typeof code !== 'number') { - console.error(`Error running script ${scriptPath}:`, err) - throw err - } - result = { stdout, stderr, status: code } - } - // The script might exit with status 1 if it finds no keys to process, which is ok - if (result.status !== 0 && result.status !== 1) { - console.error(`Script ${scriptPath} failed:`, result.stderr) - throw new Error(`Script ${scriptPath} failed with status ${result.status}`) - } - return result -} - -module.exports = { runScript } diff --git a/services/notifications/docker-compose.ci.yml b/services/notifications/docker-compose.ci.yml index 24b57ab084..8fd86c1fbb 100644 --- a/services/notifications/docker-compose.ci.yml +++ b/services/notifications/docker-compose.ci.yml @@ -24,13 +24,10 @@ services: MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" - volumes: - - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started user: node - entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance diff --git 
a/services/notifications/docker-compose.yml b/services/notifications/docker-compose.yml index 167e45fdb1..090742ff6d 100644 --- a/services/notifications/docker-compose.yml +++ b/services/notifications/docker-compose.yml @@ -26,7 +26,6 @@ services: - .:/overleaf/services/notifications - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries - - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/notifications environment: ELASTIC_SEARCH_DSN: es:9200 @@ -40,7 +39,6 @@ services: depends_on: mongo: condition: service_started - entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance mongo: diff --git a/services/project-history/app/js/HistoryStoreManager.js b/services/project-history/app/js/HistoryStoreManager.js index 38658bdf5b..bb41dfb3c0 100644 --- a/services/project-history/app/js/HistoryStoreManager.js +++ b/services/project-history/app/js/HistoryStoreManager.js @@ -35,10 +35,7 @@ class StringStream extends stream.Readable { _mocks.getMostRecentChunk = (projectId, historyId, callback) => { const path = `projects/${historyId}/latest/history` logger.debug({ projectId, historyId }, 'getting chunk from history service') - _requestChunk({ path, json: true }, (err, chunk) => { - if (err) return callback(OError.tag(err)) - callback(null, chunk) - }) + _requestChunk({ path, json: true }, callback) } /** @@ -57,10 +54,7 @@ export function getChunkAtVersion(projectId, historyId, version, callback) { { projectId, historyId, version }, 'getting chunk from history service for version' ) - _requestChunk({ path, json: true }, (err, chunk) => { - if (err) return callback(OError.tag(err)) - callback(null, chunk) - }) + _requestChunk({ path, json: true }, callback) } export function getMostRecentVersion(projectId, historyId, callback) { @@ -74,10 +68,8 @@ export function getMostRecentVersion(projectId, historyId, callback) { 
_.sortBy(chunk.chunk.history.changes || [], x => x.timestamp) ) // find the latest project and doc versions in the chunk - _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) => { - if (err1) err1 = OError.tag(err1) + _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) => _getLatestV2DocVersions(projectId, chunk, (err2, v2DocVersions) => { - if (err2) err2 = OError.tag(err2) // return the project and doc versions const projectStructureAndDocVersions = { project: projectVersion, @@ -91,7 +83,7 @@ export function getMostRecentVersion(projectId, historyId, callback) { chunk ) }) - }) + ) }) } @@ -219,10 +211,7 @@ export function getProjectBlob(historyId, blobHash, callback) { logger.debug({ historyId, blobHash }, 'getting blob from history service') _requestHistoryService( { path: `projects/${historyId}/blobs/${blobHash}` }, - (err, blob) => { - if (err) return callback(OError.tag(err)) - callback(null, blob) - } + callback ) } @@ -288,10 +277,7 @@ function createBlobFromString(historyId, data, fileId, callback) { (fsPath, cb) => { _createBlob(historyId, fsPath, cb) }, - (err, hash) => { - if (err) return callback(OError.tag(err)) - callback(null, hash) - } + callback ) } @@ -344,7 +330,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { try { ranges = HistoryBlobTranslator.createRangeBlobDataFromUpdate(update) } catch (error) { - return callback(OError.tag(error)) + return callback(error) } createBlobFromString( historyId, @@ -352,7 +338,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { `project-${projectId}-doc-${update.doc}`, (err, fileHash) => { if (err) { - return callback(OError.tag(err)) + return callback(err) } if (ranges) { createBlobFromString( @@ -361,7 +347,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { `project-${projectId}-doc-${update.doc}-ranges`, (err, rangesHash) => { if (err) { - return callback(OError.tag(err)) + return 
callback(err) } logger.debug( { fileHash, rangesHash }, @@ -429,7 +415,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { }, (err, fileHash) => { if (err) { - return callback(OError.tag(err)) + return callback(err) } if (update.hash && update.hash !== fileHash) { logger.warn( @@ -461,7 +447,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { }, (err, fileHash) => { if (err) { - return callback(OError.tag(err)) + return callback(err) } logger.debug({ fileHash }, 'created empty blob for file') callback(null, { file: fileHash }) @@ -534,10 +520,7 @@ export function initializeProject(historyId, callback) { export function deleteProject(projectId, callback) { _requestHistoryService( { method: 'DELETE', path: `projects/${projectId}` }, - err => { - if (err) return callback(OError.tag(err)) - callback(null) - } + callback ) } diff --git a/services/project-history/app/js/SyncManager.js b/services/project-history/app/js/SyncManager.js index 43cb61be9f..ef8caf69eb 100644 --- a/services/project-history/app/js/SyncManager.js +++ b/services/project-history/app/js/SyncManager.js @@ -23,7 +23,6 @@ import { isInsert, isDelete } from './Utils.js' /** * @import { Comment as HistoryComment, TrackedChange as HistoryTrackedChange } from 'overleaf-editor-core' - * @import { CommentRawData, TrackedChangeRawData } from 'overleaf-editor-core/lib/types' * @import { Comment, Entity, ResyncDocContentUpdate, RetainOp, TrackedChange } from './types' * @import { TrackedChangeTransition, TrackingDirective, TrackingType, Update } from './types' * @import { ProjectStructureUpdate } from './types' @@ -765,19 +764,11 @@ class SyncUpdateExpander { } const persistedComments = file.getComments().toArray() - if (update.resyncDocContent.historyOTRanges) { - this.queueUpdatesForOutOfSyncCommentsHistoryOT( - update, - pathname, - file.getComments().toRaw() - ) - } else { - await this.queueUpdatesForOutOfSyncComments( - update, - pathname, - 
persistedComments - ) - } + await this.queueUpdatesForOutOfSyncComments( + update, + pathname, + persistedComments + ) const persistedChanges = file.getTrackedChanges().asSorted() await this.queueUpdatesForOutOfSyncTrackedChanges( @@ -834,91 +825,6 @@ class SyncUpdateExpander { return expandedUpdate } - /** - * Queue updates for out of sync comments - * - * @param {ResyncDocContentUpdate} update - * @param {string} pathname - * @param {CommentRawData[]} persistedComments - */ - queueUpdatesForOutOfSyncCommentsHistoryOT( - update, - pathname, - persistedComments - ) { - const expectedComments = - update.resyncDocContent.historyOTRanges?.comments ?? [] - const expectedCommentsById = new Map( - expectedComments.map(comment => [comment.id, comment]) - ) - const persistedCommentsById = new Map( - persistedComments.map(comment => [comment.id, comment]) - ) - - // Delete any persisted comment that is not in the expected comment list. - for (const persistedComment of persistedComments) { - if (!expectedCommentsById.has(persistedComment.id)) { - this.expandedUpdates.push({ - doc: update.doc, - op: [{ deleteComment: persistedComment.id }], - meta: { - pathname, - resync: true, - origin: this.origin, - ts: update.meta.ts, - }, - }) - } - } - - for (const expectedComment of expectedComments) { - const persistedComment = persistedCommentsById.get(expectedComment.id) - if ( - persistedComment && - commentRangesAreInSyncHistoryOT(persistedComment, expectedComment) - ) { - if (expectedComment.resolved === persistedComment.resolved) { - // Both comments are identical; do nothing - } else { - // Only the resolved state differs - this.expandedUpdates.push({ - doc: update.doc, - op: [ - { - commentId: expectedComment.id, - resolved: expectedComment.resolved, - }, - ], - meta: { - pathname, - resync: true, - origin: this.origin, - ts: update.meta.ts, - }, - }) - } - } else { - // New comment or ranges differ - this.expandedUpdates.push({ - doc: update.doc, - op: [ - { - commentId: 
expectedComment.id, - ranges: expectedComment.ranges, - resolved: expectedComment.resolved, - }, - ], - meta: { - pathname, - resync: true, - origin: this.origin, - ts: update.meta.ts, - }, - }) - } - } - } - /** * Queue updates for out of sync comments * @@ -1045,7 +951,6 @@ class SyncUpdateExpander { for (const transition of getTrackedChangesTransitions( persistedChanges, expectedChanges, - update.resyncDocContent.historyOTRanges?.trackedChanges || [], expectedContent.length )) { if (transition.pos > cursor) { @@ -1113,25 +1018,6 @@ class SyncUpdateExpander { } } -/** - * Compares the ranges in the persisted and expected comments - * - * @param {CommentRawData} persistedComment - * @param {CommentRawData} expectedComment - */ -function commentRangesAreInSyncHistoryOT(persistedComment, expectedComment) { - if (persistedComment.ranges.length !== expectedComment.ranges.length) { - return false - } - for (let i = 0; i < persistedComment.ranges.length; i++) { - const persistedRange = persistedComment.ranges[i] - const expectedRange = expectedComment.ranges[i] - if (persistedRange.pos !== expectedRange.pos) return false - if (persistedRange.length !== expectedRange.length) return false - } - return true -} - /** * Compares the ranges in the persisted and expected comments * @@ -1163,13 +1049,11 @@ function commentRangesAreInSync(persistedComment, expectedComment) { * * @param {readonly HistoryTrackedChange[]} persistedChanges * @param {TrackedChange[]} expectedChanges - * @param {TrackedChangeRawData[]} persistedChangesHistoryOT * @param {number} docLength */ function getTrackedChangesTransitions( persistedChanges, expectedChanges, - persistedChangesHistoryOT, docLength ) { /** @type {TrackedChangeTransition[]} */ @@ -1192,19 +1076,6 @@ function getTrackedChangesTransitions( }) } - for (const change of persistedChangesHistoryOT) { - transitions.push({ - stage: 'expected', - pos: change.range.pos, - tracking: change.tracking, - }) - transitions.push({ - stage: 
'expected', - pos: change.range.pos + change.range.length, - tracking: { type: 'none' }, - }) - } - for (const change of expectedChanges) { const op = change.op const pos = op.hpos ?? op.p diff --git a/services/project-history/app/js/UpdateCompressor.js b/services/project-history/app/js/UpdateCompressor.js index 5ae7591a7f..471fc791ab 100644 --- a/services/project-history/app/js/UpdateCompressor.js +++ b/services/project-history/app/js/UpdateCompressor.js @@ -1,15 +1,8 @@ // @ts-check -import Metrics from '@overleaf/metrics' import OError from '@overleaf/o-error' import DMP from 'diff-match-patch' import { EditOperationBuilder } from 'overleaf-editor-core' -import zlib from 'node:zlib' -import { ReadableString, WritableBuffer } from '@overleaf/stream-utils' -import Stream from 'node:stream' -import logger from '@overleaf/logger' -import { callbackify } from '@overleaf/promise-utils' -import Settings from '@overleaf/settings' /** * @import { DeleteOp, InsertOp, Op, Update } from './types' @@ -169,9 +162,7 @@ export function concatUpdatesWithSameVersion(updates) { lastUpdate.op != null && lastUpdate.v === update.v && lastUpdate.doc === update.doc && - lastUpdate.pathname === update.pathname && - EditOperationBuilder.isValid(update.op[0]) === - EditOperationBuilder.isValid(lastUpdate.op[0]) + lastUpdate.pathname === update.pathname ) { lastUpdate.op = lastUpdate.op.concat(update.op) if (update.meta.doc_hash == null) { @@ -189,66 +180,6 @@ export function concatUpdatesWithSameVersion(updates) { return concattedUpdates } -async function estimateStorage(updates) { - const blob = JSON.stringify(updates) - const bytes = Buffer.from(blob).byteLength - const read = new ReadableString(blob) - const compress = zlib.createGzip() - const write = new WritableBuffer() - await Stream.promises.pipeline(read, compress, write) - const bytesGz = write.size() - return { bytes, bytesGz, nUpdates: updates.length } -} - -/** - * @param {Update[]} rawUpdates - * @param {string} projectId - 
* @param {import("./Profiler").Profiler} profile - * @return {Promise} - */ -async function compressRawUpdatesWithMetrics(rawUpdates, projectId, profile) { - if (100 * Math.random() > Settings.estimateCompressionSample) { - return compressRawUpdatesWithProfile(rawUpdates, projectId, profile) - } - const before = await estimateStorage(rawUpdates) - profile.log('estimateRawUpdatesSize') - const updates = compressRawUpdatesWithProfile(rawUpdates, projectId, profile) - const after = await estimateStorage(updates) - for (const [path, values] of Object.entries({ before, after })) { - for (const [method, v] of Object.entries(values)) { - Metrics.summary('updates_compression_estimate', v, { path, method }) - } - } - for (const method of Object.keys(before)) { - const percentage = Math.ceil(100 * (after[method] / before[method])) - Metrics.summary('updates_compression_percentage', percentage, { method }) - } - profile.log('estimateCompressedUpdatesSize') - return updates -} - -export const compressRawUpdatesWithMetricsCb = callbackify( - compressRawUpdatesWithMetrics -) - -/** - * @param {Update[]} rawUpdates - * @param {string} projectId - * @param {import("./Profiler").Profiler} profile - * @return {Update[]} - */ -function compressRawUpdatesWithProfile(rawUpdates, projectId, profile) { - const updates = compressRawUpdates(rawUpdates) - const timeTaken = profile.log('compressRawUpdates').getTimeDelta() - if (timeTaken >= 1000) { - logger.debug( - { projectId, updates: rawUpdates, timeTaken }, - 'slow compression of raw updates' - ) - } - return updates -} - export function compressRawUpdates(rawUpdates) { let updates = convertToSingleOpUpdates(rawUpdates) updates = compressUpdates(updates) diff --git a/services/project-history/app/js/UpdatesProcessor.js b/services/project-history/app/js/UpdatesProcessor.js index b4895c012d..a76241d7ca 100644 --- a/services/project-history/app/js/UpdatesProcessor.js +++ b/services/project-history/app/js/UpdatesProcessor.js @@ -546,10 
+546,7 @@ export function _processUpdates( } if (filteredUpdates.length === 0) { // return early if there are no updates to apply - return SyncManager.setResyncState(projectId, newSyncState, err => { - if (err) return callback(err) - callback(null, { resyncNeeded: false }) - }) + return SyncManager.setResyncState(projectId, newSyncState, callback) } // only make request to history service if we have actual updates to process _getMostRecentVersionWithDebug( @@ -596,17 +593,17 @@ export function _processUpdates( return cb(err) } profile.log('skipAlreadyAppliedUpdates') - cb(null, unappliedUpdates) - }, - (unappliedUpdates, cb) => { - UpdateCompressor.compressRawUpdatesWithMetricsCb( - unappliedUpdates, - projectId, - profile, - cb - ) - }, - (compressedUpdates, cb) => { + const compressedUpdates = + UpdateCompressor.compressRawUpdates(unappliedUpdates) + const timeTaken = profile + .log('compressRawUpdates') + .getTimeDelta() + if (timeTaken >= 1000) { + logger.debug( + { projectId, updates: unappliedUpdates, timeTaken }, + 'slow compression of raw updates' + ) + } cb = profile.wrap('createBlobs', cb) BlobManager.createBlobsForUpdates( projectId, diff --git a/services/project-history/app/js/types.ts b/services/project-history/app/js/types.ts index c11b7741e3..96701e587f 100644 --- a/services/project-history/app/js/types.ts +++ b/services/project-history/app/js/types.ts @@ -3,8 +3,6 @@ import { LinkedFileData, RawEditOperation, RawOrigin, - CommentRawData, - TrackedChangeRawData, } from 'overleaf-editor-core/lib/types' export type Update = @@ -120,10 +118,6 @@ export type ResyncDocContentUpdate = { content: string version: number ranges?: Ranges - historyOTRanges?: { - comments: CommentRawData[] - trackedChanges: TrackedChangeRawData[] - } resolvedCommentIds?: string[] } projectHistoryId: string diff --git a/services/project-history/config/settings.defaults.cjs b/services/project-history/config/settings.defaults.cjs index d767cddd96..d259d070b9 100644 --- 
a/services/project-history/config/settings.defaults.cjs +++ b/services/project-history/config/settings.defaults.cjs @@ -110,8 +110,4 @@ module.exports = { shortHistoryQueues: (process.env.SHORT_HISTORY_QUEUES || '') .split(',') .filter(s => !!s), - estimateCompressionSample: parseInt( - process.env.ESTIMATE_COMPRESSION_SAMPLE || '0', - 10 - ), } diff --git a/services/project-history/docker-compose.ci.yml b/services/project-history/docker-compose.ci.yml index ca15f35fef..2fe97bd9b3 100644 --- a/services/project-history/docker-compose.ci.yml +++ b/services/project-history/docker-compose.ci.yml @@ -28,15 +28,12 @@ services: MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" - volumes: - - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started redis: condition: service_healthy user: node - entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance @@ -48,7 +45,7 @@ services: command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . 
user: root redis: - image: redis:7.4.3 + image: redis healthcheck: test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/project-history/docker-compose.yml b/services/project-history/docker-compose.yml index 95a36b5fcb..68360baf44 100644 --- a/services/project-history/docker-compose.yml +++ b/services/project-history/docker-compose.yml @@ -26,7 +26,6 @@ services: - .:/overleaf/services/project-history - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries - - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/project-history environment: ELASTIC_SEARCH_DSN: es:9200 @@ -46,11 +45,10 @@ services: condition: service_started redis: condition: service_healthy - entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance redis: - image: redis:7.4.3 + image: redis healthcheck: test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/project-history/package.json b/services/project-history/package.json index 4160f36f6f..2a54a807d3 100644 --- a/services/project-history/package.json +++ b/services/project-history/package.json @@ -9,8 +9,8 @@ "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", "start": "node app.js", "nodemon": "node --watch app.js", - "test:acceptance:_run": "mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", - "test:unit:_run": "mocha --loader=esmock --recursive --reporter spec $@ test/unit/js", + "test:acceptance:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:unit:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec $@ test/unit/js", "lint": "eslint --max-warnings 0 --format unix .", "format": "prettier --list-different $PWD/'**/*.*js'", "format:fix": "prettier --write $PWD/'**/*.*js'", @@ -25,7 +25,6 @@ 
"@overleaf/promise-utils": "*", "@overleaf/redis-wrapper": "*", "@overleaf/settings": "*", - "@overleaf/stream-utils": "*", "async": "^3.2.5", "aws-sdk": "^2.650.0", "body-parser": "^1.20.3", diff --git a/services/project-history/scripts/retry_failures.js b/services/project-history/scripts/retry_failures.js deleted file mode 100755 index 85ee21faf4..0000000000 --- a/services/project-history/scripts/retry_failures.js +++ /dev/null @@ -1,26 +0,0 @@ -import * as RetryManager from '../app/js/RetryManager.js' -import minimist from 'minimist' - -const args = minimist(process.argv.slice(2), { - string: ['failureType', 'timeout', 'limit'], - default: { - failureType: 'soft', - timeout: (60 * 60 * 1000).toString(), - limit: (100_000).toString(), - }, -}) - -const failureType = args.failureType -const timeout = parseInt(args.timeout, 10) -const limit = parseInt(args.limit, 10) - -RetryManager.retryFailures({ failureType, timeout, limit }, (err, result) => { - if (err) { - console.error(err) - process.exit(1) - } else { - console.log(JSON.stringify(result)) - console.log('Done.') - } - process.exit(0) -}) diff --git a/services/project-history/test/acceptance/js/SyncTests.js b/services/project-history/test/acceptance/js/SyncTests.js index f7420e6cdb..89e002d4dd 100644 --- a/services/project-history/test/acceptance/js/SyncTests.js +++ b/services/project-history/test/acceptance/js/SyncTests.js @@ -1225,7 +1225,7 @@ describe('Syncing with web and doc-updater', function () { ) }) - it('should add comments in the history store', function (done) { + it('should fix comments in the history store', function (done) { const commentId = 'comment-id' const addComment = MockHistoryStore() .post(`/api/projects/${historyId}/legacy_changes`, body => { @@ -1315,1195 +1315,6 @@ describe('Syncing with web and doc-updater', function () { } ) }) - - it('should add comments in the history store (history-ot)', function (done) { - const commentId = 'comment-id' - const addComment = MockHistoryStore() 
- .post(`/api/projects/${historyId}/legacy_changes`, body => { - expect(body).to.deep.equal([ - { - v2Authors: [], - authors: [], - timestamp: this.timestamp.toJSON(), - operations: [ - { - pathname: 'main.tex', - commentId, - ranges: [{ pos: 1, length: 10 }], - }, - ], - origin: { kind: 'test-origin' }, - }, - ]) - return true - }) - .query({ end_version: 0 }) - .reply(204) - - async.series( - [ - cb => { - ProjectHistoryClient.resyncHistory(this.project_id, cb) - }, - cb => { - const update = { - projectHistoryId: historyId, - resyncProjectStructure: { - docs: [{ path: '/main.tex' }], - files: [], - }, - meta: { - ts: this.timestamp, - }, - } - ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) - }, - cb => { - const update = { - path: '/main.tex', - projectHistoryId: historyId, - resyncDocContent: { - content: 'a\nb', - historyOTRanges: { - comments: [ - { - id: commentId, - ranges: [ - { - pos: 1, - length: 10, - }, - ], - }, - ], - }, - }, - doc: this.doc_id, - meta: { - ts: this.timestamp, - }, - } - ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) - }, - cb => { - ProjectHistoryClient.flushProject(this.project_id, cb) - }, - ], - error => { - if (error) { - return done(error) - } - assert( - addComment.isDone(), - `/api/projects/${historyId}/changes should have been called` - ) - done() - } - ) - }) - - it('should add tracked changes in the history store', function (done) { - const fixTrackedChange = MockHistoryStore() - .post(`/api/projects/${historyId}/legacy_changes`, body => { - expect(body).to.deep.equal([ - { - v2Authors: [], - authors: [], - timestamp: this.timestamp.toJSON(), - operations: [ - { - pathname: 'main.tex', - textOperation: [ - { - r: 1, - tracking: { - ts: this.timestamp.toJSON(), - type: 'delete', - userId: 'user-id', - }, - }, - { - r: 1, - tracking: { - ts: this.timestamp.toJSON(), - type: 'insert', - userId: 'user-id', - }, - }, - 1, - ], - }, - ], - origin: { kind: 'test-origin' }, - }, - ]) - return true 
- }) - .query({ end_version: 0 }) - .reply(204) - - async.series( - [ - cb => { - ProjectHistoryClient.resyncHistory(this.project_id, cb) - }, - cb => { - const update = { - projectHistoryId: historyId, - resyncProjectStructure: { - docs: [{ path: '/main.tex' }], - files: [], - }, - meta: { - ts: this.timestamp, - }, - } - ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) - }, - cb => { - const update = { - path: '/main.tex', - projectHistoryId: historyId, - resyncDocContent: { - content: 'a\nb', - ranges: { - changes: [ - { - id: 'id1', - op: { - d: 'a', - p: 0, - }, - metadata: { - user_id: 'user-id', - ts: this.timestamp, - }, - }, - { - id: 'id2', - op: { - i: '\n', - p: 0, - hpos: 1, - }, - metadata: { - user_id: 'user-id', - ts: this.timestamp, - }, - }, - ], - }, - }, - doc: this.doc_id, - meta: { - ts: this.timestamp, - }, - } - ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) - }, - cb => { - ProjectHistoryClient.flushProject(this.project_id, cb) - }, - ], - error => { - if (error) { - return done(error) - } - assert( - fixTrackedChange.isDone(), - `/api/projects/${historyId}/changes should have been called` - ) - done() - } - ) - }) - - it('should add tracked changes in the history store (history-ot)', function (done) { - const fixTrackedChange = MockHistoryStore() - .post(`/api/projects/${historyId}/legacy_changes`, body => { - expect(body).to.deep.equal([ - { - v2Authors: [], - authors: [], - timestamp: this.timestamp.toJSON(), - operations: [ - { - pathname: 'main.tex', - textOperation: [ - { - r: 1, - tracking: { - ts: this.timestamp.toJSON(), - type: 'delete', - userId: 'user-id', - }, - }, - { - r: 1, - tracking: { - ts: this.timestamp.toJSON(), - type: 'insert', - userId: 'user-id', - }, - }, - 1, - ], - }, - ], - origin: { kind: 'test-origin' }, - }, - ]) - return true - }) - .query({ end_version: 0 }) - .reply(204) - - async.series( - [ - cb => { - ProjectHistoryClient.resyncHistory(this.project_id, cb) - }, - cb => { 
- const update = { - projectHistoryId: historyId, - resyncProjectStructure: { - docs: [{ path: '/main.tex' }], - files: [], - }, - meta: { - ts: this.timestamp, - }, - } - ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) - }, - cb => { - const update = { - path: '/main.tex', - projectHistoryId: historyId, - resyncDocContent: { - content: 'a\nb', - historyOTRanges: { - trackedChanges: [ - { - range: { pos: 0, length: 1 }, - tracking: { - ts: this.timestamp.toJSON(), - type: 'delete', - userId: 'user-id', - }, - }, - { - range: { pos: 1, length: 1 }, - tracking: { - ts: this.timestamp.toJSON(), - type: 'insert', - userId: 'user-id', - }, - }, - ], - }, - }, - doc: this.doc_id, - meta: { - ts: this.timestamp, - }, - } - ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) - }, - cb => { - ProjectHistoryClient.flushProject(this.project_id, cb) - }, - ], - error => { - if (error) { - return done(error) - } - assert( - fixTrackedChange.isDone(), - `/api/projects/${historyId}/changes should have been called` - ) - done() - } - ) - }) - }) - - describe("when a doc's ranges are out of sync", function () { - const commentId = 'comment-id' - beforeEach(function () { - MockHistoryStore() - .get(`/api/projects/${historyId}/latest/history`) - .reply(200, { - chunk: { - history: { - snapshot: { - files: { - 'main.tex': { - hash: '0a207c060e61f3b88eaee0a8cd0696f46fb155eb', - rangesHash: '0a207c060e61f3b88eaee0a8cd0696f46fb155ec', - stringLength: 3, - }, - }, - }, - changes: [], - }, - startVersion: 0, - }, - }) - - MockHistoryStore() - .get( - `/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb` - ) - .reply(200, 'a\nb') - - MockHistoryStore() - .get( - `/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155ec` - ) - .reply( - 200, - JSON.stringify({ - comments: [{ id: commentId, ranges: [{ pos: 0, length: 3 }] }], - trackedChanges: [ - { - range: { pos: 0, length: 1 }, - tracking: { - ts: this.timestamp.toJSON(), - 
type: 'delete', - userId: 'user-id', - }, - }, - { - range: { pos: 2, length: 1 }, - tracking: { - ts: this.timestamp.toJSON(), - type: 'insert', - userId: 'user-id', - }, - }, - ], - }) - ) - }) - - it('should fix comments in the history store', function (done) { - const addComment = MockHistoryStore() - .post(`/api/projects/${historyId}/legacy_changes`, body => { - expect(body).to.deep.equal([ - { - v2Authors: [], - authors: [], - timestamp: this.timestamp.toJSON(), - operations: [ - { - pathname: 'main.tex', - commentId, - ranges: [{ pos: 1, length: 2 }], - }, - ], - origin: { kind: 'test-origin' }, - }, - ]) - return true - }) - .query({ end_version: 0 }) - .reply(204) - - async.series( - [ - cb => { - ProjectHistoryClient.resyncHistory(this.project_id, cb) - }, - cb => { - const update = { - projectHistoryId: historyId, - resyncProjectStructure: { - docs: [{ path: '/main.tex' }], - files: [], - }, - meta: { - ts: this.timestamp, - }, - } - ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) - }, - cb => { - const update = { - path: '/main.tex', - projectHistoryId: historyId, - resyncDocContent: { - content: 'a\nb', - ranges: { - comments: [ - { - id: commentId, - op: { - c: 'a', - p: 0, - hpos: 1, - hlen: 2, - t: commentId, - }, - meta: { - user_id: 'user-id', - ts: this.timestamp, - }, - }, - ], - changes: [ - { - id: 'id1', - op: { - d: 'a', - p: 0, - }, - metadata: { - user_id: 'user-id', - ts: this.timestamp, - }, - }, - { - id: 'id2', - op: { - i: '\n', - p: 1, - hpos: 2, - }, - metadata: { - user_id: 'user-id', - ts: this.timestamp, - }, - }, - ], - }, - }, - doc: this.doc_id, - meta: { - ts: this.timestamp, - }, - } - ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) - }, - cb => { - ProjectHistoryClient.flushProject(this.project_id, cb) - }, - ], - error => { - if (error) { - return done(error) - } - assert( - addComment.isDone(), - `/api/projects/${historyId}/changes should have been called` - ) - done() - } - ) - }) - - 
it('should fix resolved state for comments in the history store', function (done) { - const addComment = MockHistoryStore() - .post(`/api/projects/${historyId}/legacy_changes`, body => { - expect(body).to.deep.equal([ - { - v2Authors: [], - authors: [], - timestamp: this.timestamp.toJSON(), - operations: [ - { - pathname: 'main.tex', - commentId, - resolved: true, - }, - ], - origin: { kind: 'test-origin' }, - }, - ]) - return true - }) - .query({ end_version: 0 }) - .reply(204) - - async.series( - [ - cb => { - ProjectHistoryClient.resyncHistory(this.project_id, cb) - }, - cb => { - const update = { - projectHistoryId: historyId, - resyncProjectStructure: { - docs: [{ path: '/main.tex' }], - files: [], - }, - meta: { - ts: this.timestamp, - }, - } - ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) - }, - cb => { - const update = { - path: '/main.tex', - projectHistoryId: historyId, - resyncDocContent: { - content: 'a\nb', - resolvedCommentIds: [commentId], - ranges: { - comments: [ - { - id: commentId, - op: { - c: 'a', - p: 0, - hpos: 0, - hlen: 3, - t: commentId, - }, - meta: { - user_id: 'user-id', - ts: this.timestamp, - }, - }, - ], - changes: [ - { - id: 'id1', - op: { - d: 'a', - p: 0, - }, - metadata: { - user_id: 'user-id', - ts: this.timestamp, - }, - }, - { - id: 'id2', - op: { - i: '\n', - p: 1, - hpos: 2, - }, - metadata: { - user_id: 'user-id', - ts: this.timestamp, - }, - }, - ], - }, - }, - doc: this.doc_id, - meta: { - ts: this.timestamp, - }, - } - ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) - }, - cb => { - ProjectHistoryClient.flushProject(this.project_id, cb) - }, - ], - error => { - if (error) { - return done(error) - } - assert( - addComment.isDone(), - `/api/projects/${historyId}/changes should have been called` - ) - done() - } - ) - }) - - it('should fix comments in the history store (history-ot)', function (done) { - const addComment = MockHistoryStore() - 
.post(`/api/projects/${historyId}/legacy_changes`, body => { - expect(body).to.deep.equal([ - { - v2Authors: [], - authors: [], - timestamp: this.timestamp.toJSON(), - operations: [ - { - pathname: 'main.tex', - commentId, - ranges: [{ pos: 1, length: 2 }], - }, - ], - origin: { kind: 'test-origin' }, - }, - ]) - return true - }) - .query({ end_version: 0 }) - .reply(204) - - async.series( - [ - cb => { - ProjectHistoryClient.resyncHistory(this.project_id, cb) - }, - cb => { - const update = { - projectHistoryId: historyId, - resyncProjectStructure: { - docs: [{ path: '/main.tex' }], - files: [], - }, - meta: { - ts: this.timestamp, - }, - } - ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) - }, - cb => { - const update = { - path: '/main.tex', - projectHistoryId: historyId, - resyncDocContent: { - content: 'a\nb', - historyOTRanges: { - comments: [ - { - id: commentId, - ranges: [ - { - pos: 1, - length: 2, - }, - ], - }, - ], - trackedChanges: [ - { - range: { pos: 0, length: 1 }, - tracking: { - ts: this.timestamp.toJSON(), - type: 'delete', - userId: 'user-id', - }, - }, - { - range: { pos: 2, length: 1 }, - tracking: { - ts: this.timestamp.toJSON(), - type: 'insert', - userId: 'user-id', - }, - }, - ], - }, - }, - doc: this.doc_id, - meta: { - ts: this.timestamp, - }, - } - ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) - }, - cb => { - ProjectHistoryClient.flushProject(this.project_id, cb) - }, - ], - error => { - if (error) { - return done(error) - } - assert( - addComment.isDone(), - `/api/projects/${historyId}/changes should have been called` - ) - done() - } - ) - }) - - it('should fix resolved state for comments in the history store (history-ot)', function (done) { - const addComment = MockHistoryStore() - .post(`/api/projects/${historyId}/legacy_changes`, body => { - expect(body).to.deep.equal([ - { - v2Authors: [], - authors: [], - timestamp: this.timestamp.toJSON(), - operations: [ - { - pathname: 'main.tex', - 
commentId, - resolved: true, - }, - ], - origin: { kind: 'test-origin' }, - }, - ]) - return true - }) - .query({ end_version: 0 }) - .reply(204) - - async.series( - [ - cb => { - ProjectHistoryClient.resyncHistory(this.project_id, cb) - }, - cb => { - const update = { - projectHistoryId: historyId, - resyncProjectStructure: { - docs: [{ path: '/main.tex' }], - files: [], - }, - meta: { - ts: this.timestamp, - }, - } - ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) - }, - cb => { - const update = { - path: '/main.tex', - projectHistoryId: historyId, - resyncDocContent: { - content: 'a\nb', - historyOTRanges: { - comments: [ - { - id: commentId, - ranges: [ - { - pos: 0, - length: 3, - }, - ], - resolved: true, - }, - ], - trackedChanges: [ - { - range: { pos: 0, length: 1 }, - tracking: { - ts: this.timestamp.toJSON(), - type: 'delete', - userId: 'user-id', - }, - }, - { - range: { pos: 2, length: 1 }, - tracking: { - ts: this.timestamp.toJSON(), - type: 'insert', - userId: 'user-id', - }, - }, - ], - }, - }, - doc: this.doc_id, - meta: { - ts: this.timestamp, - }, - } - ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) - }, - cb => { - ProjectHistoryClient.flushProject(this.project_id, cb) - }, - ], - error => { - if (error) { - return done(error) - } - assert( - addComment.isDone(), - `/api/projects/${historyId}/changes should have been called` - ) - done() - } - ) - }) - - it('should fix tracked changes in the history store', function (done) { - const fixTrackedChange = MockHistoryStore() - .post(`/api/projects/${historyId}/legacy_changes`, body => { - expect(body).to.deep.equal([ - { - v2Authors: [], - authors: [], - timestamp: this.timestamp.toJSON(), - operations: [ - { - pathname: 'main.tex', - textOperation: [ - 1, - { - r: 1, - tracking: { - ts: this.timestamp.toJSON(), - type: 'insert', - userId: 'user-id', - }, - }, - { - r: 1, - tracking: { - type: 'none', - }, - }, - ], - }, - ], - origin: { kind: 'test-origin' }, - }, - 
]) - return true - }) - .query({ end_version: 0 }) - .reply(204) - - async.series( - [ - cb => { - ProjectHistoryClient.resyncHistory(this.project_id, cb) - }, - cb => { - const update = { - projectHistoryId: historyId, - resyncProjectStructure: { - docs: [{ path: '/main.tex' }], - files: [], - }, - meta: { - ts: this.timestamp, - }, - } - ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) - }, - cb => { - const update = { - path: '/main.tex', - projectHistoryId: historyId, - resyncDocContent: { - content: 'a\nb', - ranges: { - comments: [ - { - id: commentId, - op: { - c: 'a', - p: 0, - hpos: 0, - hlen: 3, - t: commentId, - }, - meta: { - user_id: 'user-id', - ts: this.timestamp, - }, - }, - ], - changes: [ - { - id: 'id1', - op: { - d: 'a', - p: 0, - }, - metadata: { - user_id: 'user-id', - ts: this.timestamp, - }, - }, - { - id: 'id2', - op: { - i: '\n', - p: 0, - hpos: 1, - }, - metadata: { - user_id: 'user-id', - ts: this.timestamp, - }, - }, - ], - }, - }, - doc: this.doc_id, - meta: { - ts: this.timestamp, - }, - } - ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) - }, - cb => { - ProjectHistoryClient.flushProject(this.project_id, cb) - }, - ], - error => { - if (error) { - return done(error) - } - assert( - fixTrackedChange.isDone(), - `/api/projects/${historyId}/changes should have been called` - ) - done() - } - ) - }) - - it('should fix tracked changes in the history store (history-ot)', function (done) { - const fixTrackedChange = MockHistoryStore() - .post(`/api/projects/${historyId}/legacy_changes`, body => { - expect(body).to.deep.equal([ - { - v2Authors: [], - authors: [], - timestamp: this.timestamp.toJSON(), - operations: [ - { - pathname: 'main.tex', - textOperation: [ - 1, - { - r: 1, - tracking: { - ts: this.timestamp.toJSON(), - type: 'insert', - userId: 'user-id', - }, - }, - { - r: 1, - tracking: { - type: 'none', - }, - }, - ], - }, - ], - origin: { kind: 'test-origin' }, - }, - ]) - return true - }) - .query({ 
end_version: 0 }) - .reply(204) - - async.series( - [ - cb => { - ProjectHistoryClient.resyncHistory(this.project_id, cb) - }, - cb => { - const update = { - projectHistoryId: historyId, - resyncProjectStructure: { - docs: [{ path: '/main.tex' }], - files: [], - }, - meta: { - ts: this.timestamp, - }, - } - ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) - }, - cb => { - const update = { - path: '/main.tex', - projectHistoryId: historyId, - resyncDocContent: { - content: 'a\nb', - historyOTRanges: { - comments: [ - { - id: commentId, - ranges: [ - { - pos: 0, - length: 3, - }, - ], - }, - ], - trackedChanges: [ - { - range: { pos: 0, length: 1 }, - tracking: { - ts: this.timestamp.toJSON(), - type: 'delete', - userId: 'user-id', - }, - }, - { - range: { pos: 1, length: 1 }, - tracking: { - ts: this.timestamp.toJSON(), - type: 'insert', - userId: 'user-id', - }, - }, - ], - }, - }, - doc: this.doc_id, - meta: { - ts: this.timestamp, - }, - } - ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) - }, - cb => { - ProjectHistoryClient.flushProject(this.project_id, cb) - }, - ], - error => { - if (error) { - return done(error) - } - assert( - fixTrackedChange.isDone(), - `/api/projects/${historyId}/changes should have been called` - ) - done() - } - ) - }) - - it('should fix both comments and tracked changes in the history store (history-ot)', function (done) { - const fixTrackedChange = MockHistoryStore() - .post(`/api/projects/${historyId}/legacy_changes`, body => { - expect(body).to.deep.equal([ - // not merged due to comment operation using history-ot and tracked-changes operation using sharejs ot - { - v2Authors: [], - authors: [], - timestamp: this.timestamp.toJSON(), - operations: [ - { - pathname: 'main.tex', - commentId, - ranges: [{ pos: 1, length: 2 }], - }, - ], - origin: { kind: 'test-origin' }, - }, - { - v2Authors: [], - authors: [], - timestamp: this.timestamp.toJSON(), - operations: [ - { - pathname: 'main.tex', - 
textOperation: [ - 1, - { - r: 1, - tracking: { - ts: this.timestamp.toJSON(), - type: 'insert', - userId: 'user-id', - }, - }, - { - r: 1, - tracking: { - type: 'none', - }, - }, - ], - }, - ], - origin: { kind: 'test-origin' }, - }, - ]) - return true - }) - .query({ end_version: 0 }) - .reply(204) - - async.series( - [ - cb => { - ProjectHistoryClient.resyncHistory(this.project_id, cb) - }, - cb => { - const update = { - projectHistoryId: historyId, - resyncProjectStructure: { - docs: [{ path: '/main.tex' }], - files: [], - }, - meta: { - ts: this.timestamp, - }, - } - ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) - }, - cb => { - const update = { - path: '/main.tex', - projectHistoryId: historyId, - resyncDocContent: { - content: 'a\nb', - historyOTRanges: { - comments: [ - { - id: commentId, - ranges: [ - { - pos: 1, - length: 2, - }, - ], - }, - ], - trackedChanges: [ - { - range: { pos: 0, length: 1 }, - tracking: { - ts: this.timestamp.toJSON(), - type: 'delete', - userId: 'user-id', - }, - }, - { - range: { pos: 1, length: 1 }, - tracking: { - ts: this.timestamp.toJSON(), - type: 'insert', - userId: 'user-id', - }, - }, - ], - }, - }, - doc: this.doc_id, - meta: { - ts: this.timestamp, - }, - } - ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) - }, - cb => { - ProjectHistoryClient.flushProject(this.project_id, cb) - }, - ], - error => { - if (error) { - return done(error) - } - assert( - fixTrackedChange.isDone(), - `/api/projects/${historyId}/changes should have been called` - ) - done() - } - ) - }) }) describe('resyncProjectStructureOnly', function () { diff --git a/services/project-history/test/unit/js/UpdatesManager/UpdatesProcessorTests.js b/services/project-history/test/unit/js/UpdatesManager/UpdatesProcessorTests.js index fcc0918e11..6f148e5a8d 100644 --- a/services/project-history/test/unit/js/UpdatesManager/UpdatesProcessorTests.js +++ 
b/services/project-history/test/unit/js/UpdatesManager/UpdatesProcessorTests.js @@ -6,14 +6,14 @@ import * as Errors from '../../../../app/js/Errors.js' const MODULE_PATH = '../../../../app/js/UpdatesProcessor.js' describe('UpdatesProcessor', function () { - beforeEach(async function () { + before(async function () { this.extendLock = sinon.stub() this.BlobManager = { createBlobsForUpdates: sinon.stub(), } this.HistoryStoreManager = { getMostRecentVersion: sinon.stub(), - sendChanges: sinon.stub().yields(null, { resyncNeeded: true }), + sendChanges: sinon.stub().yields(null, {}), } this.LockManager = { runWithLock: sinon.spy((key, runner, callback) => @@ -22,7 +22,7 @@ describe('UpdatesProcessor', function () { } this.RedisManager = {} this.UpdateCompressor = { - compressRawUpdatesWithMetricsCb: sinon.stub(), + compressRawUpdates: sinon.stub(), } this.UpdateTranslator = { convertToChanges: sinon.stub(), @@ -299,10 +299,7 @@ describe('UpdatesProcessor', function () { null, this.expandedUpdates ) - this.UpdateCompressor.compressRawUpdatesWithMetricsCb.yields( - null, - this.compressedUpdates - ) + this.UpdateCompressor.compressRawUpdates.returns(this.compressedUpdates) this.BlobManager.createBlobsForUpdates.callsArgWith( 4, null, @@ -318,8 +315,8 @@ describe('UpdatesProcessor', function () { this.ol_project_id, this.rawUpdates, this.extendLock, - (err, flushResponse) => { - this.callback(err, flushResponse) + err => { + this.callback(err) done() } ) @@ -350,7 +347,7 @@ describe('UpdatesProcessor', function () { }) it('should compress updates', function () { - this.UpdateCompressor.compressRawUpdatesWithMetricsCb.should.have.been.calledWith( + this.UpdateCompressor.compressRawUpdates.should.have.been.calledWith( this.expandedUpdates ) }) @@ -385,74 +382,8 @@ describe('UpdatesProcessor', function () { ) }) - it('should call the callback with no error and flush response', function () { - this.callback.should.have.been.calledWith(null, { resyncNeeded: true }) - }) - }) - 
- describe('no updates', function () { - beforeEach(function (done) { - this.SyncManager.skipUpdatesDuringSync.yields( - null, - [], - this.newSyncState - ) - this.UpdatesProcessor._processUpdates( - this.project_id, - this.ol_project_id, - this.rawUpdates, - this.extendLock, - (err, flushResponse) => { - this.callback(err, flushResponse) - done() - } - ) - }) - - it('should not get the latest version id', function () { - this.HistoryStoreManager.getMostRecentVersion.should.not.have.been.calledWith( - this.project_id, - this.ol_project_id - ) - }) - - it('should skip updates when resyncing', function () { - this.SyncManager.skipUpdatesDuringSync.should.have.been.calledWith( - this.project_id, - this.rawUpdates - ) - }) - - it('should not expand sync updates', function () { - this.SyncManager.expandSyncUpdates.should.not.have.been.called - }) - - it('should not compress updates', function () { - this.UpdateCompressor.compressRawUpdatesWithMetricsCb.should.not.have - .been.called - }) - - it('should not create any blobs for the updates', function () { - this.BlobManager.createBlobsForUpdates.should.not.have.been.called - }) - - it('should not convert the updates into a change requests', function () { - this.UpdateTranslator.convertToChanges.should.not.have.been.called - }) - - it('should not send the change request to the history store', function () { - this.HistoryStoreManager.sendChanges.should.not.have.been.called - }) - - it('should set the sync state', function () { - this.SyncManager.setResyncState.should.have.been.calledWith( - this.project_id, - this.newSyncState - ) - }) - - it('should call the callback with fake flush response', function () { - this.callback.should.have.been.calledWith(null, { resyncNeeded: false }) + it('should call the callback with no error', function () { + this.callback.should.have.been.called }) }) @@ -481,7 +412,7 @@ describe('UpdatesProcessor', function () { }) describe('_skipAlreadyAppliedUpdates', function () { - 
beforeEach(function () { + before(function () { this.UpdateTranslator.isProjectStructureUpdate.callsFake( update => update.version != null ) @@ -489,7 +420,7 @@ describe('UpdatesProcessor', function () { }) describe('with all doc ops in order', function () { - beforeEach(function () { + before(function () { this.updates = [ { doc: 'id', v: 1 }, { doc: 'id', v: 2 }, @@ -509,7 +440,7 @@ describe('UpdatesProcessor', function () { }) describe('with all project ops in order', function () { - beforeEach(function () { + before(function () { this.updates = [ { version: 1 }, { version: 2 }, @@ -529,7 +460,7 @@ describe('UpdatesProcessor', function () { }) describe('with all multiple doc and ops in order', function () { - beforeEach(function () { + before(function () { this.updates = [ { doc: 'id1', v: 1 }, { doc: 'id1', v: 2 }, @@ -557,47 +488,64 @@ describe('UpdatesProcessor', function () { }) describe('with doc ops out of order', function () { - beforeEach(function () { + before(function () { this.updates = [ { doc: 'id', v: 1 }, { doc: 'id', v: 2 }, { doc: 'id', v: 4 }, { doc: 'id', v: 3 }, ] + this.skipFn = sinon.spy( + this.UpdatesProcessor._mocks, + '_skipAlreadyAppliedUpdates' + ) + try { + this.updatesToApply = + this.UpdatesProcessor._skipAlreadyAppliedUpdates( + this.project_id, + this.updates, + { docs: {} } + ) + } catch (error) {} + }) + + after(function () { + this.skipFn.restore() }) it('should throw an exception', function () { - expect(() => { - this.UpdatesProcessor._skipAlreadyAppliedUpdates( - this.project_id, - this.updates, - { docs: {} } - ) - }).to.throw(Errors.OpsOutOfOrderError) + this.skipFn.threw('OpsOutOfOrderError').should.equal(true) }) }) describe('with project ops out of order', function () { - beforeEach(function () { - this.UpdateTranslator.isProjectStructureUpdate.callsFake( - update => update.version != null - ) + before(function () { this.updates = [ { version: 1 }, { version: 2 }, { version: 4 }, { version: 3 }, ] + this.skipFn = 
sinon.spy( + this.UpdatesProcessor._mocks, + '_skipAlreadyAppliedUpdates' + ) + try { + this.updatesToApply = + this.UpdatesProcessor._skipAlreadyAppliedUpdates( + this.project_id, + this.updates, + { docs: {} } + ) + } catch (error) {} + }) + + after(function () { + this.skipFn.restore() }) it('should throw an exception', function () { - expect(() => { - this.UpdatesProcessor._skipAlreadyAppliedUpdates( - this.project_id, - this.updates, - { docs: {} } - ) - }).to.throw(Errors.OpsOutOfOrderError) + this.skipFn.threw('OpsOutOfOrderError').should.equal(true) }) }) }) diff --git a/services/real-time/docker-compose.ci.yml b/services/real-time/docker-compose.ci.yml index a5a2292e72..9011627c06 100644 --- a/services/real-time/docker-compose.ci.yml +++ b/services/real-time/docker-compose.ci.yml @@ -43,7 +43,7 @@ services: command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . user: root redis: - image: redis:7.4.3 + image: redis healthcheck: test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/real-time/docker-compose.yml b/services/real-time/docker-compose.yml index f1041164bc..9333271dcf 100644 --- a/services/real-time/docker-compose.yml +++ b/services/real-time/docker-compose.yml @@ -46,7 +46,7 @@ services: command: npm run --silent test:acceptance redis: - image: redis:7.4.3 + image: redis healthcheck: test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/web/.eslintrc.js b/services/web/.eslintrc.js index ef3cf11de5..2fa9e8f547 100644 --- a/services/web/.eslintrc.js +++ b/services/web/.eslintrc.js @@ -383,18 +383,6 @@ module.exports = { 'Modify location via customLocalStorage instead of calling window.localStorage methods directly', }, ], - 'no-unused-vars': 'off', - '@typescript-eslint/no-unused-vars': [ - 'error', - { - args: 'after-used', - argsIgnorePattern: '^_', - ignoreRestSiblings: false, - caughtErrors: 'none', - vars: 'all', - varsIgnorePattern: '^_', - }, - 
], }, }, { diff --git a/services/web/.prettierignore b/services/web/.prettierignore index 94ab5579c2..f4be187b87 100644 --- a/services/web/.prettierignore +++ b/services/web/.prettierignore @@ -6,7 +6,6 @@ frontend/js/vendor modules/**/frontend/js/vendor public/js public/minjs -frontend/stylesheets/bootstrap-5/modules/metrics/nvd3.scss frontend/stylesheets/components/nvd3.less frontend/js/features/source-editor/lezer-latex/latex.mjs frontend/js/features/source-editor/lezer-latex/latex.terms.mjs diff --git a/services/web/.storybook/preview.tsx b/services/web/.storybook/preview.tsx index 320caac144..e3838a6f97 100644 --- a/services/web/.storybook/preview.tsx +++ b/services/web/.storybook/preview.tsx @@ -122,12 +122,6 @@ const preview: Preview = { // render stories in iframes, to isolate modals inlineStories: false, }, - options: { - storySort: { - method: 'alphabetical', - order: ['Shared'], - }, - }, }, globalTypes: { theme: { diff --git a/services/web/Makefile b/services/web/Makefile index 6ebbc357c6..58323058b8 100644 --- a/services/web/Makefile +++ b/services/web/Makefile @@ -83,11 +83,6 @@ test_unit_app: $(DOCKER_COMPOSE) run --name unit_test_$(BUILD_DIR_NAME) --rm test_unit $(DOCKER_COMPOSE) down -v -t 0 -test_unit_mocha: export COMPOSE_PROJECT_NAME=unit_test_mocha_$(BUILD_DIR_NAME) -test_unit_mocha: - $(DOCKER_COMPOSE) run --rm test_unit npm run test:unit:mocha - $(DOCKER_COMPOSE) down -v -t 0 - test_unit_esm: export COMPOSE_PROJECT_NAME=unit_test_esm_$(BUILD_DIR_NAME) test_unit_esm: $(DOCKER_COMPOSE) run --rm test_unit npm run test:unit:esm diff --git a/services/web/app/src/Features/Authorization/AuthorizationManager.js b/services/web/app/src/Features/Authorization/AuthorizationManager.js index 22d92ea9d9..2f339de83d 100644 --- a/services/web/app/src/Features/Authorization/AuthorizationManager.js +++ b/services/web/app/src/Features/Authorization/AuthorizationManager.js @@ -88,54 +88,9 @@ async function getPrivilegeLevelForProject( opts = {} ) { if (userId) { 
- return await getPrivilegeLevelForProjectWithUser( - userId, - projectId, - null, - opts - ) + return getPrivilegeLevelForProjectWithUser(userId, projectId, opts) } else { - return await getPrivilegeLevelForProjectWithoutUser(projectId, token, opts) - } -} - -/** - * Get the privilege level that the user has for the project. - * - * @param userId - The id of the user that wants to access the project. - * @param projectId - The id of the project to be accessed. - * @param {string} token - * @param {ProjectAccess} projectAccess - * @param {Object} opts - * @param {boolean} opts.ignoreSiteAdmin - Do not consider whether the user is - * a site admin. - * @param {boolean} opts.ignorePublicAccess - Do not consider the project is - * publicly accessible. - * - * @returns {string|boolean} The privilege level. One of "owner", - * "readAndWrite", "readOnly" or false. - */ -async function getPrivilegeLevelForProjectWithProjectAccess( - userId, - projectId, - token, - projectAccess, - opts = {} -) { - if (userId) { - return await getPrivilegeLevelForProjectWithUser( - userId, - projectId, - projectAccess, - opts - ) - } else { - return await _getPrivilegeLevelForProjectWithoutUserWithPublicAccessLevel( - projectId, - token, - projectAccess.publicAccessLevel(), - opts - ) + return getPrivilegeLevelForProjectWithoutUser(projectId, token, opts) } } @@ -143,7 +98,6 @@ async function getPrivilegeLevelForProjectWithProjectAccess( async function getPrivilegeLevelForProjectWithUser( userId, projectId, - projectAccess, opts = {} ) { if (!opts.ignoreSiteAdmin) { @@ -152,11 +106,11 @@ async function getPrivilegeLevelForProjectWithUser( } } - projectAccess = - projectAccess || - (await CollaboratorsGetter.promises.getProjectAccess(projectId)) - - const privilegeLevel = projectAccess.privilegeLevelForUser(userId) + const privilegeLevel = + await CollaboratorsGetter.promises.getMemberIdPrivilegeLevel( + userId, + projectId + ) if (privilegeLevel && privilegeLevel !== PrivilegeLevels.NONE) 
{ // The user has direct access return privilegeLevel @@ -165,7 +119,7 @@ async function getPrivilegeLevelForProjectWithUser( if (!opts.ignorePublicAccess) { // Legacy public-access system // User is present (not anonymous), but does not have direct access - const publicAccessLevel = projectAccess.publicAccessLevel() + const publicAccessLevel = await getPublicAccessLevel(projectId) if (publicAccessLevel === PublicAccessLevels.READ_ONLY) { return PrivilegeLevels.READ_ONLY } @@ -183,21 +137,7 @@ async function getPrivilegeLevelForProjectWithoutUser( token, opts = {} ) { - return await _getPrivilegeLevelForProjectWithoutUserWithPublicAccessLevel( - projectId, - token, - await getPublicAccessLevel(projectId), - opts - ) -} - -// User is Anonymous, Try Token-based access -async function _getPrivilegeLevelForProjectWithoutUserWithPublicAccessLevel( - projectId, - token, - publicAccessLevel, - opts = {} -) { + const publicAccessLevel = await getPublicAccessLevel(projectId) if (!opts.ignorePublicAccess) { if (publicAccessLevel === PublicAccessLevels.READ_ONLY) { // Legacy public read-only access for anonymous user @@ -209,7 +149,7 @@ async function _getPrivilegeLevelForProjectWithoutUserWithPublicAccessLevel( } } if (publicAccessLevel === PublicAccessLevels.TOKEN_BASED) { - return await getPrivilegeLevelForProjectWithToken(projectId, token) + return getPrivilegeLevelForProjectWithToken(projectId, token) } // Deny anonymous user access @@ -369,7 +309,6 @@ module.exports = { canUserRenameProject, canUserAdminProject, getPrivilegeLevelForProject, - getPrivilegeLevelForProjectWithProjectAccess, isRestrictedUserForProject, isUserSiteAdmin, }, diff --git a/services/web/app/src/Features/Chat/ChatManager.js b/services/web/app/src/Features/Chat/ChatManager.js index 7eab6039d8..9625881dd8 100644 --- a/services/web/app/src/Features/Chat/ChatManager.js +++ b/services/web/app/src/Features/Chat/ChatManager.js @@ -1,46 +1,61 @@ +const async = require('async') +const UserInfoManager = 
require('../User/UserInfoManager') const UserInfoController = require('../User/UserInfoController') -const UserGetter = require('../User/UserGetter') -const { callbackify } = require('@overleaf/promise-utils') +const { promisify } = require('@overleaf/promise-utils') -async function injectUserInfoIntoThreads(threads) { - const userIds = new Set() - for (const thread of Object.values(threads)) { +function injectUserInfoIntoThreads(threads, callback) { + // There will be a lot of repitition of user_ids, so first build a list + // of unique ones to perform db look ups on, then use these to populate the + // user fields + let message, thread, threadId, userId + if (callback == null) { + callback = function () {} + } + const userIds = {} + for (threadId in threads) { + thread = threads[threadId] if (thread.resolved) { - userIds.add(thread.resolved_by_user_id) + userIds[thread.resolved_by_user_id] = true } - for (const message of thread.messages) { - userIds.add(message.user_id) + for (message of Array.from(thread.messages)) { + userIds[message.user_id] = true } } - const projection = { - _id: true, - first_name: true, - last_name: true, - email: true, + const jobs = [] + const users = {} + for (userId in userIds) { + ;(userId => + jobs.push(cb => + UserInfoManager.getPersonalInfo(userId, function (error, user) { + if (error != null) return cb(error) + user = UserInfoController.formatPersonalInfo(user) + users[userId] = user + cb() + }) + ))(userId) } - const users = await UserGetter.promises.getUsers(userIds, projection) - const usersById = new Map() - for (const user of users) { - usersById.set( - user._id.toString(), - UserInfoController.formatPersonalInfo(user) - ) - } - for (const thread of Object.values(threads)) { - if (thread.resolved) { - thread.resolved_by_user = usersById.get(thread.resolved_by_user_id) + + return async.series(jobs, function (error) { + if (error != null) { + return callback(error) } - for (const message of thread.messages) { - message.user = 
usersById.get(message.user_id) + for (threadId in threads) { + thread = threads[threadId] + if (thread.resolved) { + thread.resolved_by_user = users[thread.resolved_by_user_id] + } + for (message of Array.from(thread.messages)) { + message.user = users[message.user_id] + } } - } - return threads + return callback(null, threads) + }) } module.exports = { - injectUserInfoIntoThreads: callbackify(injectUserInfoIntoThreads), + injectUserInfoIntoThreads, promises: { - injectUserInfoIntoThreads, + injectUserInfoIntoThreads: promisify(injectUserInfoIntoThreads), }, } diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsGetter.js b/services/web/app/src/Features/Collaborators/CollaboratorsGetter.js index a3543ae614..caa6ef159d 100644 --- a/services/web/app/src/Features/Collaborators/CollaboratorsGetter.js +++ b/services/web/app/src/Features/Collaborators/CollaboratorsGetter.js @@ -1,4 +1,3 @@ -// @ts-check const { callbackify } = require('util') const pLimit = require('p-limit') const { ObjectId } = require('mongodb-legacy') @@ -16,6 +15,9 @@ module.exports = { getMemberIdsWithPrivilegeLevels: callbackify(getMemberIdsWithPrivilegeLevels), getMemberIds: callbackify(getMemberIds), getInvitedMemberIds: callbackify(getInvitedMemberIds), + getInvitedMembersWithPrivilegeLevels: callbackify( + getInvitedMembersWithPrivilegeLevels + ), getInvitedMembersWithPrivilegeLevelsFromFields: callbackify( getInvitedMembersWithPrivilegeLevelsFromFields ), @@ -29,10 +31,10 @@ module.exports = { userIsTokenMember: callbackify(userIsTokenMember), getAllInvitedMembers: callbackify(getAllInvitedMembers), promises: { - getProjectAccess, getMemberIdsWithPrivilegeLevels, getMemberIds, getInvitedMemberIds, + getInvitedMembersWithPrivilegeLevels, getInvitedMembersWithPrivilegeLevelsFromFields, getMemberIdPrivilegeLevel, getInvitedEditCollaboratorCount, @@ -48,202 +50,7 @@ module.exports = { }, } -/** - * @typedef ProjectMember - * @property {string} id - * @property {typeof 
PrivilegeLevels[keyof PrivilegeLevels]} privilegeLevel - * @property {typeof Sources[keyof Sources]} source - * @property {boolean} [pendingEditor] - * @property {boolean} [pendingReviewer] - */ - -/** - * @typedef LoadedProjectMember - * @property {typeof PrivilegeLevels[keyof PrivilegeLevels]} privilegeLevel - * @property {{_id: ObjectId, email: string, features: any, first_name: string, last_name: string, signUpDate: Date}} user - * @property {boolean} [pendingEditor] - * @property {boolean} [pendingReviewer] - */ - -// Wrapper for determining multiple dimensions of project access. -class ProjectAccess { - /** @type {ProjectMember[]} */ - #members - - /** @type {typeof PublicAccessLevels[keyof PublicAccessLevels]} */ - #publicAccessLevel - - /** - * @param {{ owner_ref: ObjectId; collaberator_refs: ObjectId[]; readOnly_refs: ObjectId[]; tokenAccessReadAndWrite_refs: ObjectId[]; tokenAccessReadOnly_refs: ObjectId[]; publicAccesLevel: typeof PublicAccessLevels[keyof PublicAccessLevels]; pendingEditor_refs: ObjectId[]; reviewer_refs: ObjectId[]; pendingReviewer_refs: ObjectId[]; }} project - */ - constructor(project) { - this.#members = _getMemberIdsWithPrivilegeLevelsFromFields( - project.owner_ref, - project.collaberator_refs, - project.readOnly_refs, - project.tokenAccessReadAndWrite_refs, - project.tokenAccessReadOnly_refs, - project.publicAccesLevel, - project.pendingEditor_refs, - project.reviewer_refs, - project.pendingReviewer_refs - ) - this.#publicAccessLevel = project.publicAccesLevel - } - - /** - * @return {Promise<{ownerMember: LoadedProjectMember|undefined, members: LoadedProjectMember[]}>} - */ - async loadOwnerAndInvitedMembers() { - const all = await _loadMembers( - this.#members.filter(m => m.source !== Sources.TOKEN) - ) - return { - ownerMember: all.find(m => m.privilegeLevel === PrivilegeLevels.OWNER), - members: all.filter(m => m.privilegeLevel !== PrivilegeLevels.OWNER), - } - } - - /** - * @return {Promise} - */ - async loadInvitedMembers() 
{ - return _loadMembers( - this.#members.filter( - m => - m.source !== Sources.TOKEN && - m.privilegeLevel !== PrivilegeLevels.OWNER - ) - ) - } - - /** - * @return {Promise} - */ - async loadOwner() { - const [owner] = await _loadMembers( - this.#members.filter(m => m.privilegeLevel === PrivilegeLevels.OWNER) - ) - return owner - } - - /** - * @return {ProjectMember[]} - */ - allMembers() { - return this.#members - } - - /** - * @return {typeof PublicAccessLevels[keyof PublicAccessLevels]} - */ - publicAccessLevel() { - return this.#publicAccessLevel - } - - /** - * @return {string[]} - */ - memberIds() { - return this.#members.map(m => m.id) - } - - /** - * @return {string[]} - */ - invitedMemberIds() { - return this.#members.filter(m => m.source !== Sources.TOKEN).map(m => m.id) - } - - /** - * @param {string | ObjectId} userId - * @return {typeof PrivilegeLevels[keyof PrivilegeLevels]} - */ - privilegeLevelForUser(userId) { - if (!userId) return PrivilegeLevels.NONE - for (const member of this.#members) { - if (member.id === userId.toString()) { - return member.privilegeLevel - } - } - return PrivilegeLevels.NONE - } - - /** - * @param {string | ObjectId} userId - * @return {boolean} - */ - isUserTokenMember(userId) { - if (!userId) return false - for (const member of this.#members) { - if (member.id === userId.toString() && member.source === Sources.TOKEN) { - return true - } - } - return false - } - - /** - * @param {string | ObjectId} userId - * @return {boolean} - */ - isUserInvitedMember(userId) { - if (!userId) return false - for (const member of this.#members) { - if (member.id === userId.toString() && member.source !== Sources.TOKEN) { - return true - } - } - return false - } - - /** - * @param {string | ObjectId} userId - * @return {boolean} - */ - isUserInvitedReadWriteMember(userId) { - for (const member of this.#members) { - if ( - member.id.toString() === userId.toString() && - member.source !== Sources.TOKEN && - member.privilegeLevel === 
PrivilegeLevels.READ_AND_WRITE - ) { - return true - } - } - return false - } - - /** - * Counts invited members with editor or reviewer roles - * @return {number} - */ - countInvitedEditCollaborators() { - return this.#members.filter( - m => - m.source === Sources.INVITE && - (m.privilegeLevel === PrivilegeLevels.READ_AND_WRITE || - m.privilegeLevel === PrivilegeLevels.REVIEW) - ).length - } - - /** - * Counts invited members that are readonly pending editors or pending reviewers - * @return {number} - */ - countInvitedPendingEditors() { - return this.#members.filter( - m => - m.source === Sources.INVITE && - m.privilegeLevel === PrivilegeLevels.READ_ONLY && - (m.pendingEditor || m.pendingReviewer) - ).length - } -} - -module.exports.ProjectAccess = ProjectAccess - -async function getProjectAccess(projectId) { +async function getMemberIdsWithPrivilegeLevels(projectId) { const project = await ProjectGetter.promises.getProject(projectId, { owner_ref: 1, collaberator_refs: 1, @@ -258,19 +65,34 @@ async function getProjectAccess(projectId) { if (!project) { throw new Errors.NotFoundError(`no project found with id ${projectId}`) } - return new ProjectAccess(project) -} - -async function getMemberIdsWithPrivilegeLevels(projectId) { - return (await getProjectAccess(projectId)).allMembers() + const memberIds = _getMemberIdsWithPrivilegeLevelsFromFields( + project.owner_ref, + project.collaberator_refs, + project.readOnly_refs, + project.tokenAccessReadAndWrite_refs, + project.tokenAccessReadOnly_refs, + project.publicAccesLevel, + project.pendingEditor_refs, + project.reviewer_refs, + project.pendingReviewer_refs + ) + return memberIds } async function getMemberIds(projectId) { - return (await getProjectAccess(projectId)).memberIds() + const members = await getMemberIdsWithPrivilegeLevels(projectId) + return members.map(m => m.id) } async function getInvitedMemberIds(projectId) { - return (await getProjectAccess(projectId)).invitedMemberIds() + const members = await 
getMemberIdsWithPrivilegeLevels(projectId) + return members.filter(m => m.source !== Sources.TOKEN).map(m => m.id) +} + +async function getInvitedMembersWithPrivilegeLevels(projectId) { + let members = await getMemberIdsWithPrivilegeLevels(projectId) + members = members.filter(m => m.source !== Sources.TOKEN) + return _loadMembers(members) } async function getInvitedMembersWithPrivilegeLevelsFromFields( @@ -285,7 +107,7 @@ async function getInvitedMembersWithPrivilegeLevelsFromFields( readOnlyIds, [], [], - 'private', + null, [], reviewerIds, [] @@ -299,31 +121,69 @@ async function getMemberIdPrivilegeLevel(userId, projectId) { if (userId == null) { return PrivilegeLevels.NONE } - return (await getProjectAccess(projectId)).privilegeLevelForUser(userId) + const members = await getMemberIdsWithPrivilegeLevels(projectId) + for (const member of members) { + if (member.id === userId.toString()) { + return member.privilegeLevel + } + } + return PrivilegeLevels.NONE } async function getInvitedEditCollaboratorCount(projectId) { - return (await getProjectAccess(projectId)).countInvitedEditCollaborators() + // Counts invited members with editor or reviewer roles + const members = await getMemberIdsWithPrivilegeLevels(projectId) + return members.filter( + m => + m.source === Sources.INVITE && + (m.privilegeLevel === PrivilegeLevels.READ_AND_WRITE || + m.privilegeLevel === PrivilegeLevels.REVIEW) + ).length } async function getInvitedPendingEditorCount(projectId) { - return (await getProjectAccess(projectId)).countInvitedPendingEditors() + // Only counts invited members that are readonly pending editors or pending + // reviewers + const members = await getMemberIdsWithPrivilegeLevels(projectId) + return members.filter( + m => + m.source === Sources.INVITE && + m.privilegeLevel === PrivilegeLevels.READ_ONLY && + (m.pendingEditor || m.pendingReviewer) + ).length } async function isUserInvitedMemberOfProject(userId, projectId) { if (!userId) { return false } - return (await 
getProjectAccess(projectId)).isUserInvitedMember(userId) + const members = await getMemberIdsWithPrivilegeLevels(projectId) + for (const member of members) { + if ( + member.id.toString() === userId.toString() && + member.source !== Sources.TOKEN + ) { + return true + } + } + return false } async function isUserInvitedReadWriteMemberOfProject(userId, projectId) { if (!userId) { return false } - return (await getProjectAccess(projectId)).isUserInvitedReadWriteMember( - userId - ) + const members = await getMemberIdsWithPrivilegeLevels(projectId) + for (const member of members) { + if ( + member.id.toString() === userId.toString() && + member.source !== Sources.TOKEN && + member.privilegeLevel === PrivilegeLevels.READ_AND_WRITE + ) { + return true + } + } + return false } async function getPublicShareTokens(userId, projectId) { @@ -349,13 +209,10 @@ async function getPublicShareTokens(userId, projectId) { return null } - // @ts-ignore if (memberInfo.isOwner) { return memberInfo.tokens - // @ts-ignore } else if (memberInfo.hasTokenReadOnlyAccess) { return { - // @ts-ignore readOnly: memberInfo.tokens.readOnly, } } else { @@ -367,7 +224,6 @@ async function getPublicShareTokens(userId, projectId) { // excluding projects where the user is listed in the token access fields when // token access has been disabled. 
async function getProjectsUserIsMemberOf(userId, fields) { - // @ts-ignore const limit = pLimit(2) const [readAndWrite, review, readOnly, tokenReadAndWrite, tokenReadOnly] = await Promise.all([ @@ -418,9 +274,10 @@ async function dangerouslyGetAllProjectsUserIsMemberOf(userId, fields) { async function getAllInvitedMembers(projectId) { try { - const projectAccess = await getProjectAccess(projectId) - const invitedMembers = await projectAccess.loadInvitedMembers() - return invitedMembers.map(ProjectEditorHandler.buildUserModelView) + const rawMembers = await getInvitedMembersWithPrivilegeLevels(projectId) + const { members } = + ProjectEditorHandler.buildOwnerAndMembersViews(rawMembers) + return members } catch (err) { throw OError.tag(err, 'error getting members for project', { projectId }) } @@ -459,19 +316,6 @@ async function userIsReadWriteTokenMember(userId, projectId) { return project != null } -/** - * @param {ObjectId} ownerId - * @param {ObjectId[]} collaboratorIds - * @param {ObjectId[]} readOnlyIds - * @param {ObjectId[]} tokenAccessIds - * @param {ObjectId[]} tokenAccessReadOnlyIds - * @param {typeof PublicAccessLevels[keyof PublicAccessLevels]} publicAccessLevel - * @param {ObjectId[]} pendingEditorIds - * @param {ObjectId[]} reviewerIds - * @param {ObjectId[]} pendingReviewerIds - * @return {ProjectMember[]} - * @private - */ function _getMemberIdsWithPrivilegeLevelsFromFields( ownerId, collaboratorIds, @@ -540,13 +384,7 @@ function _getMemberIdsWithPrivilegeLevelsFromFields( return members } -/** - * @param {ProjectMember[]} members - * @return {Promise} - * @private - */ async function _loadMembers(members) { - if (members.length === 0) return [] const userIds = Array.from(new Set(members.map(m => m.id))) const users = new Map() for (const user of await UserGetter.promises.getUsers(userIds, { diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js b/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js index 
8b5b1bc3c2..96b4cd6e37 100644 --- a/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js +++ b/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js @@ -161,7 +161,6 @@ async function addUserIdToProject( }) let level let existingUsers = project.collaberator_refs || [] - existingUsers = existingUsers.concat(project.reviewer_refs || []) existingUsers = existingUsers.concat(project.readOnly_refs || []) existingUsers = existingUsers.map(u => u.toString()) if (existingUsers.includes(userId.toString())) { diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.mjs b/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.mjs index db853afac3..4c2d911709 100644 --- a/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.mjs +++ b/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.mjs @@ -16,6 +16,7 @@ import ProjectAuditLogHandler from '../Project/ProjectAuditLogHandler.js' import Errors from '../Errors/Errors.js' import AuthenticationController from '../Authentication/AuthenticationController.js' import PrivilegeLevels from '../Authorization/PrivilegeLevels.js' +import SplitTestHandler from '../SplitTests/SplitTestHandler.js' // This rate limiter allows a different number of requests depending on the // number of callaborators a user is allowed. 
This is implemented by providing @@ -245,6 +246,9 @@ async function viewInvite(req, res) { const projectId = req.params.Project_id const { token } = req.params + // Read split test assignment so that it's available for Pug to read + await SplitTestHandler.promises.getAssignment(req, res, 'core-pug-bs5') + const _renderInvalidPage = function () { res.status(404) logger.debug({ projectId }, 'invite not valid, rendering not-valid page') diff --git a/services/web/app/src/Features/Docstore/DocstoreManager.js b/services/web/app/src/Features/Docstore/DocstoreManager.js index 4074b90605..5fe0f27dc9 100644 --- a/services/web/app/src/Features/Docstore/DocstoreManager.js +++ b/services/web/app/src/Features/Docstore/DocstoreManager.js @@ -1,11 +1,10 @@ const { promisify } = require('util') -const { promisifyMultiResult, callbackify } = require('@overleaf/promise-utils') +const { promisifyMultiResult } = require('@overleaf/promise-utils') const request = require('request').defaults({ jar: false }) const OError = require('@overleaf/o-error') const logger = require('@overleaf/logger') const settings = require('@overleaf/settings') const Errors = require('../Errors/Errors') -const { fetchJson } = require('@overleaf/fetch-utils') const TIMEOUT = 30 * 1000 // request timeout @@ -87,22 +86,6 @@ function getAllDeletedDocs(projectId, callback) { }) } -/** - * @param {string} projectId - */ -async function getCommentThreadIds(projectId) { - const url = `${settings.apis.docstore.url}/project/${projectId}/comment-thread-ids` - return fetchJson(url, { signal: AbortSignal.timeout(TIMEOUT) }) -} - -/** - * @param {string} projectId - */ -async function getTrackedChangesUserIds(projectId) { - const url = `${settings.apis.docstore.url}/project/${projectId}/tracked-changes-user-ids` - return fetchJson(url, { signal: AbortSignal.timeout(TIMEOUT) }) -} - /** * @param {string} projectId * @param {Callback} callback @@ -309,8 +292,6 @@ module.exports = { getAllDeletedDocs, getAllRanges, getDoc, - 
getCommentThreadIds: callbackify(getCommentThreadIds), - getTrackedChangesUserIds: callbackify(getTrackedChangesUserIds), isDocDeleted, updateDoc, projectHasRanges, @@ -323,8 +304,6 @@ module.exports = { getAllDeletedDocs: promisify(getAllDeletedDocs), getAllRanges: promisify(getAllRanges), getDoc: promisifyMultiResult(getDoc, ['lines', 'rev', 'version', 'ranges']), - getCommentThreadIds, - getTrackedChangesUserIds, isDocDeleted: promisify(isDocDeleted), updateDoc: promisifyMultiResult(updateDoc, ['modified', 'rev']), projectHasRanges: promisify(projectHasRanges), diff --git a/services/web/app/src/Features/Editor/EditorHttpController.js b/services/web/app/src/Features/Editor/EditorHttpController.js index f44b57f069..8128a95b26 100644 --- a/services/web/app/src/Features/Editor/EditorHttpController.js +++ b/services/web/app/src/Features/Editor/EditorHttpController.js @@ -4,13 +4,14 @@ const ProjectGetter = require('../Project/ProjectGetter') const AuthorizationManager = require('../Authorization/AuthorizationManager') const ProjectEditorHandler = require('../Project/ProjectEditorHandler') const Metrics = require('@overleaf/metrics') +const CollaboratorsGetter = require('../Collaborators/CollaboratorsGetter') const CollaboratorsInviteGetter = require('../Collaborators/CollaboratorsInviteGetter') +const CollaboratorsHandler = require('../Collaborators/CollaboratorsHandler') const PrivilegeLevels = require('../Authorization/PrivilegeLevels') const SessionManager = require('../Authentication/SessionManager') const Errors = require('../Errors/Errors') const { expressify } = require('@overleaf/promise-utils') const Settings = require('@overleaf/settings') -const { ProjectAccess } = require('../Collaborators/CollaboratorsGetter') module.exports = { joinProject: expressify(joinProject), @@ -42,6 +43,12 @@ async function joinProject(req, res, next) { if (!project) { return res.sendStatus(403) } + // Hide sensitive data if the user is restricted + if (isRestrictedUser) { + 
project.owner = { _id: project.owner._id } + project.members = [] + project.invites = [] + } // Only show the 'renamed or deleted' message once if (project.deletedByExternalDataSource) { await ProjectDeleter.promises.unmarkAsDeletedByExternalSource(projectId) @@ -68,43 +75,42 @@ async function _buildJoinProjectView(req, projectId, userId) { if (project == null) { throw new Errors.NotFoundError('project not found') } - const projectAccess = new ProjectAccess(project) + const members = + await CollaboratorsGetter.promises.getInvitedMembersWithPrivilegeLevels( + projectId + ) const token = req.body.anonymousAccessToken const privilegeLevel = - await AuthorizationManager.promises.getPrivilegeLevelForProjectWithProjectAccess( + await AuthorizationManager.promises.getPrivilegeLevelForProject( userId, projectId, - token, - projectAccess + token ) if (privilegeLevel == null || privilegeLevel === PrivilegeLevels.NONE) { return { project: null, privilegeLevel: null, isRestrictedUser: false } } - const isTokenMember = projectAccess.isUserTokenMember(userId) - const isInvitedMember = projectAccess.isUserInvitedMember(userId) + const invites = + await CollaboratorsInviteGetter.promises.getAllInvites(projectId) + const isTokenMember = await CollaboratorsHandler.promises.userIsTokenMember( + userId, + projectId + ) + const isInvitedMember = + await CollaboratorsGetter.promises.isUserInvitedMemberOfProject( + userId, + projectId + ) const isRestrictedUser = AuthorizationManager.isRestrictedUser( userId, privilegeLevel, isTokenMember, isInvitedMember ) - let ownerMember - let members = [] - let invites = [] - if (isRestrictedUser) { - ownerMember = await projectAccess.loadOwner() - } else { - ;({ ownerMember, members } = - await projectAccess.loadOwnerAndInvitedMembers()) - invites = await CollaboratorsInviteGetter.promises.getAllInvites(projectId) - } return { project: ProjectEditorHandler.buildProjectModelView( project, - ownerMember, members, - invites, - isRestrictedUser + 
invites ), privilegeLevel, isTokenMember, diff --git a/services/web/app/src/Features/Email/EmailBuilder.js b/services/web/app/src/Features/Email/EmailBuilder.js index 4741838b15..01565201ac 100644 --- a/services/web/app/src/Features/Email/EmailBuilder.js +++ b/services/web/app/src/Features/Email/EmailBuilder.js @@ -949,33 +949,6 @@ templates.welcomeWithoutCTA = NoCTAEmailTemplate({ }, }) -templates.removeGroupMember = NoCTAEmailTemplate({ - subject(opts) { - return `Your ${settings.appName} account has been removed from ${opts.adminName}’s group` - }, - title(opts) { - return `Your ${settings.appName} account has been removed from ${opts.adminName}’s group` - }, - greeting() { - return '' - }, - message() { - const passwordResetUrl = `${settings.siteUrl}/user/password/reset` - - return [ - 'Don’t worry, your account and projects are still accessible. But there are a few changes to be aware of:', - '
    ' + - `
  • Your account will have reverted to a free ${settings.appName} plan.
  • `, - `
  • Any project collaborators have been set to read-only (you can invite one collaborator per project on the free plan).
  • `, - `
  • If you previously logged in via SSO, you’ll need to set a password to access your account.
  • ` + - '
', - `If you think this has been done in error, please contact your group admin.`, - `Thanks!`, - `Team ${settings.appName}`, - ] - }, -}) - function _formatUserNameAndEmail(user, placeholder) { if (user.first_name && user.last_name) { const fullName = `${user.first_name} ${user.last_name}` diff --git a/services/web/app/src/Features/History/RestoreManager.js b/services/web/app/src/Features/History/RestoreManager.js index 16ef2024f6..8c73695eed 100644 --- a/services/web/app/src/Features/History/RestoreManager.js +++ b/services/web/app/src/Features/History/RestoreManager.js @@ -18,12 +18,6 @@ const OError = require('@overleaf/o-error') const ProjectGetter = require('../Project/ProjectGetter') const ProjectEntityHandler = require('../Project/ProjectEntityHandler') -async function getCommentThreadIds(projectId) { - await DocumentUpdaterHandler.promises.flushProjectToMongo(projectId) - const raw = await DocstoreManager.promises.getCommentThreadIds(projectId) - return new Map(Object.entries(raw).map(([doc, ids]) => [doc, new Set(ids)])) -} - const RestoreManager = { async restoreFileFromV2(userId, projectId, version, pathname) { const fsPath = await RestoreManager._writeFileVersionToDisk( @@ -58,25 +52,6 @@ const RestoreManager = { }, async revertFile(userId, projectId, version, pathname, options = {}) { - const threadIds = await getCommentThreadIds(projectId) - return await RestoreManager._revertSingleFile( - userId, - projectId, - version, - pathname, - threadIds, - options - ) - }, - - async _revertSingleFile( - userId, - projectId, - version, - pathname, - threadIds, - options = {} - ) { const project = await ProjectGetter.promises.getProject(projectId, { overleaf: true, }) @@ -140,7 +115,6 @@ const RestoreManager = { origin, userId ) - threadIds.delete(file.element._id.toString()) } const { metadata } = await RestoreManager._getMetadataFromHistory( @@ -180,12 +154,22 @@ const RestoreManager = { const documentCommentIds = new Set( ranges.comments?.map(({ op: { t } }) 
=> t) ) - const commentIdsToDuplicate = Array.from(documentCommentIds).filter(id => { - for (const ids of threadIds.values()) { - if (ids.has(id)) return true + + await DocumentUpdaterHandler.promises.flushProjectToMongo(projectId) + + const docsWithRanges = + await DocstoreManager.promises.getAllRanges(projectId) + + const nonOrphanedThreadIds = new Set() + for (const { ranges } of docsWithRanges) { + for (const comment of ranges.comments ?? []) { + nonOrphanedThreadIds.add(comment.op.t) } - return false - }) + } + + const commentIdsToDuplicate = Array.from(documentCommentIds).filter(id => + nonOrphanedThreadIds.has(id) + ) const newRanges = { changes: ranges.changes, comments: [] } @@ -207,7 +191,6 @@ const RestoreManager = { continue } // We have a new id for this comment thread - comment.id = result.duplicateId comment.op.t = result.duplicateId } newRanges.comments.push(comment) @@ -248,6 +231,8 @@ const RestoreManager = { delete threadData.resolved_by_user_id delete threadData.resolved_at } + // remove the resolved property from the comment range as the chat service is synced at this point + delete commentRange.op.resolved } await ChatManager.promises.injectUserInfoIntoThreads(newCommentThreadData) @@ -274,11 +259,6 @@ const RestoreManager = { origin, userId ) - // For revertProject: The next doc that gets reverted will need to duplicate all the threads seen here. 
- threadIds.set( - _id.toString(), - new Set(newRanges.comments.map(({ op: { t } }) => t)) - ) return { _id, @@ -341,17 +321,11 @@ const RestoreManager = { version, timestamp: new Date(updateAtVersion.meta.end_ts).toISOString(), } - const threadIds = await getCommentThreadIds(projectId) for (const pathname of pathsAtPastVersion) { - await RestoreManager._revertSingleFile( - userId, - projectId, - version, - pathname, - threadIds, - { origin } - ) + await RestoreManager.revertFile(userId, projectId, version, pathname, { + origin, + }) } const entitiesAtLiveVersion = diff --git a/services/web/app/src/Features/Notifications/NotificationsController.mjs b/services/web/app/src/Features/Notifications/NotificationsController.mjs index 35b5f0a677..ae1d9208f3 100644 --- a/services/web/app/src/Features/Notifications/NotificationsController.mjs +++ b/services/web/app/src/Features/Notifications/NotificationsController.mjs @@ -33,26 +33,4 @@ export default { res.sendStatus(200) ) }, - - getNotification(req, res, next) { - const userId = SessionManager.getLoggedInUserId(req.session) - const { notificationId } = req.params - NotificationsHandler.getUserNotifications( - userId, - function (err, unreadNotifications) { - if (err) { - return next(err) - } - const notification = unreadNotifications.find( - n => n._id === notificationId - ) - - if (!notification) { - return res.status(404).end() - } - - res.json(notification) - } - ) - }, } diff --git a/services/web/app/src/Features/PasswordReset/PasswordResetController.mjs b/services/web/app/src/Features/PasswordReset/PasswordResetController.mjs index 771782c302..419a36ecf2 100644 --- a/services/web/app/src/Features/PasswordReset/PasswordResetController.mjs +++ b/services/web/app/src/Features/PasswordReset/PasswordResetController.mjs @@ -119,11 +119,7 @@ async function requestReset(req, res, next) { OError.tag(err, 'failed to generate and email password reset token', { email, }) - - if ( - err.message === - 'user does not have one or 
more permissions within change-password' - ) { + if (err.message === 'user does not have one or more permissions within change-password') { return res.status(403).json({ message: { key: 'no-password-allowed-due-to-sso', diff --git a/services/web/app/src/Features/Project/ProjectController.js b/services/web/app/src/Features/Project/ProjectController.js index 4ffd8bcc1b..79aaef9334 100644 --- a/services/web/app/src/Features/Project/ProjectController.js +++ b/services/web/app/src/Features/Project/ProjectController.js @@ -14,7 +14,6 @@ const ProjectHelper = require('./ProjectHelper') const metrics = require('@overleaf/metrics') const { User } = require('../../models/User') const SubscriptionLocator = require('../Subscription/SubscriptionLocator') -const { isPaidSubscription } = require('../Subscription/SubscriptionHelper') const LimitationsManager = require('../Subscription/LimitationsManager') const Settings = require('@overleaf/settings') const AuthorizationManager = require('../Authorization/AuthorizationManager') @@ -353,7 +352,6 @@ const _ProjectController = { 'overleaf-assist-bundle', 'word-count-client', 'editor-popup-ux-survey', - 'new-editor-error-logs-redesign', ].filter(Boolean) const getUserValues = async userId => @@ -656,12 +654,17 @@ const _ProjectController = { } } - const hasPaidSubscription = isPaidSubscription(subscription) + const hasNonRecurlySubscription = + subscription && !subscription.recurlySubscription_id const hasManuallyCollectedSubscription = subscription?.collectionMethod === 'manual' + const canPurchaseAddons = !( + hasNonRecurlySubscription || hasManuallyCollectedSubscription + ) const assistantDisabled = user.aiErrorAssistant?.enabled === false // the assistant has been manually disabled by the user const canUseErrorAssistant = - !hasManuallyCollectedSubscription && !assistantDisabled + (user.features?.aiErrorAssistant || canPurchaseAddons) && + !assistantDisabled let featureUsage = {} @@ -728,11 +731,12 @@ const _ProjectController = { 
? 'project/ide-react-detached' : 'project/ide-react' - const capabilities = [...req.capabilitySet] - - // make sure the capability is added to CE/SP when the feature is enabled - if (!Features.hasFeature('saas') && Features.hasFeature('chat')) { - capabilities.push('chat') + let chatEnabled + if (Features.hasFeature('saas')) { + chatEnabled = + Features.hasFeature('chat') && req.capabilitySet.has('chat') + } else { + chatEnabled = Features.hasFeature('chat') } const isOverleafAssistBundleEnabled = @@ -764,12 +768,6 @@ const _ProjectController = { isOverleafAssistBundleEnabled && (await ProjectController._getAddonPrices(req, res)) - const reducedTimeoutWarning = - await SplitTestHandler.promises.getAssignmentForUser( - project.owner_ref, - '10s-timeout-warning' - ) - let planCode = subscription?.planCode if (!planCode && !userInNonIndividualSub) { planCode = 'personal' @@ -793,7 +791,7 @@ const _ProjectController = { referal_id: user.referal_id, signUpDate: user.signUpDate, allowedFreeTrial, - hasPaidSubscription, + hasRecurlySubscription: subscription?.recurlySubscription_id != null, featureSwitches: user.featureSwitches, features: fullFeatureSet, featureUsage, @@ -826,7 +824,6 @@ const _ProjectController = { lineHeight: user.ace.lineHeight || 'normal', overallTheme: user.ace.overallTheme, mathPreview: user.ace.mathPreview, - breadcrumbs: user.ace.breadcrumbs, referencesSearchMode: user.ace.referencesSearchMode, enableNewEditor: user.ace.enableNewEditor ?? 
true, }, @@ -840,7 +837,7 @@ const _ProjectController = { isTokenMember, isInvitedMember ), - capabilities, + chatEnabled, projectHistoryBlobsEnabled: Features.hasFeature( 'project-history-blobs' ), @@ -884,10 +881,6 @@ const _ProjectController = { paywallPlans, customerIoEnabled, addonPrices, - compileSettings: { - reducedTimeoutWarning: reducedTimeoutWarning?.variant, - compileTimeout: ownerFeatures?.compileTimeout, - }, }) timer.done() } catch (err) { diff --git a/services/web/app/src/Features/Project/ProjectDeleter.js b/services/web/app/src/Features/Project/ProjectDeleter.js index b81281e319..e5764bab86 100644 --- a/services/web/app/src/Features/Project/ProjectDeleter.js +++ b/services/web/app/src/Features/Project/ProjectDeleter.js @@ -106,24 +106,8 @@ async function expireDeletedProjectsAfterDuration() { deletedProject => deletedProject.deleterData.deletedProjectId ) ) - logger.info( - { projectCount: projectIds.length }, - 'expiring batch of deleted projects' - ) - try { - for (const projectId of projectIds) { - await expireDeletedProject(projectId) - } - logger.info( - { projectCount: projectIds.length }, - 'batch of deleted projects expired successfully' - ) - } catch (error) { - logger.warn( - { error }, - 'something went wrong expiring batch of deleted projects' - ) - throw error + for (const projectId of projectIds) { + await expireDeletedProject(projectId) } } @@ -292,15 +276,12 @@ async function deleteProject(projectId, options = {}) { ) await Project.deleteOne({ _id: projectId }).exec() - - logger.info( - { projectId, userId: project.owner_ref }, - 'successfully deleted project' - ) } catch (err) { logger.warn({ err }, 'problem deleting project') throw err } + + logger.debug({ projectId }, 'successfully deleted project') } async function undeleteProject(projectId, options = {}) { @@ -354,22 +335,17 @@ async function undeleteProject(projectId, options = {}) { async function expireDeletedProject(projectId) { try { - logger.info({ projectId }, 'expiring 
deleted project') const activeProject = await Project.findById(projectId).exec() if (activeProject) { // That project is active. The deleted project record might be there // because of an incomplete delete or undelete operation. Clean it up and // return. - logger.info( - { projectId }, - 'deleted project record found but project is active' - ) await DeletedProject.deleteOne({ 'deleterData.deletedProjectId': projectId, }) + await ProjectAuditLogEntry.deleteMany({ projectId }) return } - const deletedProject = await DeletedProject.findOne({ 'deleterData.deletedProjectId': projectId, }).exec() @@ -385,14 +361,12 @@ async function expireDeletedProject(projectId) { ) return } - const userId = deletedProject.deletedProjectOwnerId + const historyId = deletedProject.project.overleaf && deletedProject.project.overleaf.history && deletedProject.project.overleaf.history.id - logger.info({ projectId, userId }, 'destroying expired project data') - await Promise.all([ DocstoreManager.promises.destroyProject(deletedProject.project._id), HistoryManager.promises.deleteProject( @@ -405,10 +379,6 @@ async function expireDeletedProject(projectId) { Modules.promises.hooks.fire('projectExpired', deletedProject.project._id), ]) - logger.info( - { projectId, userId }, - 'redacting PII from the deleted project record' - ) await DeletedProject.updateOne( { _id: deletedProject._id, @@ -420,7 +390,6 @@ async function expireDeletedProject(projectId) { }, } ).exec() - logger.info({ projectId, userId }, 'expired deleted project successfully') } catch (error) { logger.warn({ projectId, error }, 'error expiring deleted project') throw error diff --git a/services/web/app/src/Features/Project/ProjectEditorHandler.js b/services/web/app/src/Features/Project/ProjectEditorHandler.js index f01f9afe12..40fd787e71 100644 --- a/services/web/app/src/Features/Project/ProjectEditorHandler.js +++ b/services/web/app/src/Features/Project/ProjectEditorHandler.js @@ -6,13 +6,8 @@ const Features = 
require('../../infrastructure/Features') module.exports = ProjectEditorHandler = { trackChangesAvailable: true, - buildProjectModelView( - project, - ownerMember, - members, - invites, - isRestrictedUser - ) { + buildProjectModelView(project, members, invites) { + let owner, ownerFeatures const result = { _id: project._id, name: project.name, @@ -25,23 +20,17 @@ module.exports = ProjectEditorHandler = { description: project.description, spellCheckLanguage: project.spellCheckLanguage, deletedByExternalDataSource: project.deletedByExternalDataSource || false, - imageName: - project.imageName != null - ? Path.basename(project.imageName) - : undefined, + members: [], + invites: this.buildInvitesView(invites), + imageName: project.imageName, } - if (isRestrictedUser) { - result.owner = { _id: project.owner_ref } - result.members = [] - result.invites = [] - } else { - result.owner = this.buildUserModelView(ownerMember) - result.members = members.map(this.buildUserModelView) - result.invites = this.buildInvitesView(invites) - } + ;({ owner, ownerFeatures, members } = + this.buildOwnerAndMembersViews(members)) + result.owner = owner + result.members = members - result.features = _.defaults(ownerMember?.user?.features || {}, { + result.features = _.defaults(ownerFeatures || {}, { collaborators: -1, // Infinite versioning: false, dropbox: false, @@ -70,6 +59,25 @@ module.exports = ProjectEditorHandler = { return result }, + buildOwnerAndMembersViews(members) { + let owner = null + let ownerFeatures = null + const filteredMembers = [] + for (const member of members || []) { + if (member.privilegeLevel === 'owner') { + ownerFeatures = member.user.features + owner = this.buildUserModelView(member) + } else { + filteredMembers.push(this.buildUserModelView(member)) + } + } + return { + owner, + ownerFeatures, + members: filteredMembers, + } + }, + buildUserModelView(member) { const user = member.user return { diff --git 
a/services/web/app/src/Features/Project/ProjectListController.mjs b/services/web/app/src/Features/Project/ProjectListController.mjs index ab2b0e3082..c62396e153 100644 --- a/services/web/app/src/Features/Project/ProjectListController.mjs +++ b/services/web/app/src/Features/Project/ProjectListController.mjs @@ -26,7 +26,6 @@ import GeoIpLookup from '../../infrastructure/GeoIpLookup.js' import SplitTestHandler from '../SplitTests/SplitTestHandler.js' import SplitTestSessionHandler from '../SplitTests/SplitTestSessionHandler.js' import TutorialHandler from '../Tutorial/TutorialHandler.js' -import SubscriptionHelper from '../Subscription/SubscriptionHelper.js' /** * @import { GetProjectsRequest, GetProjectsResponse, AllUsersProjects, MongoProject } from "./types" @@ -389,13 +388,13 @@ async function projectListPage(req, res, next) { } } - let hasIndividualPaidSubscription = false + let hasIndividualRecurlySubscription = false try { - hasIndividualPaidSubscription = - SubscriptionHelper.isIndividualActivePaidSubscription( - usersIndividualSubscription - ) + hasIndividualRecurlySubscription = + usersIndividualSubscription?.groupPlan === false && + usersIndividualSubscription?.recurlyStatus?.state !== 'canceled' && + usersIndividualSubscription?.recurlySubscription_id !== '' } catch (error) { logger.error({ err: error }, 'Failed to get individual subscription') } @@ -409,15 +408,6 @@ async function projectListPage(req, res, next) { 'papers-notification-banner' ) - const customerIoEnabled = - await SplitTestHandler.promises.hasUserBeenAssignedToVariant( - req, - userId, - 'customer-io-trial-conversion', - 'enabled', - true - ) - res.render('project/list-react', { title: 'your_projects', usersBestSubscription, @@ -447,9 +437,8 @@ async function projectListPage(req, res, next) { groupId: subscription._id, groupName: subscription.teamName, })), - hasIndividualPaidSubscription, + hasIndividualRecurlySubscription, userRestrictions: Array.from(req.userRestrictions || []), - 
customerIoEnabled, }) } diff --git a/services/web/app/src/Features/Subscription/Errors.js b/services/web/app/src/Features/Subscription/Errors.js index 9ebb08c6db..cbcd0014f7 100644 --- a/services/web/app/src/Features/Subscription/Errors.js +++ b/services/web/app/src/Features/Subscription/Errors.js @@ -26,17 +26,10 @@ class SubtotalLimitExceededError extends OError {} class HasPastDueInvoiceError extends OError {} -class PaymentActionRequiredError extends OError { - constructor(info) { - super('Payment action required', info) - } -} - module.exports = { RecurlyTransactionError, DuplicateAddOnError, AddOnNotPresentError, - PaymentActionRequiredError, MissingBillingInfoError, ManuallyCollectedError, PendingChangeError, diff --git a/services/web/app/src/Features/Subscription/FeaturesUpdater.js b/services/web/app/src/Features/Subscription/FeaturesUpdater.js index 16413c501c..a8c27f705f 100644 --- a/services/web/app/src/Features/Subscription/FeaturesUpdater.js +++ b/services/web/app/src/Features/Subscription/FeaturesUpdater.js @@ -3,7 +3,6 @@ const { callbackify } = require('util') const { callbackifyMultiResult } = require('@overleaf/promise-utils') const PlansLocator = require('./PlansLocator') const SubscriptionLocator = require('./SubscriptionLocator') -const SubscriptionHelper = require('./SubscriptionHelper') const UserFeaturesUpdater = require('./UserFeaturesUpdater') const FeaturesHelper = require('./FeaturesHelper') const Settings = require('@overleaf/settings') @@ -118,10 +117,7 @@ async function computeFeatures(userId) { async function _getIndividualFeatures(userId) { const subscription = await SubscriptionLocator.promises.getUsersSubscription(userId) - if ( - subscription == null || - SubscriptionHelper.getPaidSubscriptionState(subscription) === 'paused' - ) { + if (subscription == null || subscription?.recurlyStatus?.state === 'paused') { return {} } diff --git a/services/web/app/src/Features/Subscription/PaymentProviderEntities.js 
b/services/web/app/src/Features/Subscription/PaymentProviderEntities.js index 21bd504caf..6fe8638389 100644 --- a/services/web/app/src/Features/Subscription/PaymentProviderEntities.js +++ b/services/web/app/src/Features/Subscription/PaymentProviderEntities.js @@ -8,13 +8,11 @@ const OError = require('@overleaf/o-error') const { DuplicateAddOnError, AddOnNotPresentError } = require('./Errors') const PlansLocator = require('./PlansLocator') +const SubscriptionHelper = require('./SubscriptionHelper') -let SubscriptionHelper = null // Work around circular import (loaded at the bottom of the file) - -const MEMBERS_LIMIT_ADD_ON_CODE = 'additional-license' -const AI_ASSIST_STANDALONE_MONTHLY_PLAN_CODE = 'assistant' -const AI_ASSIST_STANDALONE_ANNUAL_PLAN_CODE = 'assistant-annual' const AI_ADD_ON_CODE = 'assistant' +const MEMBERS_LIMIT_ADD_ON_CODE = 'additional-license' +const STANDALONE_AI_ADD_ON_CODES = ['assistant', 'assistant-annual'] class PaymentProviderSubscription { /** @@ -134,11 +132,9 @@ class PaymentProviderSubscription { if (newPlan == null) { throw new OError('Unable to find plan in settings', { planCode }) } - const isInTrial = SubscriptionHelper.isInTrial(this.trialPeriodEnd) const shouldChangeAtTermEnd = SubscriptionHelper.shouldPlanChangeAtTermEnd( currentPlan, - newPlan, - isInTrial + newPlan ) const changeRequest = new PaymentProviderSubscriptionChangeRequest({ @@ -252,10 +248,9 @@ class PaymentProviderSubscription { const addOnUpdates = this.addOns .filter(addOn => addOn.code !== code) .map(addOn => addOn.toAddOnUpdate()) - const isInTrial = SubscriptionHelper.isInTrial(this.trialPeriodEnd) return new PaymentProviderSubscriptionChangeRequest({ subscription: this, - timeframe: isInTrial ? 
'now' : 'term_end', + timeframe: 'term_end', addOnUpdates, }) } @@ -592,10 +587,7 @@ class PaymentProviderAccount { * @param {string} planCode */ function isStandaloneAiAddOnPlanCode(planCode) { - return ( - planCode === AI_ASSIST_STANDALONE_MONTHLY_PLAN_CODE || - planCode === AI_ASSIST_STANDALONE_ANNUAL_PLAN_CODE - ) + return STANDALONE_AI_ADD_ON_CODES.includes(planCode) } /** @@ -626,8 +618,7 @@ function subscriptionChangeIsAiAssistUpgrade(subscriptionChange) { module.exports = { AI_ADD_ON_CODE, MEMBERS_LIMIT_ADD_ON_CODE, - AI_ASSIST_STANDALONE_MONTHLY_PLAN_CODE, - AI_ASSIST_STANDALONE_ANNUAL_PLAN_CODE, + STANDALONE_AI_ADD_ON_CODES, PaymentProviderSubscription, PaymentProviderSubscriptionAddOn, PaymentProviderSubscriptionChange, @@ -645,5 +636,3 @@ module.exports = { subscriptionChangeIsAiAssistUpgrade, PaymentProviderImmediateCharge, } - -SubscriptionHelper = require('./SubscriptionHelper') diff --git a/services/web/app/src/Features/Subscription/PlansLocator.js b/services/web/app/src/Features/Subscription/PlansLocator.js index 67d2f31c52..24343e1109 100644 --- a/services/web/app/src/Features/Subscription/PlansLocator.js +++ b/services/web/app/src/Features/Subscription/PlansLocator.js @@ -1,15 +1,10 @@ -// @ts-check - +// TODO: This file may be deleted when Stripe is fully implemented to all users, so, consider deleting it const Settings = require('@overleaf/settings') const logger = require('@overleaf/logger') /** * @typedef {import('../../../../types/subscription/plan').RecurlyPlanCode} RecurlyPlanCode * @typedef {import('../../../../types/subscription/plan').StripeLookupKey} StripeLookupKey - * @typedef {import('../../../../types/subscription/plan').StripeBaseLookupKey} StripeBaseLookupKey - * @typedef {import('../../../../types/subscription/plan').Plan} Plan - * @typedef {import('../../../../types/subscription/currency').StripeCurrencyCode} StripeCurrencyCode - * @typedef {import('stripe').Stripe.Price.Recurring.Interval} BillingCycleInterval */ function 
ensurePlansAreSetupCorrectly() { @@ -29,105 +24,61 @@ function ensurePlansAreSetupCorrectly() { }) } -/** - * @type {Record} - */ -const recurlyCodeToStripeBaseLookupKey = { - collaborator: 'standard_monthly', - 'collaborator-annual': 'standard_annual', - collaborator_free_trial_7_days: 'standard_monthly', - - professional: 'professional_monthly', +const recurlyPlanCodeToStripeLookupKey = { 'professional-annual': 'professional_annual', + professional: 'professional_monthly', professional_free_trial_7_days: 'professional_monthly', - - student: 'student_monthly', + 'collaborator-annual': 'standard_annual', + collaborator: 'standard_monthly', + collaborator_free_trial_7_days: 'standard_monthly', 'student-annual': 'student_annual', + student: 'student_monthly', student_free_trial_7_days: 'student_monthly', - - // TODO: change all group plans' lookup_keys to match the UK account after they have been added - group_collaborator: 'group_standard_enterprise', - group_collaborator_educational: 'group_standard_educational', group_professional: 'group_professional_enterprise', group_professional_educational: 'group_professional_educational', - - assistant: 'assistant_monthly', - 'assistant-annual': 'assistant_annual', + group_collaborator: 'group_standard_enterprise', + group_collaborator_educational: 'group_standard_educational', + assistant_annual: 'error_assist_annual', + assistant: 'error_assist_monthly', } -const LATEST_STRIPE_LOOKUP_KEY_VERSION = 'jun2025' - /** - * Build the Stripe lookup key, will be in this format: - * `${productCode}_${billingInterval}_${latestVersion}_${currency}` - * (for example: 'assistant_annual_jun2025_clp') * - * @param {RecurlyPlanCode} recurlyCode - * @param {StripeCurrencyCode} currency - * @param {BillingCycleInterval} [billingCycleInterval] -- needed for handling 'assistant' add-on - * @returns {StripeLookupKey|null} + * @param {RecurlyPlanCode} recurlyPlanCode + * @returns {StripeLookupKey} */ -function buildStripeLookupKey(recurlyCode, 
currency, billingCycleInterval) { - let stripeBaseLookupKey = recurlyCodeToStripeBaseLookupKey[recurlyCode] - - // Recurly always uses 'assistant' as the code regardless of the subscription duration - if (recurlyCode === 'assistant' && billingCycleInterval) { - if (billingCycleInterval === 'month') { - stripeBaseLookupKey = 'assistant_monthly' - } - if (billingCycleInterval === 'year') { - stripeBaseLookupKey = 'assistant_annual' - } - } - - if (stripeBaseLookupKey == null) { - return null - } - - return `${stripeBaseLookupKey}_${LATEST_STRIPE_LOOKUP_KEY_VERSION}_${currency}` +function mapRecurlyPlanCodeToStripeLookupKey(recurlyPlanCode) { + return recurlyPlanCodeToStripeLookupKey[recurlyPlanCode] } -/** - * @typedef {{ planType: 'individual' | 'group' | 'student' | null, period: 'annual' | 'monthly' }} PlanTypeAndPeriod - * @type {Record} - */ const recurlyPlanCodeToPlanTypeAndPeriod = { collaborator: { planType: 'individual', period: 'monthly' }, - 'collaborator-annual': { planType: 'individual', period: 'annual' }, collaborator_free_trial_7_days: { planType: 'individual', period: 'monthly' }, - + 'collaborator-annual': { planType: 'individual', period: 'annual' }, professional: { planType: 'individual', period: 'monthly' }, - 'professional-annual': { planType: 'individual', period: 'annual' }, professional_free_trial_7_days: { planType: 'individual', period: 'monthly', }, - + 'professional-annual': { planType: 'individual', period: 'annual' }, student: { planType: 'student', period: 'monthly' }, - 'student-annual': { planType: 'student', period: 'annual' }, student_free_trial_7_days: { planType: 'student', period: 'monthly' }, - - group_collaborator: { planType: 'group', period: 'annual' }, - group_collaborator_educational: { planType: 'group', period: 'annual' }, + 'student-annual': { planType: 'student', period: 'annual' }, group_professional: { planType: 'group', period: 'annual' }, group_professional_educational: { planType: 'group', period: 'annual' }, - - 
assistant: { planType: null, period: 'monthly' }, - 'assistant-annual': { planType: null, period: 'annual' }, + group_collaborator: { planType: 'group', period: 'annual' }, + group_collaborator_educational: { planType: 'group', period: 'annual' }, } /** + * * @param {RecurlyPlanCode} recurlyPlanCode - * @returns {PlanTypeAndPeriod} + * @returns {{ planType: 'individual' | 'group' | 'student', period: 'annual' | 'monthly'}} */ function getPlanTypeAndPeriodFromRecurlyPlanCode(recurlyPlanCode) { return recurlyPlanCodeToPlanTypeAndPeriod[recurlyPlanCode] } -/** - * @param {string|null} [planCode] - * @returns {Plan|null} - */ function findLocalPlanInSettings(planCode) { for (const plan of Settings.plans) { if (plan.planCode === planCode) { @@ -140,6 +91,6 @@ function findLocalPlanInSettings(planCode) { module.exports = { ensurePlansAreSetupCorrectly, findLocalPlanInSettings, - buildStripeLookupKey, + mapRecurlyPlanCodeToStripeLookupKey, getPlanTypeAndPeriodFromRecurlyPlanCode, } diff --git a/services/web/app/src/Features/Subscription/RecurlyClient.js b/services/web/app/src/Features/Subscription/RecurlyClient.js index 25332a9c34..753d49ba0f 100644 --- a/services/web/app/src/Features/Subscription/RecurlyClient.js +++ b/services/web/app/src/Features/Subscription/RecurlyClient.js @@ -22,7 +22,6 @@ const { MissingBillingInfoError, SubtotalLimitExceededError, } = require('./Errors') -const RecurlyMetrics = require('./RecurlyMetrics') /** * @import { PaymentProviderSubscriptionChangeRequest } from './PaymentProviderEntities' @@ -30,28 +29,10 @@ const RecurlyMetrics = require('./RecurlyMetrics') * @import { PaymentMethod } from './types' */ -class RecurlyClientWithErrorHandling extends recurly.Client { - /** - * @param {import('recurly/lib/recurly/Http').Response} response - * @return {Error | null} - * @private - */ - _errorFromResponse(response) { - RecurlyMetrics.recordMetrics( - response.status, - response.rateLimit, - response.rateLimitRemaining, - 
response.rateLimitReset.getTime() - ) - // @ts-ignore - return super._errorFromResponse(response) - } -} - const recurlySettings = Settings.apis.recurly const recurlyApiKey = recurlySettings ? recurlySettings.apiKey : undefined -const client = new RecurlyClientWithErrorHandling(recurlyApiKey) +const client = new recurly.Client(recurlyApiKey) /** * Get account for a given user @@ -736,21 +717,6 @@ async function failInvoice(invoiceId) { await client.markInvoiceFailed(invoiceId) } -async function terminateSubscriptionByUuid(subscriptionUuid) { - const subscription = await client.terminateSubscription( - 'uuid-' + subscriptionUuid, - { - body: { - refund: 'none', - }, - } - ) - - logger.debug({ subscriptionUuid }, 'subscription terminated') - - return subscription -} - module.exports = { errors: recurly.errors, @@ -774,7 +740,6 @@ module.exports = { resumeSubscriptionByUuid: callbackify(resumeSubscriptionByUuid), getPastDueInvoices: callbackify(getPastDueInvoices), failInvoice: callbackify(failInvoice), - terminateSubscriptionByUuid: callbackify(terminateSubscriptionByUuid), promises: { getSubscription, @@ -797,6 +762,5 @@ module.exports = { getPlan, getPastDueInvoices, failInvoice, - terminateSubscriptionByUuid, }, } diff --git a/services/web/app/src/Features/Subscription/RecurlyMetrics.js b/services/web/app/src/Features/Subscription/RecurlyMetrics.js deleted file mode 100644 index 1b709d7dc4..0000000000 --- a/services/web/app/src/Features/Subscription/RecurlyMetrics.js +++ /dev/null @@ -1,38 +0,0 @@ -const Metrics = require('@overleaf/metrics') - -/** - * @param {number} status - * @param {number} rateLimit - * @param {number} rateLimitRemaining - * @param {number} rateLimitReset - */ -function recordMetrics(status, rateLimit, rateLimitRemaining, rateLimitReset) { - Metrics.inc('recurly_request', 1, { status }) - const metrics = { rateLimit, rateLimitRemaining, rateLimitReset } - for (const [method, v] of Object.entries(metrics)) { - if (Number.isNaN(v)) continue - 
Metrics.gauge('recurly_request_rate_limiting', v, 1, { method }) - } -} - -/** - * @param {Response} response - */ -function recordMetricsFromResponse(response) { - const rateLimit = parseInt( - response.headers.get('X-RateLimit-Limit') || '', - 10 - ) - const rateLimitRemaining = parseInt( - response.headers.get('X-RateLimit-Remaining') || '', - 10 - ) - const rateLimitReset = - parseInt(response.headers.get('X-RateLimit-Reset') || '', 10) * 1000 - recordMetrics(response.status, rateLimit, rateLimitRemaining, rateLimitReset) -} - -module.exports = { - recordMetrics, - recordMetricsFromResponse, -} diff --git a/services/web/app/src/Features/Subscription/RecurlyWrapper.js b/services/web/app/src/Features/Subscription/RecurlyWrapper.js index 243da6edce..2227597737 100644 --- a/services/web/app/src/Features/Subscription/RecurlyWrapper.js +++ b/services/web/app/src/Features/Subscription/RecurlyWrapper.js @@ -9,30 +9,24 @@ const logger = require('@overleaf/logger') const Errors = require('../Errors/Errors') const SubscriptionErrors = require('./Errors') const { callbackify } = require('@overleaf/promise-utils') -const RecurlyMetrics = require('./RecurlyMetrics') /** - * Updates the email address of a Recurly account - * - * @param userId - * @param newAccountEmail - the new email address to set for the Recurly account + * @param accountId + * @param newEmail */ -async function updateAccountEmailAddress(userId, newAccountEmail) { +async function updateAccountEmailAddress(accountId, newEmail) { const data = { - email: newAccountEmail, + email: newEmail, } let requestBody try { requestBody = RecurlyWrapper._buildXml('account', data) } catch (error) { - throw OError.tag(error, 'error building xml', { - accountId: userId, - newEmail: newAccountEmail, - }) + throw OError.tag(error, 'error building xml', { accountId, newEmail }) } const { body } = await RecurlyWrapper.promises.apiRequest({ - url: `accounts/${userId}`, + url: `accounts/${accountId}`, method: 'PUT', body: 
requestBody, }) @@ -418,15 +412,9 @@ const promises = { } try { - const { body, response } = await fetchStringWithResponse( - fetchUrl, - fetchOptions - ) - RecurlyMetrics.recordMetricsFromResponse(response) - return { body, response } + return await fetchStringWithResponse(fetchUrl, fetchOptions) } catch (error) { if (error instanceof RequestFailedError) { - RecurlyMetrics.recordMetricsFromResponse(error.response) if (error.response.status === 404 && expect404) { return { response: error.response, body: null } } else if (error.response.status === 422 && expect422) { @@ -693,15 +681,12 @@ const promises = { } }, - async extendTrial(subscriptionId, trialEndsAt, daysUntilExpire) { + async extendTrial(subscriptionId, daysUntilExpire) { if (daysUntilExpire == null) { daysUntilExpire = 7 } - if (trialEndsAt == null) { - trialEndsAt = new Date() - } const nextRenewalDate = new Date() - nextRenewalDate.setDate(trialEndsAt.getDate() + daysUntilExpire) + nextRenewalDate.setDate(nextRenewalDate.getDate() + daysUntilExpire) logger.debug( { subscriptionId, daysUntilExpire }, 'Exending Free trial for user' diff --git a/services/web/app/src/Features/Subscription/SubscriptionController.js b/services/web/app/src/Features/Subscription/SubscriptionController.js index 5856682166..7aa345e7a8 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionController.js +++ b/services/web/app/src/Features/Subscription/SubscriptionController.js @@ -2,7 +2,6 @@ const SessionManager = require('../Authentication/SessionManager') const SubscriptionHandler = require('./SubscriptionHandler') -const SubscriptionHelper = require('./SubscriptionHelper') const SubscriptionViewModelBuilder = require('./SubscriptionViewModelBuilder') const LimitationsManager = require('./LimitationsManager') const RecurlyWrapper = require('./RecurlyWrapper') @@ -16,11 +15,7 @@ const AnalyticsManager = require('../Analytics/AnalyticsManager') const RecurlyEventHandler = require('./RecurlyEventHandler') const { 
expressify } = require('@overleaf/promise-utils') const OError = require('@overleaf/o-error') -const { - DuplicateAddOnError, - AddOnNotPresentError, - PaymentActionRequiredError, -} = require('./Errors') +const { DuplicateAddOnError, AddOnNotPresentError } = require('./Errors') const SplitTestHandler = require('../SplitTests/SplitTestHandler') const AuthorizationManager = require('../Authorization/AuthorizationManager') const Modules = require('../../infrastructure/Modules') @@ -32,11 +27,6 @@ const PlansLocator = require('./PlansLocator') const PaymentProviderEntities = require('./PaymentProviderEntities') const { User } = require('../../models/User') const UserGetter = require('../User/UserGetter') -const PermissionsManager = require('../Authorization/PermissionsManager') -const { - sanitizeSessionUserForFrontEnd, -} = require('../../infrastructure/FrontEndUser') -const { IndeterminateInvoiceError } = require('../Errors/Errors') /** * @import { SubscriptionChangeDescription } from '../../../../types/subscription/subscription-change-preview' @@ -88,13 +78,9 @@ async function userSubscriptionPage(req, res) { await Modules.promises.hooks.fire('userCanExtendTrial', user) )?.[0] const fromPlansPage = req.query.hasSubscription - const isInTrial = SubscriptionHelper.isInTrial( - personalSubscription?.payment?.trialEndsAt - ) const plansData = SubscriptionViewModelBuilder.buildPlansListForSubscriptionDash( - personalSubscription?.plan, - isInTrial + personalSubscription?.plan ) AnalyticsManager.recordEventForSession(req.session, 'subscription-page-view') @@ -272,8 +258,7 @@ async function pauseSubscription(req, res, next) { { pause_length: pauseCycles, plan_code: subscription?.planCode, - subscriptionId: - SubscriptionHelper.getPaymentProviderSubscriptionId(subscription), + subscriptionId: subscription?.recurlySubscription_id, } ) @@ -326,9 +311,7 @@ function cancelSubscription(req, res, next) { async function canceledSubscription(req, res, next) { return 
res.render('subscriptions/canceled-subscription-react', { title: 'subscription_canceled', - user: sanitizeSessionUserForFrontEnd( - SessionManager.getSessionUser(req.session) - ), + user: SessionManager.getSessionUser(req.session), }) } @@ -347,8 +330,7 @@ function cancelV1Subscription(req, res, next) { } async function previewAddonPurchase(req, res) { - const user = SessionManager.getSessionUser(req.session) - const userId = user._id + const userId = SessionManager.getLoggedInUserId(req.session) const addOnCode = req.params.addOnCode const purchaseReferrer = req.query.purchaseReferrer @@ -356,16 +338,6 @@ async function previewAddonPurchase(req, res) { return HttpErrorHandler.notFound(req, res, `Unknown add-on: ${addOnCode}`) } - const canUseAi = await PermissionsManager.promises.checkUserPermissions( - user, - ['use-ai'] - ) - if (!canUseAi) { - return res.redirect( - '/user/subscription?redirect-reason=ai-assist-unavailable' - ) - } - /** @type {PaymentMethod[]} */ const paymentMethod = await Modules.promises.hooks.fire( 'getPaymentMethod', @@ -438,6 +410,8 @@ async function purchaseAddon(req, res, next) { logger.debug({ userId: user._id, addOnCode }, 'purchasing add-ons') try { + // set a restore point in the case of a failed payment for the upgrade (Recurly only) + await SubscriptionHandler.promises.setSubscriptionRestorePoint(user._id) await SubscriptionHandler.promises.purchaseAddon( user._id, addOnCode, @@ -451,11 +425,6 @@ async function purchaseAddon(req, res, next) { 'Your subscription already includes this add-on', { addon: addOnCode } ) - } else if (err instanceof PaymentActionRequiredError) { - return res.status(402).json({ - message: 'Payment action required', - clientSecret: err.info.clientSecret, - }) } else { if (err instanceof Error) { OError.tag(err, 'something went wrong purchasing add-ons', { @@ -557,18 +526,18 @@ function cancelPendingSubscriptionChange(req, res, next) { }) } -async function updateAccountEmailAddress(req, res, next) { 
+function updateAccountEmailAddress(req, res, next) { const user = SessionManager.getSessionUser(req.session) - try { - await Modules.promises.hooks.fire( - 'updateAccountEmailAddress', - user._id, - user.email - ) - return res.sendStatus(200) - } catch (error) { - return next(error) - } + RecurlyWrapper.updateAccountEmailAddress( + user._id, + user.email, + function (error) { + if (error) { + return next(error) + } + res.sendStatus(200) + } + ) } function reactivateSubscription(req, res, next) { @@ -627,13 +596,6 @@ function recurlyCallback(req, res, next) { eventData.transaction.subscription_id, lastSubscription, function (err) { - if (err instanceof IndeterminateInvoiceError) { - logger.warn( - { recurlySubscriptionId: err.info.recurlySubscriptionId }, - 'could not determine invoice to fail for subscription' - ) - return res.sendStatus(200) - } if (err) { return next(err) } @@ -733,7 +695,7 @@ async function getRecommendedCurrency(req, res) { ip = req.query.ip } const currencyLookup = await GeoIpLookup.promises.getCurrencyCode(ip) - const countryCode = currencyLookup.countryCode + let countryCode = currencyLookup.countryCode const recommendedCurrency = currencyLookup.currencyCode let currency = null @@ -744,6 +706,13 @@ async function getRecommendedCurrency(req, res) { currency = recommendedCurrency } + const queryCountryCode = req.query.countryCode?.toUpperCase() + + // only enable countryCode testing flag on staging or dev environments + if (queryCountryCode && process.env.NODE_ENV !== 'production') { + countryCode = queryCountryCode + } + return { currency, recommendedCurrency, @@ -843,7 +812,7 @@ function makeChangePreview( paymentMethod: paymentMethod?.toString(), netTerms: subscription.netTerms, nextPlan: { - annual: nextPlan?.annual ?? false, + annual: nextPlan.annual ?? 
false, }, nextInvoice: { date: subscription.periodEnd.toISOString(), @@ -881,7 +850,7 @@ module.exports = { cancelV1Subscription, previewSubscription: expressify(previewSubscription), cancelPendingSubscriptionChange, - updateAccountEmailAddress: expressify(updateAccountEmailAddress), + updateAccountEmailAddress, reactivateSubscription, recurlyCallback, extendTrial: expressify(extendTrial), diff --git a/services/web/app/src/Features/Subscription/SubscriptionGroupHandler.js b/services/web/app/src/Features/Subscription/SubscriptionGroupHandler.js index ba862baa67..c717b2eec6 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionGroupHandler.js +++ b/services/web/app/src/Features/Subscription/SubscriptionGroupHandler.js @@ -4,7 +4,6 @@ const OError = require('@overleaf/o-error') const SubscriptionUpdater = require('./SubscriptionUpdater') const SubscriptionLocator = require('./SubscriptionLocator') const SubscriptionController = require('./SubscriptionController') -const SubscriptionHelper = require('./SubscriptionHelper') const { Subscription } = require('../../models/Subscription') const { User } = require('../../models/User') const RecurlyClient = require('./RecurlyClient') @@ -78,7 +77,7 @@ async function ensureFlexibleLicensingEnabled(plan) { } async function ensureSubscriptionIsActive(subscription) { - if (SubscriptionHelper.getPaidSubscriptionState(subscription) !== 'active') { + if (subscription?.recurlyStatus?.state !== 'active') { throw new InactiveError('The subscription is not active', { subscriptionId: subscription._id.toString(), }) diff --git a/services/web/app/src/Features/Subscription/SubscriptionHandler.js b/services/web/app/src/Features/Subscription/SubscriptionHandler.js index 104acd8783..1296a2a7de 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionHandler.js +++ b/services/web/app/src/Features/Subscription/SubscriptionHandler.js @@ -1,21 +1,21 @@ // @ts-check +const recurly = require('recurly') const RecurlyWrapper = 
require('./RecurlyWrapper') const RecurlyClient = require('./RecurlyClient') const { User } = require('../../models/User') const logger = require('@overleaf/logger') -const SubscriptionHelper = require('./SubscriptionHelper') const SubscriptionUpdater = require('./SubscriptionUpdater') const SubscriptionLocator = require('./SubscriptionLocator') const LimitationsManager = require('./LimitationsManager') const EmailHandler = require('../Email/EmailHandler') const { callbackify } = require('@overleaf/promise-utils') const UserUpdater = require('../User/UserUpdater') -const { IndeterminateInvoiceError } = require('../Errors/Errors') +const { NotFoundError, IndeterminateInvoiceError } = require('../Errors/Errors') const Modules = require('../../infrastructure/Modules') /** - * @import { PaymentProviderSubscriptionChange } from './PaymentProviderEntities' + * @import { PaymentProviderSubscription, PaymentProviderSubscriptionChange } from './PaymentProviderEntities' */ async function validateNoSubscriptionInRecurly(userId) { @@ -102,7 +102,8 @@ async function updateSubscription(user, planCode) { if ( !hasSubscription || subscription == null || - SubscriptionHelper.getPaymentProviderSubscriptionId(subscription) == null + (subscription.recurlySubscription_id == null && + subscription.paymentProvider?.subscriptionId == null) ) { return } @@ -246,8 +247,11 @@ async function attemptPaypalInvoiceCollection(recurlyAccountCode) { ) } -async function extendTrial(subscription, daysToExtend) { - await Modules.promises.hooks.fire('extendTrial', subscription, daysToExtend) +async function extendTrial(subscription, daysToExend) { + await RecurlyWrapper.promises.extendTrial( + subscription.recurlySubscription_id, + daysToExend + ) } /** @@ -274,12 +278,24 @@ async function previewAddonPurchase(userId, addOnCode) { * @param {number} quantity */ async function purchaseAddon(userId, addOnCode, quantity) { - await Modules.promises.hooks.fire( - 'purchaseAddOn', - userId, + const 
subscription = await getSubscriptionForUser(userId) + try { + await RecurlyClient.promises.getAddOn(subscription.planCode, addOnCode) + } catch (err) { + if (err instanceof recurly.errors.NotFoundError) { + throw new NotFoundError({ + message: 'Add-on not found', + info: { addOnCode }, + }) + } + throw err + } + const changeRequest = subscription.getRequestForAddOnPurchase( addOnCode, quantity ) + await RecurlyClient.promises.applySubscriptionChangeRequest(changeRequest) + await syncSubscription({ uuid: subscription.id }, userId) } /** @@ -289,17 +305,51 @@ async function purchaseAddon(userId, addOnCode, quantity) { * @param {string} addOnCode */ async function removeAddon(userId, addOnCode) { - await Modules.promises.hooks.fire('removeAddOn', userId, addOnCode) + const subscription = await getSubscriptionForUser(userId) + const changeRequest = subscription.getRequestForAddOnRemoval(addOnCode) + await RecurlyClient.promises.applySubscriptionChangeRequest(changeRequest) + await syncSubscription({ uuid: subscription.id }, userId) +} + +/** + * Returns the Recurly UUID for the given user + * + * Throws a NotFoundError if the subscription can't be found + * + * @param {string} userId + * @return {Promise} + */ +async function getSubscriptionForUser(userId) { + const subscription = + await SubscriptionLocator.promises.getUsersSubscription(userId) + const recurlyId = subscription?.recurlySubscription_id + if (recurlyId == null) { + throw new NotFoundError({ + message: 'Recurly subscription not found', + info: { userId }, + }) + } + + try { + const subscription = await RecurlyClient.promises.getSubscription(recurlyId) + return subscription + } catch (err) { + if (err instanceof recurly.errors.NotFoundError) { + throw new NotFoundError({ + message: 'Subscription not found', + info: { userId, recurlyId }, + }) + } else { + throw err + } + } } async function pauseSubscription(user, pauseCycles) { // only allow pausing on monthly plans not in a trial const { subscription } = 
await LimitationsManager.promises.userHasSubscription(user) - if ( - !subscription || - !SubscriptionHelper.getPaidSubscriptionState(subscription) - ) { + if (!subscription || !subscription.recurlyStatus) { throw new Error('No active subscription to pause') } @@ -310,9 +360,10 @@ async function pauseSubscription(user, pauseCycles) { ) { throw new Error('Can only pause monthly individual plans') } - const trialEndsAt = - SubscriptionHelper.getSubscriptionTrialEndsAt(subscription) - if (trialEndsAt && trialEndsAt > new Date()) { + if ( + subscription.recurlyStatus.trialEndsAt && + subscription.recurlyStatus.trialEndsAt > new Date() + ) { throw new Error('Cannot pause a subscription in a trial') } if (subscription.addOns?.length) { @@ -328,10 +379,7 @@ async function pauseSubscription(user, pauseCycles) { async function resumeSubscription(user) { const { subscription } = await LimitationsManager.promises.userHasSubscription(user) - if ( - !subscription || - !SubscriptionHelper.getPaidSubscriptionState(subscription) - ) { + if (!subscription || !subscription.recurlyStatus) { throw new Error('No active subscription to resume') } await RecurlyClient.promises.resumeSubscriptionByUuid( @@ -384,7 +432,7 @@ async function revertPlanChange( throw new IndeterminateInvoiceError( 'cant determine invoice to fail for plan revert', { - recurlySubscriptionId, + info: { recurlySubscriptionId }, } ) } diff --git a/services/web/app/src/Features/Subscription/SubscriptionHelper.js b/services/web/app/src/Features/Subscription/SubscriptionHelper.js index 429432349d..efb8895280 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionHelper.js +++ b/services/web/app/src/Features/Subscription/SubscriptionHelper.js @@ -1,25 +1,11 @@ const { formatCurrency } = require('../../util/currency') const GroupPlansData = require('./GroupPlansData') -const { isStandaloneAiAddOnPlanCode } = require('./PaymentProviderEntities') /** * If the user changes to a less expensive plan, we shouldn't 
apply the change immediately. * This is to avoid unintended/artifical credits on users Recurly accounts. */ -function shouldPlanChangeAtTermEnd(oldPlan, newPlan, isInTrial) { - if (isInTrial) { - // we should always upgrade or downgrade immediately if actively in trial - return false - } - - if ( - oldPlan.annual === newPlan.annual && - isStandaloneAiAddOnPlanCode(oldPlan.planCode) && - !isStandaloneAiAddOnPlanCode(newPlan.planCode) - ) { - // changing from an standalone AI add-on plan to a non-AI plan should not be considered a downgrade - return false - } +function shouldPlanChangeAtTermEnd(oldPlan, newPlan) { return oldPlan.price_in_cents > newPlan.price_in_cents } @@ -100,75 +86,7 @@ function generateInitialLocalizedGroupPrice(recommendedCurrency, locale) { } } -function isPaidSubscription(subscription) { - const hasRecurlySubscription = - subscription?.recurlySubscription_id && - subscription?.recurlySubscription_id !== '' - const hasStripeSubscription = - subscription?.paymentProvider?.subscriptionId && - subscription?.paymentProvider?.subscriptionId !== '' - return !!(subscription && (hasRecurlySubscription || hasStripeSubscription)) -} - -function isIndividualActivePaidSubscription(subscription) { - return ( - isPaidSubscription(subscription) && - subscription?.groupPlan === false && - subscription?.recurlyStatus?.state !== 'canceled' && - subscription?.paymentProvider?.state !== 'canceled' - ) -} - -function getPaymentProviderSubscriptionId(subscription) { - if (subscription?.recurlySubscription_id) { - return subscription.recurlySubscription_id - } - if (subscription?.paymentProvider?.subscriptionId) { - return subscription.paymentProvider.subscriptionId - } - return null -} - -function getPaidSubscriptionState(subscription) { - if (subscription?.recurlyStatus?.state) { - return subscription.recurlyStatus.state - } - if (subscription?.paymentProvider?.state) { - return subscription.paymentProvider.state - } - return null -} - -function 
getSubscriptionTrialStartedAt(subscription) { - if (subscription?.recurlyStatus?.trialStartedAt) { - return subscription.recurlyStatus?.trialStartedAt - } - return subscription?.paymentProvider?.trialStartedAt -} - -function getSubscriptionTrialEndsAt(subscription) { - if (subscription?.recurlyStatus?.trialEndsAt) { - return subscription.recurlyStatus?.trialEndsAt - } - return subscription?.paymentProvider?.trialEndsAt -} - -function isInTrial(trialEndsAt) { - if (!trialEndsAt) { - return false - } - - return trialEndsAt.getTime() > Date.now() -} - module.exports = { shouldPlanChangeAtTermEnd, generateInitialLocalizedGroupPrice, - isPaidSubscription, - isIndividualActivePaidSubscription, - getPaymentProviderSubscriptionId, - getPaidSubscriptionState, - getSubscriptionTrialStartedAt, - getSubscriptionTrialEndsAt, - isInTrial, } diff --git a/services/web/app/src/Features/Subscription/SubscriptionLocator.js b/services/web/app/src/Features/Subscription/SubscriptionLocator.js index c0c107eecf..978f4d41b7 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionLocator.js +++ b/services/web/app/src/Features/Subscription/SubscriptionLocator.js @@ -162,45 +162,6 @@ const SubscriptionLocator = { } : null }, - - async getUserSubscriptionStatus(userId) { - let usersSubscription = { personal: false, group: false } - - if (!userId) { - return usersSubscription - } - - const memberSubscriptions = - await SubscriptionLocator.getMemberSubscriptions(userId) - - const hasActiveGroupSubscription = memberSubscriptions.some( - subscription => - subscription.recurlyStatus?.state === 'active' && subscription.groupPlan - ) - if (hasActiveGroupSubscription) { - // Member of a group plan - usersSubscription = { ...usersSubscription, group: true } - } - - const personalSubscription = - await SubscriptionLocator.getUsersSubscription(userId) - - if (personalSubscription) { - const hasActivePersonalSubscription = - personalSubscription.recurlyStatus?.state === 'active' - if 
(hasActivePersonalSubscription) { - if (personalSubscription.groupPlan) { - // Owner of a group plan - usersSubscription = { ...usersSubscription, group: true } - } else { - // Owner of an individual plan - usersSubscription = { ...usersSubscription, personal: true } - } - } - } - - return usersSubscription - }, } module.exports = { diff --git a/services/web/app/src/Features/Subscription/SubscriptionUpdater.js b/services/web/app/src/Features/Subscription/SubscriptionUpdater.js index 9de194f262..b0e24ce5ad 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionUpdater.js +++ b/services/web/app/src/Features/Subscription/SubscriptionUpdater.js @@ -10,7 +10,6 @@ const { DeletedSubscription } = require('../../models/DeletedSubscription') const logger = require('@overleaf/logger') const Features = require('../../infrastructure/Features') const UserAuditLogHandler = require('../User/UserAuditLogHandler') -const UserUpdater = require('../User/UserUpdater') const AccountMappingHelper = require('../Analytics/AccountMappingHelper') const { SSOConfig } = require('../../models/SSOConfig') const mongoose = require('../../infrastructure/Mongoose') @@ -146,20 +145,6 @@ async function removeUserFromGroup(subscriptionId, userId, auditLog) { { _id: subscriptionId }, { $pull: { member_ids: userId } } ).exec() - - const subscription = await Subscription.findById(subscriptionId) - if (subscription.managedUsersEnabled) { - await UserUpdater.promises.updateUser( - { _id: userId }, - { - $unset: { - 'enrollment.managedBy': 1, - 'enrollment.enrolledAt': 1, - }, - } - ) - } - await FeaturesUpdater.promises.refreshFeatures( userId, 'remove-user-from-group' @@ -333,7 +318,38 @@ async function updateSubscriptionFromRecurly( requesterData ) { if (recurlySubscription.state === 'expired') { - await handleExpiredSubscription(subscription, requesterData) + const hasManagedUsersFeature = + Features.hasFeature('saas') && subscription?.managedUsersEnabled + + // If a payment lapses and if 
the group is managed or has group SSO, as a temporary measure we need to + // make sure that the group continues as-is and no destructive actions are taken. + if (hasManagedUsersFeature) { + logger.warn( + { subscriptionId: subscription._id }, + 'expired subscription has managedUsers feature enabled, skipping deletion' + ) + } else { + let hasGroupSSOEnabled = false + if (subscription?.ssoConfig) { + const ssoConfig = await SSOConfig.findOne({ + _id: subscription.ssoConfig._id || subscription.ssoConfig, + }) + .lean() + .exec() + if (ssoConfig.enabled) { + hasGroupSSOEnabled = true + } + } + + if (hasGroupSSOEnabled) { + logger.warn( + { subscriptionId: subscription._id }, + 'expired subscription has groupSSO feature enabled, skipping deletion' + ) + } else { + await deleteSubscription(subscription, requesterData) + } + } return } const updatedPlanCode = recurlySubscription.plan.plan_code @@ -434,41 +450,6 @@ async function _sendUserGroupPlanCodeUserProperty(userId) { } } -async function handleExpiredSubscription(subscription, requesterData) { - const hasManagedUsersFeature = - Features.hasFeature('saas') && subscription?.managedUsersEnabled - - // If a payment lapses and if the group is managed or has group SSO, as a temporary measure we need to - // make sure that the group continues as-is and no destructive actions are taken. 
- if (hasManagedUsersFeature) { - logger.warn( - { subscriptionId: subscription._id }, - 'expired subscription has managedUsers feature enabled, skipping deletion' - ) - } else { - let hasGroupSSOEnabled = false - if (subscription?.ssoConfig) { - const ssoConfig = await SSOConfig.findOne({ - _id: subscription.ssoConfig._id || subscription.ssoConfig, - }) - .lean() - .exec() - if (ssoConfig.enabled) { - hasGroupSSOEnabled = true - } - } - - if (hasGroupSSOEnabled) { - logger.warn( - { subscriptionId: subscription._id }, - 'expired subscription has groupSSO feature enabled, skipping deletion' - ) - } else { - await deleteSubscription(subscription, requesterData) - } - } -} - async function _sendSubscriptionEvent(userId, subscriptionId, event) { const subscription = await Subscription.findOne( { _id: subscriptionId }, @@ -522,7 +503,7 @@ async function setRestorePoint(subscriptionId, planCode, addOns, consumed) { } if (consumed) { - update.$inc = { timesRevertedDueToFailedPayment: 1 } + update.$inc = { revertedDueToFailedPayment: 1 } } await Subscription.updateOne({ _id: subscriptionId }, update).exec() @@ -587,6 +568,5 @@ module.exports = { setRestorePoint, setSubscriptionWasReverted, voidRestorePoint, - handleExpiredSubscription, }, } diff --git a/services/web/app/src/Features/Subscription/SubscriptionViewModelBuilder.js b/services/web/app/src/Features/Subscription/SubscriptionViewModelBuilder.js index 3681975a38..441d9c2c9b 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionViewModelBuilder.js +++ b/services/web/app/src/Features/Subscription/SubscriptionViewModelBuilder.js @@ -1,5 +1,6 @@ // ts-check const Settings = require('@overleaf/settings') +const RecurlyWrapper = require('./RecurlyWrapper') const PlansLocator = require('./PlansLocator') const { isStandaloneAiAddOnPlanCode, @@ -7,6 +8,7 @@ const { } = require('./PaymentProviderEntities') const SubscriptionFormatters = require('./SubscriptionFormatters') const SubscriptionLocator = 
require('./SubscriptionLocator') +const SubscriptionUpdater = require('./SubscriptionUpdater') const InstitutionsGetter = require('../Institutions/InstitutionsGetter') const InstitutionsManager = require('../Institutions/InstitutionsManager') const PublishersGetter = require('../Publishers/PublishersGetter') @@ -225,7 +227,6 @@ async function buildUsersSubscriptionViewModel(user, locale = 'en') { // don't return subscription payment information delete personalSubscription.paymentProvider delete personalSubscription.recurly - delete personalSubscription.recurlySubscription_id const tax = paymentRecord.subscription.taxAmount || 0 // Some plans allow adding more seats than the base plan provides. @@ -373,6 +374,15 @@ async function buildUsersSubscriptionViewModel(user, locale = 'en') { } } +/** + * @param {{_id: string}} user + * @returns {Promise} + */ +async function getBestSubscription(user) { + const { bestSubscription } = await getUsersSubscriptionDetails(user) + return bestSubscription +} + /** * @param {{_id: string}} user * @returns {Promise<{bestSubscription:Subscription,individualSubscription:DBSubscription|null,memberGroupSubscriptions:DBSubscription[]}>} @@ -390,18 +400,15 @@ async function getUsersSubscriptionDetails(user) { if ( individualSubscription && !individualSubscription.customAccount && - SubscriptionHelper.getPaymentProviderSubscriptionId( - individualSubscription - ) && - !SubscriptionHelper.getPaidSubscriptionState(individualSubscription) + individualSubscription.recurlySubscription_id && + !individualSubscription.recurlyStatus?.state ) { - const paymentResults = await Modules.promises.hooks.fire( - 'getPaymentFromRecordPromise', - individualSubscription + const recurlySubscription = await RecurlyWrapper.promises.getSubscription( + individualSubscription.recurlySubscription_id, + { includeAccount: true } ) - await Modules.promises.hooks.fire( - 'syncSubscription', - paymentResults[0]?.subscription, + await 
SubscriptionUpdater.promises.updateSubscriptionFromRecurly( + recurlySubscription, individualSubscription ) individualSubscription = @@ -470,7 +477,7 @@ async function getUsersSubscriptionDetails(user) { return { bestSubscription, individualSubscription, memberGroupSubscriptions } } -function buildPlansList(currentPlan, isInTrial) { +function buildPlansList(currentPlan) { const { plans } = Settings const allPlans = {} @@ -484,11 +491,7 @@ function buildPlansList(currentPlan, isInTrial) { result.planCodesChangingAtTermEnd = _.map( _.filter(plans, plan => { if (!plan.hideFromUsers) { - return SubscriptionHelper.shouldPlanChangeAtTermEnd( - currentPlan, - plan, - isInTrial - ) + return SubscriptionHelper.shouldPlanChangeAtTermEnd(currentPlan, plan) } }), 'planCode' @@ -537,8 +540,7 @@ function _isPlanEqualOrBetter(planA, planB) { function _getRemainingTrialDays(subscription) { const now = new Date() - const trialEndDate = - SubscriptionHelper.getSubscriptionTrialEndsAt(subscription) + const trialEndDate = subscription.recurlyStatus?.trialEndsAt return trialEndDate && trialEndDate > now ? 
Math.ceil( (trialEndDate.getTime() - now.getTime()) / (24 * 60 * 60 * 1000) @@ -573,8 +575,8 @@ function buildGroupSubscriptionForView(groupSubscription) { } } -function buildPlansListForSubscriptionDash(currentPlan, isInTrial) { - const allPlansData = buildPlansList(currentPlan, isInTrial) +function buildPlansListForSubscriptionDash(currentPlan) { + const allPlansData = buildPlansList(currentPlan) const plans = [] // only list individual and visible plans for "change plans" UI if (allPlansData.studentAccounts) { @@ -603,8 +605,10 @@ module.exports = { buildUsersSubscriptionViewModel: callbackify(buildUsersSubscriptionViewModel), buildPlansList, buildPlansListForSubscriptionDash, + getBestSubscription: callbackify(getBestSubscription), promises: { buildUsersSubscriptionViewModel, + getBestSubscription, getUsersSubscriptionDetails, }, } diff --git a/services/web/app/src/Features/Subscription/TeamInvitesController.mjs b/services/web/app/src/Features/Subscription/TeamInvitesController.mjs index 1eb9ac2907..b2c9840de4 100644 --- a/services/web/app/src/Features/Subscription/TeamInvitesController.mjs +++ b/services/web/app/src/Features/Subscription/TeamInvitesController.mjs @@ -4,7 +4,6 @@ import OError from '@overleaf/o-error' import TeamInvitesHandler from './TeamInvitesHandler.js' import SessionManager from '../Authentication/SessionManager.js' import SubscriptionLocator from './SubscriptionLocator.js' -import SubscriptionHelper from './SubscriptionHelper.js' import ErrorController from '../Errors/ErrorController.js' import EmailHelper from '../Helpers/EmailHelper.js' import UserGetter from '../User/UserGetter.js' @@ -15,7 +14,6 @@ import EmailHandler from '../Email/EmailHandler.js' import { RateLimiter } from '../../infrastructure/RateLimiter.js' import Modules from '../../infrastructure/Modules.js' import UserAuditLogHandler from '../User/UserAuditLogHandler.js' -import { sanitizeSessionUserForFrontEnd } from '../../infrastructure/FrontEndUser.js' const rateLimiters 
= { resendGroupInvite: new RateLimiter('resend-group-invite', { @@ -89,10 +87,12 @@ async function viewInvite(req, res, next) { const personalSubscription = await SubscriptionLocator.promises.getUsersSubscription(userId) - const hasIndividualPaidSubscription = - SubscriptionHelper.isIndividualActivePaidSubscription( - personalSubscription - ) + const hasIndividualRecurlySubscription = + personalSubscription && + personalSubscription.groupPlan === false && + personalSubscription.recurlyStatus?.state !== 'canceled' && + personalSubscription.recurlySubscription_id && + personalSubscription.recurlySubscription_id !== '' if (subscription?.managedUsersEnabled) { if (!subscription.populated('groupPolicy')) { @@ -133,9 +133,6 @@ async function viewInvite(req, res, next) { logger.error({ err }, 'error getting subscription admin email') } - const usersSubscription = - await SubscriptionLocator.promises.getUserSubscriptionStatus(userId) - return res.render('subscriptions/team/invite-managed', { inviterName: invite.inviterName, inviteToken: invite.token, @@ -144,8 +141,7 @@ async function viewInvite(req, res, next) { currentManagedUserAdminEmail, groupSSOActive, subscriptionId: subscription._id.toString(), - user: sanitizeSessionUserForFrontEnd(sessionUser), - usersSubscription, + user: sessionUser, }) } else { let currentManagedUserAdminEmail @@ -159,13 +155,13 @@ async function viewInvite(req, res, next) { return res.render('subscriptions/team/invite', { inviterName: invite.inviterName, inviteToken: invite.token, - hasIndividualPaidSubscription, + hasIndividualRecurlySubscription, expired: req.query.expired, userRestrictions: Array.from(req.userRestrictions || []), currentManagedUserAdminEmail, groupSSOActive, subscriptionId: subscription._id.toString(), - user: sanitizeSessionUserForFrontEnd(sessionUser), + user: sessionUser, }) } } else { @@ -207,7 +203,7 @@ async function acceptInvite(req, res, next) { const subscription = await TeamInvitesHandler.promises.acceptInvite( 
token, userId, - req.ip + { initiatorId: userId, ipAddress: req.ip } ) const groupSSOActive = ( await Modules.promises.hooks.fire('hasGroupSSOEnabled', subscription) diff --git a/services/web/app/src/Features/Subscription/TeamInvitesHandler.js b/services/web/app/src/Features/Subscription/TeamInvitesHandler.js index f7a4908355..a89f0612f2 100644 --- a/services/web/app/src/Features/Subscription/TeamInvitesHandler.js +++ b/services/web/app/src/Features/Subscription/TeamInvitesHandler.js @@ -22,7 +22,6 @@ const { callbackifyMultiResult, } = require('@overleaf/promise-utils') const NotificationsBuilder = require('../Notifications/NotificationsBuilder') -const RecurlyClient = require('./RecurlyClient') async function getInvite(token) { const subscription = await Subscription.findOne({ @@ -65,50 +64,11 @@ async function importInvite(subscription, inviterName, email, token, sentAt) { return subscription.save() } -async function _deleteUserSubscription(userId, ipAddress) { - // Delete released user subscription to make it on a free plan - const subscription = - await SubscriptionLocator.promises.getUsersSubscription(userId) - - if (subscription) { - logger.debug( - { - subscriptionId: subscription._id, - }, - 'deleting user subscription' - ) - - const deleterData = { - id: userId, - ip: ipAddress, - } - await SubscriptionUpdater.promises.deleteSubscription( - subscription, - deleterData - ) - - // Terminate the subscription in Recurly - if (subscription.recurlySubscription_id) { - try { - await RecurlyClient.promises.terminateSubscriptionByUuid( - subscription.recurlySubscription_id - ) - } catch (err) { - logger.error( - { err, subscriptionId: subscription._id }, - 'terminating subscription failed' - ) - } - } - } -} - -async function acceptInvite(token, userId, ipAddress) { +async function acceptInvite(token, userId, auditLog) { const { invite, subscription } = await getInvite(token) if (!invite) { throw new Errors.NotFoundError('invite not found') } - const auditLog = { 
initiatorId: userId, ipAddress } await SubscriptionUpdater.promises.addUserToGroup( subscription._id, @@ -117,7 +77,6 @@ async function acceptInvite(token, userId, ipAddress) { ) if (subscription.managedUsersEnabled) { - await _deleteUserSubscription(userId, ipAddress) await Modules.promises.hooks.fire( 'enrollInManagedSubscription', userId, diff --git a/services/web/app/src/Features/Templates/TemplatesController.js b/services/web/app/src/Features/Templates/TemplatesController.js index 257de2b0c3..b238527430 100644 --- a/services/web/app/src/Features/Templates/TemplatesController.js +++ b/services/web/app/src/Features/Templates/TemplatesController.js @@ -4,9 +4,13 @@ const TemplatesManager = require('./TemplatesManager') const ProjectHelper = require('../Project/ProjectHelper') const logger = require('@overleaf/logger') const { expressify } = require('@overleaf/promise-utils') +const SplitTestHandler = require('../SplitTests/SplitTestHandler') const TemplatesController = { async getV1Template(req, res) { + // Read split test assignment so that it's available for Pug to read + await SplitTestHandler.promises.getAssignment(req, res, 'core-pug-bs5') + const templateId = req.params.Template_version_id const templateVersionId = req.query.version // if (!/^[0-9]+$/.test(templateVersionId) || !/^[0-9]+$/.test(templateId)) { diff --git a/services/web/app/src/Features/Tutorial/TutorialController.mjs b/services/web/app/src/Features/Tutorial/TutorialController.mjs index b4ab3f6727..e5fc940b34 100644 --- a/services/web/app/src/Features/Tutorial/TutorialController.mjs +++ b/services/web/app/src/Features/Tutorial/TutorialController.mjs @@ -15,7 +15,6 @@ const VALID_KEYS = [ 'editor-popup-ux-survey', 'wf-features-moved', 'review-mode', - 'new-error-logs-promo', ] async function completeTutorial(req, res, next) { diff --git a/services/web/app/src/Features/User/SAMLIdentityManager.js b/services/web/app/src/Features/User/SAMLIdentityManager.js index 0d3c382775..dc790c59ca 100644 --- 
a/services/web/app/src/Features/User/SAMLIdentityManager.js +++ b/services/web/app/src/Features/User/SAMLIdentityManager.js @@ -210,13 +210,9 @@ async function getUser(providerId, externalUserId, userIdAttribute) { ) } const user = await User.findOne({ - samlIdentifiers: { - $elemMatch: { - externalUserId: externalUserId.toString(), - providerId: providerId.toString(), - userIdAttribute: userIdAttribute.toString(), - }, - }, + 'samlIdentifiers.externalUserId': externalUserId.toString(), + 'samlIdentifiers.providerId': providerId.toString(), + 'samlIdentifiers.userIdAttribute': userIdAttribute.toString(), }).exec() return user diff --git a/services/web/app/src/Features/User/UserAuditLogHandler.js b/services/web/app/src/Features/User/UserAuditLogHandler.js index 87cd810161..b1d404303e 100644 --- a/services/web/app/src/Features/User/UserAuditLogHandler.js +++ b/services/web/app/src/Features/User/UserAuditLogHandler.js @@ -8,7 +8,6 @@ function _canHaveNoIpAddressId(operation, info) { if (operation === 'must-reset-password-set') return true if (operation === 'remove-email' && info.script) return true if (operation === 'release-managed-user' && info.script) return true - if (operation === 'unlink-dropbox' && info.batch) return true return false } diff --git a/services/web/app/src/Features/User/UserController.js b/services/web/app/src/Features/User/UserController.js index cabab8c891..04be431801 100644 --- a/services/web/app/src/Features/User/UserController.js +++ b/services/web/app/src/Features/User/UserController.js @@ -387,9 +387,6 @@ async function updateUserSettings(req, res, next) { if (req.body.mathPreview != null) { user.ace.mathPreview = req.body.mathPreview } - if (req.body.breadcrumbs != null) { - user.ace.breadcrumbs = Boolean(req.body.breadcrumbs) - } if (req.body.referencesSearchMode != null) { const mode = req.body.referencesSearchMode === 'simple' ? 
'simple' : 'advanced' diff --git a/services/web/app/src/Features/User/UserDeleter.js b/services/web/app/src/Features/User/UserDeleter.js index c8d9891bf9..662c51ca65 100644 --- a/services/web/app/src/Features/User/UserDeleter.js +++ b/services/web/app/src/Features/User/UserDeleter.js @@ -87,29 +87,17 @@ async function deleteMongoUser(userId) { } async function expireDeletedUser(userId) { - logger.info({ userId }, 'expiring deleted user') - try { - logger.info({ userId }, 'firing expireDeletedUser hook') - await Modules.promises.hooks.fire('expireDeletedUser', userId) - logger.info({ userId }, 'removing deleted user feedback records') - await Feedback.deleteMany({ userId }).exec() - logger.info({ userId }, 'removing deleted user onboarding data') - await OnboardingDataCollectionManager.deleteOnboardingDataCollection(userId) - logger.info({ userId }, 'redacting PII from the deleted user record') - const deletedUser = await DeletedUser.findOne({ - 'deleterData.deletedUserId': userId, - }).exec() - deletedUser.user = undefined - deletedUser.deleterData.deleterIpAddress = undefined - await deletedUser.save() - logger.info({ userId }, 'deleted user expiry complete') - } catch (error) { - logger.warn( - { error, userId }, - 'something went wrong expiring the deleted user' - ) - throw error - } + await Modules.promises.hooks.fire('expireDeletedUser', userId) + const deletedUser = await DeletedUser.findOne({ + 'deleterData.deletedUserId': userId, + }).exec() + + await Feedback.deleteMany({ userId }).exec() + await OnboardingDataCollectionManager.deleteOnboardingDataCollection(userId) + + deletedUser.user = undefined + deletedUser.deleterData.deleterIpAddress = undefined + await deletedUser.save() } async function expireDeletedUsersAfterDuration() { @@ -124,27 +112,11 @@ async function expireDeletedUsersAfterDuration() { if (deletedUsers.length === 0) { return } - logger.info( - { deletedUsers: deletedUsers.length, retentionPeriodInDays: DURATION }, - 'expiring batch of 
deleted users older than retention period' - ) - try { - for (let i = 0; i < deletedUsers.length; i++) { - const deletedUserId = deletedUsers[i].deleterData.deletedUserId - await expireDeletedUser(deletedUserId) - logger.info({ deletedUserId }, 'removing deleted user audit log entries') - await UserAuditLogEntry.deleteMany({ userId: deletedUserId }).exec() - } - logger.info( - { deletedUsers: deletedUsers.length }, - 'batch of deleted users expired successfully' - ) - } catch (error) { - logger.warn( - { error }, - 'something went wrong expiring batch of deleted users' - ) - throw error + + for (let i = 0; i < deletedUsers.length; i++) { + const deletedUserId = deletedUsers[i].deleterData.deletedUserId + await expireDeletedUser(deletedUserId) + await UserAuditLogEntry.deleteMany({ userId: deletedUserId }).exec() } } diff --git a/services/web/app/src/Features/User/UserGetter.js b/services/web/app/src/Features/User/UserGetter.js index a5fbe42651..bce4568880 100644 --- a/services/web/app/src/Features/User/UserGetter.js +++ b/services/web/app/src/Features/User/UserGetter.js @@ -269,7 +269,6 @@ const UserGetter = { getUsers(query, projection, callback) { try { query = normalizeMultiQuery(query) - if (query?._id?.$in?.length === 0) return callback(null, []) // shortcut for getUsers([]) db.users.find(query, { projection }).toArray(callback) } catch (err) { callback(err) diff --git a/services/web/app/src/Features/User/UserPagesController.mjs b/services/web/app/src/Features/User/UserPagesController.mjs index 2f5d46d0d3..596357da76 100644 --- a/services/web/app/src/Features/User/UserPagesController.mjs +++ b/services/web/app/src/Features/User/UserPagesController.mjs @@ -174,7 +174,6 @@ async function settingsPage(req, res) { gitBridgeEnabled: Settings.enableGitBridge, isSaas: Features.hasFeature('saas'), memberOfSSOEnabledGroups, - capabilities: [...req.capabilitySet], }) } diff --git a/services/web/app/src/Features/User/UserUpdater.js 
b/services/web/app/src/Features/User/UserUpdater.js index f21ee9a1ed..627e73875d 100644 --- a/services/web/app/src/Features/User/UserUpdater.js +++ b/services/web/app/src/Features/User/UserUpdater.js @@ -11,6 +11,7 @@ const EmailHandler = require('../Email/EmailHandler') const EmailHelper = require('../Helpers/EmailHelper') const Errors = require('../Errors/Errors') const NewsletterManager = require('../Newsletter/NewsletterManager') +const RecurlyWrapper = require('../Subscription/RecurlyWrapper') const UserAuditLogHandler = require('./UserAuditLogHandler') const AnalyticsManager = require('../Analytics/AnalyticsManager') const SubscriptionLocator = require('../Subscription/SubscriptionLocator') @@ -251,11 +252,7 @@ async function setDefaultEmailAddress( } try { - await Modules.promises.hooks.fire( - 'updateAccountEmailAddress', - user._id, - email - ) + await RecurlyWrapper.promises.updateAccountEmailAddress(user._id, email) } catch (error) { // errors are ignored } diff --git a/services/web/app/src/Features/UserMembership/UserMembershipController.mjs b/services/web/app/src/Features/UserMembership/UserMembershipController.mjs index 4be1221255..aaa8fa5812 100644 --- a/services/web/app/src/Features/UserMembership/UserMembershipController.mjs +++ b/services/web/app/src/Features/UserMembership/UserMembershipController.mjs @@ -31,11 +31,8 @@ async function manageGroupMembers(req, res, next) { ) const ssoConfig = await SSOConfig.findById(subscription.ssoConfig).exec() const plan = PlansLocator.findLocalPlanInSettings(subscription.planCode) - const userId = SessionManager.getLoggedInUserId(req.session)?.toString() + const userId = SessionManager.getLoggedInUserId(req.session) const isAdmin = subscription.admin_id.toString() === userId - const isUserGroupManager = - Boolean(subscription.manager_ids?.some(id => id.toString() === userId)) && - !isAdmin const recurlySubscription = subscription.recurlySubscription_id ? 
await RecurlyClient.promises.getSubscription( subscription.recurlySubscription_id @@ -54,7 +51,6 @@ async function manageGroupMembers(req, res, next) { users, groupSize: subscription.membersLimit, managedUsersActive: subscription.managedUsersEnabled, - isUserGroupManager, groupSSOActive: ssoConfig?.enabled, canUseFlexibleLicensing: plan?.canUseFlexibleLicensing, canUseAddSeatsFeature, diff --git a/services/web/app/src/infrastructure/ExpressLocals.js b/services/web/app/src/infrastructure/ExpressLocals.js index 6fe955d386..487ea8ff41 100644 --- a/services/web/app/src/infrastructure/ExpressLocals.js +++ b/services/web/app/src/infrastructure/ExpressLocals.js @@ -19,7 +19,6 @@ const { const { addOptionalCleanupHandlerAfterDrainingConnections, } = require('./GracefulShutdown') -const { sanitizeSessionUserForFrontEnd } = require('./FrontEndUser') const IEEE_BRAND_ID = Settings.ieeeBrandId @@ -301,7 +300,11 @@ module.exports = function (webRouter, privateApiRouter, publicApiRouter) { webRouter.use(function (req, res, next) { const currentUser = SessionManager.getSessionUser(req.session) if (currentUser != null) { - res.locals.user = sanitizeSessionUserForFrontEnd(currentUser) + res.locals.user = { + email: currentUser.email, + first_name: currentUser.first_name, + last_name: currentUser.last_name, + } } next() }) diff --git a/services/web/app/src/infrastructure/Features.js b/services/web/app/src/infrastructure/Features.js index 1b7b060a7d..b58028539c 100644 --- a/services/web/app/src/infrastructure/Features.js +++ b/services/web/app/src/infrastructure/Features.js @@ -54,7 +54,7 @@ const Features = { case 'registration-page': return ( !Features.externalAuthenticationSystemUsed() || - Boolean(Settings.overleaf) || Settings.oidc?.disableJITAccountCreation + Boolean(Settings.overleaf) ) case 'registration': return Boolean(Settings.overleaf) diff --git a/services/web/app/src/infrastructure/FrontEndUser.js b/services/web/app/src/infrastructure/FrontEndUser.js deleted file mode 
100644 index 5a4af9868c..0000000000 --- a/services/web/app/src/infrastructure/FrontEndUser.js +++ /dev/null @@ -1,15 +0,0 @@ -function sanitizeSessionUserForFrontEnd(sessionUser) { - if (sessionUser != null) { - return { - email: sessionUser.email, - first_name: sessionUser.first_name, - last_name: sessionUser.last_name, - } - } - - return null -} - -module.exports = { - sanitizeSessionUserForFrontEnd, -} diff --git a/services/web/app/src/infrastructure/mongodb.js b/services/web/app/src/infrastructure/mongodb.js index 24103b2d82..a3342c6575 100644 --- a/services/web/app/src/infrastructure/mongodb.js +++ b/services/web/app/src/infrastructure/mongodb.js @@ -61,6 +61,7 @@ const db = { projectHistoryFailures: internalDb.collection('projectHistoryFailures'), projectHistoryGlobalBlobs: internalDb.collection('projectHistoryGlobalBlobs'), projectHistoryLabels: internalDb.collection('projectHistoryLabels'), + projectHistoryMetaData: internalDb.collection('projectHistoryMetaData'), projectHistorySyncState: internalDb.collection('projectHistorySyncState'), projectInvites: internalDb.collection('projectInvites'), projects: internalDb.collection('projects'), diff --git a/services/web/app/src/models/GroupPolicy.js b/services/web/app/src/models/GroupPolicy.js index 55728a2415..e975834008 100644 --- a/services/web/app/src/models/GroupPolicy.js +++ b/services/web/app/src/models/GroupPolicy.js @@ -27,9 +27,6 @@ const GroupPolicySchema = new Schema( // User can't use the chat feature userCannotUseChat: Boolean, - - // User can't use the Dropbox feature - userCannotUseDropbox: Boolean, }, { minimize: false } ) diff --git a/services/web/app/src/models/SSOConfig.js b/services/web/app/src/models/SSOConfig.js index 6734b29f57..5d50d51d02 100644 --- a/services/web/app/src/models/SSOConfig.js +++ b/services/web/app/src/models/SSOConfig.js @@ -10,7 +10,6 @@ const SSOConfigSchema = new Schema( userLastNameAttribute: { type: String }, validated: { type: Boolean, default: false }, enabled: { 
type: Boolean, default: false }, - useSettingsUKAMF: { type: Boolean, default: false }, }, { diff --git a/services/web/app/src/models/User.js b/services/web/app/src/models/User.js index c1701023c4..d228c46b82 100644 --- a/services/web/app/src/models/User.js +++ b/services/web/app/src/models/User.js @@ -97,7 +97,6 @@ const UserSchema = new Schema( fontFamily: { type: String }, lineHeight: { type: String }, mathPreview: { type: Boolean, default: true }, - breadcrumbs: { type: Boolean, default: true }, referencesSearchMode: { type: String, default: 'advanced' }, // 'advanced' or 'simple' enableNewEditor: { type: Boolean }, }, diff --git a/services/web/app/src/router.mjs b/services/web/app/src/router.mjs index d336fc965b..2e31c5a30f 100644 --- a/services/web/app/src/router.mjs +++ b/services/web/app/src/router.mjs @@ -917,12 +917,6 @@ async function initialize(webRouter, privateApiRouter, publicApiRouter) { NotificationsController.markNotificationAsRead ) - webRouter.get( - '/user/notification/:notificationId', - AuthenticationController.requireLogin(), - NotificationsController.getNotification - ) - // Deprecated in favour of /internal/project/:project_id but still used by versioning privateApiRouter.get( '/project/:project_id/details', diff --git a/services/web/app/views/_customer_io.pug b/services/web/app/views/_customer_io.pug index 781dfaab13..81d75f7d7f 100644 --- a/services/web/app/views/_customer_io.pug +++ b/services/web/app/views/_customer_io.pug @@ -1,12 +1,10 @@ if(customerIoEnabled && ExposedSettings.cioWriteKey && ExposedSettings.cioSiteId) - script(type="text/javascript", id="cio-loader", nonce=scriptNonce, data-best-subscription=(usersBestSubscription && usersBestSubscription.type), data-cio-write-key=ExposedSettings.cioWriteKey, data-cio-site-id=ExposedSettings.cioSiteId, data-session-analytics-id=getSessionAnalyticsId(), data-user-id=getLoggedInUserId()). 
+ script(type="text/javascript", id="cio-loader", nonce=scriptNonce, data-cio-write-key=ExposedSettings.cioWriteKey, data-cio-site-id=ExposedSettings.cioSiteId, data-session-analytics-id=getSessionAnalyticsId(), data-user-id=getLoggedInUserId()). var cioSettings = document.querySelector('#cio-loader').dataset; var analyticsId = cioSettings.sessionAnalyticsId; var siteId = cioSettings.cioSiteId; var writeKey = cioSettings.cioWriteKey; var userId = cioSettings.userId; - var usersBestSubscription = cioSettings.bestSubscription - !function(){var i="cioanalytics", analytics=(window[i]=window[i]||[]);if(!analytics.initialize)if(analytics.invoked)window.console&&console.error&&console.error("Snippet included twice.");else{analytics.invoked=!0;analytics.methods=["trackSubmit","trackClick","trackLink","trackForm","pageview","identify","reset","group","track","ready","alias","debug","page","once","off","on","addSourceMiddleware","addIntegrationMiddleware","setAnonymousId","addDestinationMiddleware"];analytics.factory=function(e){return function(){var t=Array.prototype.slice.call(arguments);t.unshift(e);analytics.push(t);return analytics}};for(var e=0;e 0) - p.thanks The Overleaf Bonus Program has been discontinued, but you'll continue to have access to the features you already earned. - else - p.thanks The Overleaf Bonus Program has been discontinued. - p.thanks Please contact us if you have any questions. - - if (refered_user_count > 0) - .row.ab-bonus - .col-lg-10.offset-lg-1(style="position: relative; height: 30px; margin-top: 20px;") - - for (var i = 0; i <= 10; i++) { - if (refered_user_count == i) - .number(style="left: "+i+"0%").active #{i} - else - .number(style="left: "+i+"0%") #{i} - - } - - .row.ab-bonus - .col-lg-10.offset-lg-1 - .progress - .progress-bar.progress-bar-info(style="width: "+refered_user_count+"0%") - - .row.ab-bonus - .col-lg-10.offset-lg-1(style="position: relative; height: 110px;") - .perk(style="left: 10%;", class = refered_user_count >= 1 ? 
"active" : "") #{translate("one_free_collab")} - .perk(style="left: 30%;", class = refered_user_count >= 3 ? "active" : "") #{translate("three_free_collab")} - .perk(style="left: 60%;", class = refered_user_count >= 6 ? "active" : "") #{translate("free_dropbox_and_history")} + #{translate("three_free_collab")} - .perk(style="left: 90%;", class = refered_user_count >= 9 ? "active" : "") #{translate("free_dropbox_and_history")} + #{translate("unlimited_collabs")} - .row   - - .row.ab-bonus - .col-lg-10.offset-lg-1.bonus-status - if (refered_user_count == 1) - p.thanks You’ve introduced 1 person to #{settings.appName}. + .container-fluid + .row + .col-md-10.col-md-offset-1 + if (refered_user_count > 0) + p.thanks The Overleaf Bonus Program has been discontinued, but you'll continue to have access to the features you already earned. + else + p.thanks The Overleaf Bonus Program has been discontinued. + p.thanks Please contact us if you have any questions. + + if (refered_user_count > 0) + .row.ab-bonus + .col-md-10.col-md-offset-1.bonus-banner(style="position: relative; height: 30px; margin-top: 20px;") + - for (var i = 0; i <= 10; i++) { + if (refered_user_count == i) + .number(style="left: "+i+"0%").active #{i} else - p.thanks You’ve introduced #{refered_user_count} people to #{settings.appName}. + .number(style="left: "+i+"0%") #{i} + - } + + .row.ab-bonus + .col-md-10.col-md-offset-1.bonus-banner + .progress + .progress-bar.progress-bar-info(style="width: "+refered_user_count+"0%") + + .row.ab-bonus + .col-md-10.col-md-offset-1.bonus-banner(style="position: relative; height: 110px;") + .perk(style="left: 10%;", class = refered_user_count >= 1 ? "active" : "") #{translate("one_free_collab")} + .perk(style="left: 30%;", class = refered_user_count >= 3 ? "active" : "") #{translate("three_free_collab")} + .perk(style="left: 60%;", class = refered_user_count >= 6 ? 
"active" : "") #{translate("free_dropbox_and_history")} + #{translate("three_free_collab")} + .perk(style="left: 90%;", class = refered_user_count >= 9 ? "active" : "") #{translate("free_dropbox_and_history")} + #{translate("unlimited_collabs")} + .row   + + .row.ab-bonus + .col-md-10.col-md-offset-1.bonus-banner.bonus-status + if (refered_user_count == 1) + p.thanks You’ve introduced 1 person to #{settings.appName}. + else + p.thanks You’ve introduced #{refered_user_count} people to #{settings.appName}. diff --git a/services/web/app/views/subscriptions/dashboard-react.pug b/services/web/app/views/subscriptions/dashboard-react.pug index 2b6251f2a3..d6a1bff49c 100644 --- a/services/web/app/views/subscriptions/dashboard-react.pug +++ b/services/web/app/views/subscriptions/dashboard-react.pug @@ -27,7 +27,6 @@ block append meta meta(name="ol-user" data-type="json" content=user) if (personalSubscription && personalSubscription.payment) meta(name="ol-recurlyApiKey" content=settings.apis.recurly.publicKey) - meta(name="ol-stripeUKApiKey" content=settings.apis.stripeUK.publishableKey) meta(name="ol-recommendedCurrency" content=personalSubscription.payment.currency) meta(name="ol-groupPlans" data-type="json" content=groupPlans) diff --git a/services/web/app/views/subscriptions/plans/_faq_new.pug b/services/web/app/views/subscriptions/plans/_faq_new.pug index 3c926fb22d..baefb6ed3f 100644 --- a/services/web/app/views/subscriptions/plans/_faq_new.pug +++ b/services/web/app/views/subscriptions/plans/_faq_new.pug @@ -1,6 +1,5 @@ include ./_plans_faq_tabs include ../../_mixins/eyebrow -include ../../_mixins/material_symbol - var managingYourSubscription = 'managingYourSubscription' - var overleafIndividualPlans = 'overleafIndividualPlans' @@ -82,10 +81,6 @@ include ../../_mixins/material_symbol .row .col-xs-12.plans-faq-support span #{translate('still_have_questions')} - button( - data-ol-open-contact-form-modal="general" - data-bs-toggle=bootstrapVersion === 5 ? 
"modal" : undefined - data-bs-target=bootstrapVersion === 5 ? "#contactUsModal" : undefined - ) + button(data-ol-open-contact-form-modal="general") span(style="margin-right: 4px") #{translate('contact_support')} - +material-symbol-rounded("arrow_right_alt", "icon-md") + i.icon-md.material-symbols.material-symbols-rounded.material-symbols-arrow-right(aria-hidden="true") arrow_right_alt diff --git a/services/web/app/views/subscriptions/plans/_plans_faq_tabs.pug b/services/web/app/views/subscriptions/plans/_plans_faq_tabs.pug index a598f4774c..f312ebeb46 100644 --- a/services/web/app/views/subscriptions/plans/_plans_faq_tabs.pug +++ b/services/web/app/views/subscriptions/plans/_plans_faq_tabs.pug @@ -1,6 +1,5 @@ //- If the `plans-page-bs5` split test has been completed, remove the `data-toggle` and `data-target` because it is not needed anymore (bs5 uses `data-bs-toggle` and `data-bs-target`) -include ../../_mixins/material_symbol - + mixin managingYourSubscription() .ol-accordions-container .custom-accordion-item @@ -15,7 +14,7 @@ mixin managingYourSubscription() ) | Can I change plans or cancel later? span.custom-accordion-icon - +material-symbol-outlined("keyboard_arrow_down") + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down .collapse(id="managingYourSubscriptionQ1") .custom-accordion-body span Yes, you can do this at any time by going to @@ -33,7 +32,7 @@ mixin managingYourSubscription() ) | If I change or cancel my Overleaf plan, will I lose my projects? span.custom-accordion-icon - +material-symbol-outlined("keyboard_arrow_down") + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down .collapse(id="managingYourSubscriptionQ2") .custom-accordion-body | No. Changing or canceling your plan won’t affect your projects, the only change will be to the features available to you. You can see which features are available only on paid plans in the comparison table. 
@@ -49,7 +48,7 @@ mixin managingYourSubscription() ) | Can I pay by invoice or purchase order? span.custom-accordion-icon - +material-symbol-outlined("keyboard_arrow_down") + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down .collapse(id="managingYourSubscriptionQ3") .custom-accordion-body | This is possible when you’re purchasing a group subscription for five or more people, or a site license. For individual subscriptions, we can only accept payment online via credit card, debit card, or PayPal. @@ -65,7 +64,7 @@ mixin managingYourSubscription() ) | How do I view/update the credit card being charged for my subscription? span.custom-accordion-icon - +material-symbol-outlined("keyboard_arrow_down") + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down .collapse(id="managingYourSubscriptionQ4") .custom-accordion-body | You can view and update the card on file by going to Account > @@ -97,7 +96,7 @@ mixin overleafIndividualPlans() ) | How does the free trial work? span.custom-accordion-icon - +material-symbol-outlined("keyboard_arrow_down") + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down .collapse(id="overleafIndividualPlansQ1") .custom-accordion-body span You get full access to your chosen plan during your 7-day free trial, and there’s no obligation to continue beyond the trial. Your card will be charged at the end of your trial unless you cancel before then. To cancel, go to @@ -125,7 +124,7 @@ mixin overleafIndividualPlans() ) | What’s a collaborator on an Overleaf individual subscription? span.custom-accordion-icon - +material-symbol-outlined("keyboard_arrow_down") + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down .collapse(id="overleafIndividualPlansQ2") .custom-accordion-body | A collaborator is someone you invite to work with you on a project. 
So, for example, on our Standard plan you can have up to 10 people collaborating with you on any given project. @@ -142,7 +141,7 @@ mixin overleafIndividualPlans() ) | The individual Standard plan has 10 project collaborators, does it mean that 10 people will be upgraded? span.custom-accordion-icon - +material-symbol-outlined("keyboard_arrow_down") + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down .collapse(id="overleafIndividualPlansQ3") .custom-accordion-body span No. Only the subscriber’s account will be upgraded. An individual Standard subscription allows you to invite 10 people per project to edit the project with you. Your collaborators can access features such as the full document history and extended compile time, but @@ -160,7 +159,7 @@ mixin overleafIndividualPlans() ) | Do collaborators also have access to the editing and collaboration features I’ve paid for? span.custom-accordion-icon - +material-symbol-outlined("keyboard_arrow_down") + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down .collapse(id="overleafIndividualPlansQ4") .custom-accordion-body span If you have an Overleaf subscription, then your project collaborators will have access to features like real-time track changes and document history, but @@ -178,7 +177,7 @@ mixin overleafIndividualPlans() ) | Can I purchase an individual plan on behalf of someone else? span.custom-accordion-icon - +material-symbol-outlined("keyboard_arrow_down") + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down .collapse(id="overleafIndividualPlansQ5") .custom-accordion-body | Individual subscriptions must be purchased by the account that will be the end user. If you want to purchase a plan for someone else, you’ll need to provide them with relevant payment details to enable them to make the purchase. @@ -194,7 +193,7 @@ mixin overleafIndividualPlans() ) | Who is eligible for the Student plan? 
span.custom-accordion-icon - +material-symbol-outlined("keyboard_arrow_down") + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down .collapse(id="overleafIndividualPlansQ6") .custom-accordion-body | As the name suggests, the Student plan is only for students at educational institutions. This includes graduate students. @@ -210,7 +209,7 @@ mixin overleafIndividualPlans() ) | Can I transfer an individual subscription to someone else? span.custom-accordion-icon - +material-symbol-outlined("keyboard_arrow_down") + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down .collapse(id="overleafIndividualPlansQ7") .custom-accordion-body | No. Individual plans can’t be transferred. @@ -233,7 +232,7 @@ mixin overleafGroupPlans() ) | What’s the difference between users and collaborators on an Overleaf group subscription? span.custom-accordion-icon - +material-symbol-outlined("keyboard_arrow_down") + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down .collapse(id="overleafGroupPlansQ1") .custom-accordion-body div On any of our group plans, the number of users refers to the number of people you can invite to join your group. All of these people will have access to the plan’s paid-for features across all their projects, such as real-time track changes and document history. @@ -250,7 +249,7 @@ mixin overleafGroupPlans() ) | What is the benefit of purchasing an Overleaf Group plan? span.custom-accordion-icon - +material-symbol-outlined("keyboard_arrow_down") + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down .collapse(id="overleafGroupPlansQ2") .custom-accordion-body | Our Group subscriptions allow you to purchase access to our premium features for multiple people. They’re easy to manage, help save on paperwork, and allow groups of 5 or more to purchase via purchase order (PO). 
We also offer discounts on purchases of Group subscriptions for more than 20 users; just get in touch with our @@ -276,7 +275,7 @@ mixin overleafGroupPlans() ) | Who is eligible for the educational discount? span.custom-accordion-icon - +material-symbol-outlined("keyboard_arrow_down") + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down .collapse(id="overleafGroupPlansQ3") .custom-accordion-body | The educational discount for group subscriptions is for students or faculty who are using Overleaf primarily for teaching. @@ -292,7 +291,7 @@ mixin overleafGroupPlans() ) | How do I add more licenses to my group subscription, and what will it cost? span.custom-accordion-icon - +material-symbol-outlined("keyboard_arrow_down") + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down .collapse(id="overleafGroupPlansQ4") .custom-accordion-body div @@ -341,7 +340,7 @@ mixin overleafGroupPlans() ) | How do I upgrade my plan from Group Standard to Group Professional? 
span.custom-accordion-icon - +material-symbol-outlined("keyboard_arrow_down") + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down .collapse(id="overleafGroupPlansQ5") .custom-accordion-body | You can upgrade your plan from Group Standard to Group Professional on the diff --git a/services/web/app/views/subscriptions/team/invite-managed.pug b/services/web/app/views/subscriptions/team/invite-managed.pug index d31f12656b..f59b8b4937 100644 --- a/services/web/app/views/subscriptions/team/invite-managed.pug +++ b/services/web/app/views/subscriptions/team/invite-managed.pug @@ -13,7 +13,6 @@ block append meta meta(name="ol-groupSSOActive" data-type="boolean" content=groupSSOActive) meta(name="ol-subscriptionId" data-type="string" content=subscriptionId) meta(name="ol-user" data-type="json" content=user) - meta(name="ol-usersSubscription" data-type="json" content=usersSubscription) block content main.content.content-alt.team-invite#invite-managed-root diff --git a/services/web/app/views/subscriptions/team/invite.pug b/services/web/app/views/subscriptions/team/invite.pug index 1b2ecb4646..dc1b509cbf 100644 --- a/services/web/app/views/subscriptions/team/invite.pug +++ b/services/web/app/views/subscriptions/team/invite.pug @@ -4,7 +4,7 @@ block entrypointVar - entrypoint = 'pages/user/subscription/invite' block append meta - meta(name="ol-hasIndividualPaidSubscription" data-type="boolean" content=hasIndividualPaidSubscription) + meta(name="ol-hasIndividualRecurlySubscription" data-type="boolean" content=hasIndividualRecurlySubscription) meta(name="ol-inviterName" data-type="string" content=inviterName) meta(name="ol-inviteToken" data-type="string" content=inviteToken) meta(name="ol-currentManagedUserAdminEmail" data-type="string" content=currentManagedUserAdminEmail) diff --git a/services/web/app/views/subscriptions/team/invite_logged_out.pug b/services/web/app/views/subscriptions/team/invite_logged_out.pug index e5930aba4f..d07fa5368c 
100644 --- a/services/web/app/views/subscriptions/team/invite_logged_out.pug +++ b/services/web/app/views/subscriptions/team/invite_logged_out.pug @@ -1,4 +1,4 @@ -extends ../../layout-marketing +extends ../../layout-react block append meta meta(name="ol-user" data-type="json" content=user) @@ -14,7 +14,7 @@ block content .card-body .page-header // TODO: Remove `team-invite-name` once we fully migrated to Bootstrap 5 - h1.text-center !{translate("invited_to_group", {inviterName: inviterName, appName: appName }, [{name: 'span', attrs: {class: 'team-invite-name'}}])} + h1.text-centered !{translate("invited_to_group", {inviterName: inviterName, appName: appName }, [{name: 'span', attrs: {class: 'team-invite-name'}}])} if (accountExists) div diff --git a/services/web/app/views/user/accountSuspended.pug b/services/web/app/views/user/accountSuspended.pug index 7231713416..da57f4d9ff 100644 --- a/services/web/app/views/user/accountSuspended.pug +++ b/services/web/app/views/user/accountSuspended.pug @@ -4,12 +4,12 @@ block vars - var suppressNavbar = true - var suppressFooter = true - metadata.robotsNoindexNofollow = true + - bootstrap5PageStatus = 'disabled' block content main.content.content-alt#main-content .container-custom-sm.mx-auto .card - .card-body - h3 #{translate('your_account_is_suspended')} - p #{translate('sorry_this_account_has_been_suspended')} - p !{translate('please_contact_us_if_you_think_this_is_in_error', {}, [{name: 'a', attrs: {href: `mailto:${settings.adminEmail}`}}])} + h3 #{translate('your_account_is_suspended')} + p #{translate('sorry_this_account_has_been_suspended')} + p !{translate('please_contact_us_if_you_think_this_is_in_error', {}, [{name: 'a', attrs: {href: `mailto:${settings.adminEmail}`}}])} diff --git a/services/web/app/views/user/confirm_email.pug b/services/web/app/views/user/confirm_email.pug index 13e911f386..37c04880b1 100644 --- a/services/web/app/views/user/confirm_email.pug +++ b/services/web/app/views/user/confirm_email.pug @@ 
-1,57 +1,60 @@ extends ../layout-marketing -include ../_mixins/notification + +block vars + - bootstrap5PageStatus = 'disabled' block content main.content.content-alt#main-content .container .row - .col-lg-8.offset-lg-2.col-xl-6.offset-xl-3 + .col-md-8.col-md-offset-2.col-lg-6.col-lg-offset-3 .card - .card-body - .page-header(data-ol-hide-on-error-message="confirm-email-wrong-user") - h1 #{translate("confirm_email")} - form( - method="POST" - action="/logout" - id="logoutForm" - ) - input(type="hidden", name="_csrf", value=csrfToken) - input(type="hidden", name="redirect", value=currentUrlWithQueryParams) - form( - data-ol-async-form, - data-ol-auto-submit, - name="confirmEmailForm" - action="/user/emails/confirm", - method="POST", - id="confirmEmailForm", - ) - input(type="hidden", name="_csrf", value=csrfToken) - input(type="hidden", name="token", value=token) - - div(data-ol-not-sent) - +formMessages() - div(data-ol-custom-form-message="confirm-email-wrong-user" hidden) - h1.h3 #{translate("we_cant_confirm_this_email")} - p !{translate("to_confirm_email_address_you_must_be_logged_in_with_the_requesting_account")} - p !{translate("you_are_currently_logged_in_as", {email: getUserEmail()})} - .actions - button.btn-primary.btn.w-100( - form="logoutForm" - ) #{translate('log_in_with_a_different_account')} + .page-header(data-ol-hide-on-error-message="confirm-email-wrong-user") + h1 #{translate("confirm_email")} + form( + method="POST" + action="/logout" + id="logoutForm" + ) + input(type="hidden", name="_csrf", value=csrfToken) + input(type="hidden", name="redirect", value=currentUrlWithQueryParams) + form( + data-ol-async-form, + data-ol-auto-submit, + name="confirmEmailForm" + action="/user/emails/confirm", + method="POST", + id="confirmEmailForm", + ) + input(type="hidden", name="_csrf", value=csrfToken) + input(type="hidden", name="token", value=token) + div(data-ol-not-sent) + +formMessages() + div(data-ol-custom-form-message="confirm-email-wrong-user" hidden) + 
h1.h3 #{translate("we_cant_confirm_this_email")} + p !{translate("to_confirm_email_address_you_must_be_logged_in_with_the_requesting_account")} + p !{translate("you_are_currently_logged_in_as", {email: getUserEmail()})} .actions - button.btn-primary.btn.w-100( - type='submit', - data-ol-disabled-inflight - data-ol-hide-on-error-message="confirm-email-wrong-user" - ) - span(data-ol-inflight="idle") - | #{translate('confirm')} - span(hidden data-ol-inflight="pending") - span(role='status').spinner-border.spinner-border-sm.mx-2 + button.btn-primary.btn.btn-block( + form="logoutForm" + ) #{translate('log_in_with_a_different_account')} - div(hidden data-ol-sent) - +notification({ariaLive: 'polite', type: 'success', className: 'mb-3', content: translate("thank_you_email_confirmed")}) - div.text-center - a.btn.btn-primary(href="/user/settings") - | #{translate('go_to_account_settings')} + .actions + button.btn-primary.btn.btn-block( + type='submit', + data-ol-disabled-inflight + data-ol-hide-on-error-message="confirm-email-wrong-user" + ) + span(data-ol-inflight="idle") + | #{translate('confirm')} + span(hidden data-ol-inflight="pending") + i.fa.fa-fw.fa-spin.fa-spinner(aria-hidden="true") + |  #{translate('confirming')}… + + div(hidden data-ol-sent) + .alert.alert-success + | #{translate('thank_you_email_confirmed')} + div.text-center + a.btn.btn-primary(href="/user/settings") + | #{translate('go_to_account_settings')} diff --git a/services/web/app/views/user/email-preferences.pug b/services/web/app/views/user/email-preferences.pug index 86ebc5f841..465ffede37 100644 --- a/services/web/app/views/user/email-preferences.pug +++ b/services/web/app/views/user/email-preferences.pug @@ -1,47 +1,49 @@ extends ../layout-marketing include ../_mixins/back_to_btns +block vars + - bootstrap5PageStatus = 'disabled' + block content main.content.content-alt#main-content .container .row - .col-lg-10.offset-lg-1.col-xl-8.offset-xl-2 + .col-md-10.col-md-offset-1.col-lg-8.col-lg-offset-2 
.card - .card-body - .page-header - h1 #{translate("newsletter_info_title")} - - p #{translate("newsletter_info_summary")} - - - var submitAction - if subscribed - - submitAction = '/user/newsletter/unsubscribe' - p !{translate("newsletter_info_subscribed", {}, ['strong'])} - else - - submitAction = '/user/newsletter/subscribe' - p !{translate("newsletter_info_unsubscribed", {}, ['strong'])} - - form( - data-ol-async-form - data-ol-reload-on-success - name="newsletterForm" - action=submitAction - method="POST" - ) - input(name='_csrf', type='hidden', value=csrfToken) - +formMessages() - p.actions.text-center - if subscribed - button.btn-danger.btn(type='submit', data-ol-disabled-inflight) - span(data-ol-inflight="idle") #{translate("unsubscribe")} - span(hidden data-ol-inflight="pending") #{translate("saving")}… - else - button.btn-primary.btn(type='submit', data-ol-disabled-inflight) - span(data-ol-inflight="idle") #{translate("subscribe")} - span(hidden data-ol-inflight="pending") #{translate("saving")}… - - if subscribed - p #{translate("newsletter_info_note")} - - .page-separator - +back-to-btns() + .page-header + h1 #{translate("newsletter_info_title")} + + p #{translate("newsletter_info_summary")} + + - var submitAction + if subscribed + - submitAction = '/user/newsletter/unsubscribe' + p !{translate("newsletter_info_subscribed", {}, ['strong'])} + else + - submitAction = '/user/newsletter/subscribe' + p !{translate("newsletter_info_unsubscribed", {}, ['strong'])} + + form( + data-ol-async-form + data-ol-reload-on-success + name="newsletterForm" + action=submitAction + method="POST" + ) + input(name='_csrf', type='hidden', value=csrfToken) + +formMessages() + p.actions.text-center + if subscribed + button.btn-danger.btn(type='submit', data-ol-disabled-inflight) + span(data-ol-inflight="idle") #{translate("unsubscribe")} + span(hidden data-ol-inflight="pending") #{translate("saving")}… + else + button.btn-primary.btn(type='submit', data-ol-disabled-inflight) + 
span(data-ol-inflight="idle") #{translate("subscribe")} + span(hidden data-ol-inflight="pending") #{translate("saving")}… + + if subscribed + p #{translate("newsletter_info_note")} + + .page-separator + +back-to-btns() diff --git a/services/web/app/views/user/one_time_login.pug b/services/web/app/views/user/one_time_login.pug index 648f6d93c1..89e1491913 100644 --- a/services/web/app/views/user/one_time_login.pug +++ b/services/web/app/views/user/one_time_login.pug @@ -1,18 +1,20 @@ extends ../layout-marketing +block vars + - bootstrap5PageStatus = 'disabled' + block content main.content.content-alt#main-content .container .row - .col-lg-6.offset-lg-3.col-xl-4.offset-xl-4 + .col-md-6.col-md-offset-3.col-lg-4.col-lg-offset-4 .card - .card-body - .page-header - h1 We're back! - p Overleaf is now running normally. - p - | Please - | - a(href="/login") log in - | - | to continue working on your projects. + .page-header + h1 We're back! + p Overleaf is now running normally. + p + | Please + | + a(href="/login") log in + | + | to continue working on your projects. 
diff --git a/services/web/app/views/user/passwordReset-bs5.pug b/services/web/app/views/user/passwordReset-bs5.pug index 08e0a71b9d..7637a91062 100644 --- a/services/web/app/views/user/passwordReset-bs5.pug +++ b/services/web/app/views/user/passwordReset-bs5.pug @@ -1,11 +1,10 @@ -extends ../layout-website-redesign +extends ../layout-website-redesign-bootstrap-5 include ../_mixins/recaptcha include ../_mixins/notification block vars - var suppressNavbar = true - var suppressFooter = true - - isWebsiteRedesign = true block content - var showCaptcha = settings.recaptcha && settings.recaptcha.siteKey && !(settings.recaptcha.disabled && settings.recaptcha.disabled.passwordReset) diff --git a/services/web/app/views/user/passwordReset.pug b/services/web/app/views/user/passwordReset.pug index 4eb1adbca5..1d019b65fc 100644 --- a/services/web/app/views/user/passwordReset.pug +++ b/services/web/app/views/user/passwordReset.pug @@ -1,6 +1,5 @@ extends ../layout-marketing include ../_mixins/recaptcha -include ../_mixins/material_symbol block vars - bootstrap5PageStatus = 'disabled' @@ -49,7 +48,7 @@ block content div(data-ol-custom-form-message="no-password-allowed-due-to-sso" hidden) .notification.notification-type-error(aria-live="polite" style="margin-bottom: 10px;") .notification-icon - +material-symbol-rounded("error") + span.material-symbols.material-symbols-rounded(aria-hidden="true") error .notification-content-and-cta .notification-content p diff --git a/services/web/app/views/user/primaryEmailCheck-bs5.pug b/services/web/app/views/user/primaryEmailCheck-bs5.pug index b25136927a..0828c06e4b 100644 --- a/services/web/app/views/user/primaryEmailCheck-bs5.pug +++ b/services/web/app/views/user/primaryEmailCheck-bs5.pug @@ -1,8 +1,4 @@ -extends ../layout-website-redesign - -block vars - - bootstrap5PageStatus = 'enabled' - - isWebsiteRedesign = true +extends ../layout-website-redesign-bootstrap-5 block content main#main-content diff --git 
a/services/web/app/views/user/reconfirm-bs5.pug b/services/web/app/views/user/reconfirm-bs5.pug index fce9a44295..8d9d13955f 100644 --- a/services/web/app/views/user/reconfirm-bs5.pug +++ b/services/web/app/views/user/reconfirm-bs5.pug @@ -1,72 +1,69 @@ -extends ../layout-website-redesign +extends ../layout-website-redesign-bootstrap-5 include ../_mixins/recaptcha -block vars - - isWebsiteRedesign = true - block content - - var email = reconfirm_email ? reconfirm_email : "" - - var showCaptcha = settings.recaptcha && settings.recaptcha.siteKey && !(settings.recaptcha.disabled && settings.recaptcha.disabled.passwordReset) + - var email = reconfirm_email ? reconfirm_email : "" + - var showCaptcha = settings.recaptcha && settings.recaptcha.siteKey && !(settings.recaptcha.disabled && settings.recaptcha.disabled.passwordReset) - if showCaptcha - script(type="text/javascript", nonce=scriptNonce, src="https://www.recaptcha.net/recaptcha/api.js?render=explicit") - div( - id="recaptcha" - class="g-recaptcha" - data-sitekey=settings.recaptcha.siteKey - data-size="invisible" - data-badge="inline" - ) + if showCaptcha + script(type="text/javascript", nonce=scriptNonce, src="https://www.recaptcha.net/recaptcha/api.js?render=explicit") + div( + id="recaptcha" + class="g-recaptcha" + data-sitekey=settings.recaptcha.siteKey + data-size="invisible" + data-badge="inline" + ) - main#main-content(data-ol-captcha-retry-trigger-area="") - .container.auth-aux-container(style="max-width: 420px;") - form( - data-ol-async-form - name="reconfirmAccountForm" - action="/user/reconfirm" - method="POST" - aria-label=translate('request_reconfirmation_email') - captcha=(showCaptcha ? '' : false) - captcha-action-name=(showCaptcha ? "passwordReset" : false) - ) - h1.h5.mb-3 #{translate("reconfirm_account")} - p #{translate('reconfirm_explained')} - | - a(href=`mailto:${settings.adminEmail}`) #{settings.adminEmail} - | . 
- - div(data-ol-not-sent) - +formMessagesNewStyle() + main#main-content(data-ol-captcha-retry-trigger-area="") + .container.auth-aux-container(style="max-width: 420px;") + form( + data-ol-async-form + name="reconfirmAccountForm" + action="/user/reconfirm" + method="POST" + aria-label=translate('request_reconfirmation_email') + captcha=(showCaptcha ? '' : false) + captcha-action-name=(showCaptcha ? "passwordReset" : false) + ) + h1.h5.mb-3 #{translate("reconfirm_account")} + p #{translate('reconfirm_explained')} + | + a(href=`mailto:${settings.adminEmail}`) #{settings.adminEmail} + | . + + div(data-ol-not-sent) + +formMessagesNewStyle() - input(type="hidden" name="_csrf" value=csrfToken) - .form-group.mb-3 - label.form-label(for='email') #{translate("please_enter_email")} - input.form-control( - aria-label="email" - type='email' - name='email' - placeholder='email@example.com' - required - autofocus - value=email - ) - .actions - button.btn.btn-primary.w-100( - style="white-space: normal;" - type='submit' - data-ol-disabled-inflight - aria-label=translate('request_password_reset_to_reconfirm') - ) - span(data-ol-inflight="idle") - | #{translate('request_password_reset_to_reconfirm')} - span(hidden data-ol-inflight="pending") - | #{translate('request_password_reset_to_reconfirm')}… - div(hidden data-ol-sent) - div.alert.alert-success( - role="alert" - aria-live="polite" - ) - span #{translate('password_reset_email_sent')} + input(type="hidden" name="_csrf" value=csrfToken) + .form-group.mb-3 + label.form-label(for='email') #{translate("please_enter_email")} + input.form-control( + aria-label="email" + type='email' + name='email' + placeholder='email@example.com' + required + autofocus + value=email + ) + .actions + button.btn.btn-primary.w-100( + style="white-space: normal;" + type='submit' + data-ol-disabled-inflight + aria-label=translate('request_password_reset_to_reconfirm') + ) + span(data-ol-inflight="idle") + | 
#{translate('request_password_reset_to_reconfirm')} + span(hidden data-ol-inflight="pending") + | #{translate('request_password_reset_to_reconfirm')}… + div(hidden data-ol-sent) + div.alert.alert-success( + role="alert" + aria-live="polite" + ) + span #{translate('password_reset_email_sent')} - if showCaptcha - +recaptchaConditions + if showCaptcha + +recaptchaConditions diff --git a/services/web/app/views/user/reconfirm.pug b/services/web/app/views/user/reconfirm.pug index 23b77d278d..7c17423d5a 100644 --- a/services/web/app/views/user/reconfirm.pug +++ b/services/web/app/views/user/reconfirm.pug @@ -23,7 +23,7 @@ block content .row .col-sm-12.col-md-6.col-md-offset-3 .card - h1.card-header #{translate("reconfirm_account")} + h1.card-header #{translate("reconfirm")} #{translate("Account")} p #{translate('reconfirm_explained')}  a(href=`mailto:${settings.adminEmail}`) #{settings.adminEmail} | . diff --git a/services/web/app/views/user/restricted.pug b/services/web/app/views/user/restricted.pug index 0140064a99..eba1d2ab05 100644 --- a/services/web/app/views/user/restricted.pug +++ b/services/web/app/views/user/restricted.pug @@ -1,5 +1,4 @@ extends ../layout-marketing -include ../_mixins/material_symbol block content main.content#main-content @@ -7,8 +6,8 @@ block content .row .col-md-8.offset-md-2.text-center .page-header - h1 #{translate("restricted_no_permission")} + h2 #{translate("restricted_no_permission")} p - a.inline-material-symbols(href="/") - +material-symbol("arrow_left_alt") - | #{translate("take_me_home")} + span.inline-material-symbols + a(href="/").material-symbols(aria-hidden="true") arrow_left_alt + a(href="/") #{translate("take_me_home")} diff --git a/services/web/app/views/user/sessions.pug b/services/web/app/views/user/sessions.pug index ffd65a3548..187c1dae75 100644 --- a/services/web/app/views/user/sessions.pug +++ b/services/web/app/views/user/sessions.pug @@ -1,70 +1,72 @@ extends ../layout-marketing +block vars + - bootstrap5PageStatus = 
'disabled' + block content main.content.content-alt#main-content .container .row - .col-lg-10.offset-lg-1.col-xl-8.offset-xl-2 + .col-md-10.col-md-offset-1.col-lg-8.col-lg-offset-2 .card.clear-user-sessions - .card-body - .page-header - h1 #{translate("your_sessions")} - - if currentSession.ip_address && currentSession.session_created - h3 #{translate("current_session")} - div + .page-header + h1 #{translate("your_sessions")} + + if currentSession.ip_address && currentSession.session_created + h3 #{translate("current_session")} + div + table.table.table-striped + thead + tr + th #{translate("ip_address")} + th #{translate("session_created_at")} + tr + td #{currentSession.ip_address} + td #{moment(currentSession.session_created).utc().format('Do MMM YYYY, h:mm a')} UTC + + h3 #{translate("other_sessions")} + div + p.small + | !{translate("clear_sessions_description")} + + form( + data-ol-async-form + action='/user/sessions/clear' + method='POST' + ) + input(name='_csrf' type='hidden' value=csrfToken) + div(data-ol-not-sent) + if sessions.length == 0 + p.text-center + | #{translate("no_other_sessions")} + + if sessions.length > 0 table.table.table-striped thead tr th #{translate("ip_address")} th #{translate("session_created_at")} + for session in sessions tr - td #{currentSession.ip_address} - td #{moment(currentSession.session_created).utc().format('Do MMM YYYY, h:mm a')} UTC - - h3 #{translate("other_sessions")} - div - p.small - | !{translate("clear_sessions_description")} - - form( - data-ol-async-form - action='/user/sessions/clear' - method='POST' - ) - input(name='_csrf' type='hidden' value=csrfToken) - div(data-ol-not-sent) - if sessions.length == 0 - p.text-center - | #{translate("no_other_sessions")} - - if sessions.length > 0 - table.table.table-striped - thead - tr - th #{translate("ip_address")} - th #{translate("session_created_at")} - for session in sessions - tr - td #{session.ip_address} - td #{moment(session.session_created).utc().format('Do MMM 
YYYY, h:mm a')} UTC - - p.actions - .text-center - button.btn.btn-lg.btn-primary( - type="submit" - data-ol-disable-inflight - ) - span(data-ol-inflight="idle") #{translate('clear_sessions')} - span(hidden data-ol-inflight="pending") #{translate("processing")}… - - div(hidden data-ol-sent) - p.text-center - | #{translate("no_other_sessions")} - - p.text-success.text-center - | #{translate('clear_sessions_success')} - .page-separator - .d-flex.gap-3 - a.btn.btn-secondary(href='/user/settings') #{translate('back_to_account_settings')} - a.btn.btn-secondary(href='/project') #{translate('back_to_your_projects')} + td #{session.ip_address} + td #{moment(session.session_created).utc().format('Do MMM YYYY, h:mm a')} UTC + + p.actions + .text-center + button.btn.btn-lg.btn-primary( + type="submit" + data-ol-disable-inflight + ) + span(data-ol-inflight="idle") #{translate('clear_sessions')} + span(hidden data-ol-inflight="pending") #{translate("processing")}… + + div(hidden data-ol-sent) + p.text-center + | #{translate("no_other_sessions")} + + p.text-success.text-center + | #{translate('clear_sessions_success')} + .page-separator + a.btn.btn-secondary(href='/user/settings') #{translate('back_to_account_settings')} + | + a.btn.btn-secondary(href='/project') #{translate('back_to_your_projects')} diff --git a/services/web/app/views/user/setPassword-bs5.pug b/services/web/app/views/user/setPassword-bs5.pug index 83c3a531bb..007ae5e87c 100644 --- a/services/web/app/views/user/setPassword-bs5.pug +++ b/services/web/app/views/user/setPassword-bs5.pug @@ -1,91 +1,90 @@ -extends ../layout-website-redesign +extends ../layout-website-redesign-bootstrap-5 block vars - - var suppressNavbar = true - - var suppressFooter = true - - isWebsiteRedesign = true + - var suppressNavbar = true + - var suppressFooter = true block content - main#main-content - a.auth-aux-logo(href="/") - img(src=buildImgPath("ol-brand/overleaf-o-dark.svg") alt=settings.appName) - .auth-aux-container - form( - 
data-ol-async-form - name="passwordResetForm" - action="/user/password/set" - method="POST" - data-ol-hide-on-error="token-expired" - ) - div( - hidden - data-ol-sent - ) - h1.h3.mb-3.mt-0 #{translate("password_updated")} - p.mb-4 #{translate("your_password_has_been_successfully_changed")}. - a.btn.btn-primary.w-100(href='/login') #{translate("log_in_now")} + main#main-content + a.auth-aux-logo(href="/") + img(src=buildImgPath("ol-brand/overleaf-o-dark.svg") alt=settings.appName) + .auth-aux-container + form( + data-ol-async-form + name="passwordResetForm" + action="/user/password/set" + method="POST" + data-ol-hide-on-error="token-expired" + ) + div( + hidden + data-ol-sent + ) + h1.h3.mb-3.mt-0 #{translate("password_updated")} + p.mb-4 #{translate("your_password_has_been_successfully_changed")}. + a.btn.btn-primary.w-100(href='/login') #{translate("log_in_now")} - div(data-ol-not-sent) - h1.h3.mb-3.mt-0 #{translate("reset_your_password")} - p(data-ol-hide-on-error-message="token-expired") #{translate("create_a_new_password_for_your_account")}. - +formMessagesNewStyle() + div(data-ol-not-sent) + h1.h3.mb-3.mt-0 #{translate("reset_your_password")} + p(data-ol-hide-on-error-message="token-expired") #{translate("create_a_new_password_for_your_account")}. + +formMessagesNewStyle() - +customFormMessageNewStyle('password-contains-email', 'danger') - | #{translate('invalid_password_contains_email')}. - | #{translate('use_a_different_password')}. + +customFormMessageNewStyle('password-contains-email', 'danger') + | #{translate('invalid_password_contains_email')}. + | #{translate('use_a_different_password')}. - +customFormMessageNewStyle('password-too-similar', 'danger') - | #{translate('invalid_password_too_similar')}. - | #{translate('use_a_different_password')}. + +customFormMessageNewStyle('password-too-similar', 'danger') + | #{translate('invalid_password_too_similar')}. + | #{translate('use_a_different_password')}. 
- +customFormMessageNewStyle('token-expired', 'danger') - | #{translate('password_reset_token_expired')} - br - a(href="/user/password/reset") - | #{translate('request_new_password_reset_email')} + +customFormMessageNewStyle('token-expired', 'danger') + | #{translate('password_reset_token_expired')} + br + a(href="/user/password/reset") + | #{translate('request_new_password_reset_email')} - input(type="hidden" name="_csrf" value=csrfToken) - input(type="text" hidden name="email" autocomplete="username" value=email) + input(type="hidden" name="_csrf" value=csrfToken) + input(type="text" hidden name="email" autocomplete="username" value=email) - .form-group.mb-3 - label.form-label(for='passwordField', data-ol-hide-on-error-message="token-expired") #{translate("new_password")} - input.form-control.auth-aux-new-password#passwordField( - type='password' - name='password' - autocomplete="new-password" - autofocus - required - minlength=settings.passwordStrengthOptions.length.min - ) + .form-group.mb-3 + label.form-label(for='passwordField', data-ol-hide-on-error-message="token-expired") #{translate("new_password")} + input.form-control.auth-aux-new-password#passwordField( + type='password' + name='password' + autocomplete="new-password" + autofocus + required + minlength=settings.passwordStrengthOptions.length.min + ) - +customValidationMessageNewStyle('invalid-password') - | #{translate('invalid_password')}. + +customValidationMessageNewStyle('invalid-password') + | #{translate('invalid_password')}. - +customValidationMessageNewStyle('password-must-be-different') - | #{translate('password_cant_be_the_same_as_current_one')}. + +customValidationMessageNewStyle('password-must-be-different') + | #{translate('password_cant_be_the_same_as_current_one')}. 
- +customValidationMessageNewStyle('password-must-be-strong') - | !{translate('password_was_detected_on_a_public_list_of_known_compromised_passwords', {}, [{name: 'a', attrs: {href: 'https://haveibeenpwned.com/passwords', rel: 'noopener noreferrer', target: '_blank'}}])}. - | #{translate('use_a_different_password')}. + +customValidationMessageNewStyle('password-must-be-strong') + | !{translate('password_was_detected_on_a_public_list_of_known_compromised_passwords', {}, [{name: 'a', attrs: {href: 'https://haveibeenpwned.com/passwords', rel: 'noopener noreferrer', target: '_blank'}}])}. + | #{translate('use_a_different_password')}. - input( - type="hidden" - name="passwordResetToken" - value=passwordResetToken - ) - div(data-ol-hide-on-error-message="token-expired") - div #{translate('in_order_to_have_a_secure_account_make_sure_your_password')} - ul.mb-3.ps-4 - li #{translate('is_longer_than_n_characters', {n: settings.passwordStrengthOptions.length.min})} - li #{translate('does_not_contain_or_significantly_match_your_email')} - li #{translate('is_not_used_on_any_other_website')} - .actions - button.btn.btn-primary.w-100( - type='submit' - data-ol-disabled-inflight - aria-label=translate('set_new_password') - ) - span(data-ol-inflight="idle") - | #{translate('set_new_password')} - span(hidden data-ol-inflight="pending") - | #{translate('set_new_password')}… + input( + type="hidden" + name="passwordResetToken" + value=passwordResetToken + ) + div(data-ol-hide-on-error-message="token-expired") + div #{translate('in_order_to_have_a_secure_account_make_sure_your_password')} + ul.mb-3.ps-4 + li #{translate('is_longer_than_n_characters', {n: settings.passwordStrengthOptions.length.min})} + li #{translate('does_not_contain_or_significantly_match_your_email')} + li #{translate('is_not_used_on_any_other_website')} + .actions + button.btn.btn-primary.w-100( + type='submit' + data-ol-disabled-inflight + aria-label=translate('set_new_password') + ) + 
span(data-ol-inflight="idle") + | #{translate('set_new_password')} + span(hidden data-ol-inflight="pending") + | #{translate('set_new_password')}… diff --git a/services/web/app/views/user/settings.pug b/services/web/app/views/user/settings.pug index 4ac35bef71..4f939a41ca 100644 --- a/services/web/app/views/user/settings.pug +++ b/services/web/app/views/user/settings.pug @@ -32,7 +32,6 @@ block append meta meta(name="ol-gitBridgeEnabled" data-type="boolean" content=gitBridgeEnabled) meta(name="ol-isSaas" data-type="boolean" content=isSaas) meta(name="ol-memberOfSSOEnabledGroups" data-type="json" content=memberOfSSOEnabledGroups) - meta(name="ol-capabilities" data-type="json" content=capabilities) block content main.content.content-alt#main-content diff --git a/services/web/app/views/user_membership/group-members-react.pug b/services/web/app/views/user_membership/group-members-react.pug index 05327c4b6d..5e8971172d 100644 --- a/services/web/app/views/user_membership/group-members-react.pug +++ b/services/web/app/views/user_membership/group-members-react.pug @@ -10,7 +10,6 @@ block append meta meta(name="ol-groupName", data-type="string", content=name) meta(name="ol-groupSize", data-type="json", content=groupSize) meta(name="ol-managedUsersActive", data-type="boolean", content=managedUsersActive) - meta(name="ol-isUserGroupManager", data-type="boolean", content=isUserGroupManager) meta(name="ol-groupSSOActive", data-type="boolean", content=groupSSOActive) meta(name="ol-canUseFlexibleLicensing", data-type="boolean", content=canUseFlexibleLicensing) meta(name="ol-canUseAddSeatsFeature", data-type="boolean", content=canUseAddSeatsFeature) diff --git a/services/web/config/settings.defaults.js b/services/web/config/settings.defaults.js index 16a330b686..acfff367f6 100644 --- a/services/web/config/settings.defaults.js +++ b/services/web/config/settings.defaults.js @@ -896,7 +896,6 @@ module.exports = { 'figcaption', 'span', 'source', - 'track', 'video', 'del', ], @@ -922,7 
+921,7 @@ module.exports = { col: ['width'], figure: ['class', 'id', 'style'], figcaption: ['class', 'id', 'style'], - i: ['aria-hidden', 'aria-label', 'class', 'id', 'translate'], + i: ['aria-hidden', 'aria-label', 'class', 'id'], iframe: [ 'allowfullscreen', 'frameborder', @@ -947,7 +946,6 @@ module.exports = { 'style', ], tr: ['class'], - track: ['src', 'kind', 'srcLang', 'label'], video: ['alt', 'class', 'controls', 'height', 'width'], }, }, @@ -971,7 +969,6 @@ module.exports = { editorToolbarButtons: [], sourceEditorExtensions: [], sourceEditorComponents: [], - pdfLogEntryHeaderActionComponents: [], pdfLogEntryComponents: [], pdfLogEntriesComponents: [], pdfPreviewPromotions: [], @@ -1002,10 +999,8 @@ module.exports = { toastGenerators: [], editorSidebarComponents: [], fileTreeToolbarComponents: [], - fullProjectSearchPanel: [], integrationPanelComponents: [], referenceSearchSetting: [], - errorLogsComponents: [], }, moduleImportSequence: [ diff --git a/services/web/docker-compose.ci.yml b/services/web/docker-compose.ci.yml index 33b5a3ca2e..5cffe19810 100644 --- a/services/web/docker-compose.ci.yml +++ b/services/web/docker-compose.ci.yml @@ -13,9 +13,6 @@ services: logging: driver: local user: node - volumes: - - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it - entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:unit:app working_dir: /overleaf/services/web env_file: docker-compose.common.env @@ -42,9 +39,6 @@ services: OVERLEAF_CONFIG: extra_hosts: - 'www.overleaf.test:127.0.0.1' - volumes: - - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it - entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance:app user: root depends_on: @@ -92,7 +86,7 @@ services: user: root redis: - image: redis:7.4.3 + image: redis mongo: image: mongo:7.0.20 diff --git a/services/web/docker-compose.yml b/services/web/docker-compose.yml index 10e0a7842c..5314e94ed3 
100644 --- a/services/web/docker-compose.yml +++ b/services/web/docker-compose.yml @@ -11,7 +11,6 @@ services: - .:/overleaf/services/web - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries - - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/web env_file: docker-compose.common.env environment: @@ -21,7 +20,6 @@ services: LOG_LEVEL: ${LOG_LEVEL:-} NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" - entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:unit:app user: node depends_on: @@ -33,7 +31,6 @@ services: - .:/overleaf/services/web - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries - - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it user: node working_dir: /overleaf/services/web env_file: docker-compose.common.env @@ -53,7 +50,6 @@ services: - mongo - saml - ldap - entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance:app test_frontend: @@ -88,7 +84,7 @@ services: - "cypress:run-ct" redis: - image: redis:7.4.3 + image: redis mongo: image: mongo:7.0.20 diff --git a/services/web/frontend/extracted-translations.json b/services/web/frontend/extracted-translations.json index 4e36bb6197..f780667d8e 100644 --- a/services/web/frontend/extracted-translations.json +++ b/services/web/frontend/extracted-translations.json @@ -1,9 +1,7 @@ { - "0_free_suggestions": "", + "12x_more_compile_time": "", "1_2_width": "", "1_4_width": "", - "1_free_suggestion": "", - "24x_more_compile_time": "", "3_4_width": "", "About": "", "Account": "", @@ -32,7 +30,6 @@ "about_to_enable_managed_users": "", "about_to_leave_project": "", "about_to_leave_projects": "", - "about_to_remove_user_preamble": "", "about_to_trash_projects": "", "abstract": "", "accept_and_continue": "", @@ -110,7 +107,6 @@ "agree_with_the_terms": "", 
"ai_assist_in_overleaf_is_included_via_writefull_groups": "", "ai_assist_in_overleaf_is_included_via_writefull_individual": "", - "ai_assist_unavailable_due_to_subscription_type": "", "ai_assistance_to_help_you": "", "ai_based_language_tools": "", "ai_can_make_mistakes": "", @@ -136,7 +132,6 @@ "an_email_has_already_been_sent_to": "", "an_error_occured_while_restoring_project": "", "an_error_occurred_when_verifying_the_coupon_code": "", - "and_upgrade_for_compile_time": "", "annual_discount": "", "anonymous": "", "anyone_with_link_can_edit": "", @@ -194,7 +189,6 @@ "blog": "", "bold": "", "booktabs": "", - "breadcrumbs": "", "browser": "", "bullet_list": "", "buy_licenses": "", @@ -206,8 +200,6 @@ "can_view_content": "", "cancel": "", "cancel_add_on": "", - "cancel_any_existing_subscriptions": "", - "cancel_any_existing_subscriptions_and_leave_any_group_subscriptions": "", "cancel_anytime": "", "cancel_my_account": "", "cancel_my_subscription": "", @@ -299,8 +291,6 @@ "compile_error_entry_description": "", "compile_error_handling": "", "compile_larger_projects": "", - "compile_limit_reached": "", - "compile_limit_upgrade_prompt": "", "compile_mode": "", "compile_terminated_by_user": "", "compiler": "", @@ -319,7 +309,6 @@ "confirm_reject_selected_changes": "", "confirm_reject_selected_changes_plural": "", "confirm_remove_sso_config_enter_email": "", - "confirm_remove_user_type_email_address": "", "confirm_secondary_email": "", "confirm_your_email": "", "confirming": "", @@ -428,6 +417,7 @@ "discount": "", "discount_of": "", "discover_the_fastest_way_to_search_and_cite": "", + "dismiss_error_popup": "", "display": "", "display_deleted_user": "", "display_math": "", @@ -547,7 +537,6 @@ "error": "", "error_assist": "", "error_log": "", - "error_logs_have_had_an_update": "", "error_opening_document": "", "error_opening_document_detail": "", "error_performing_request": "", @@ -641,7 +630,6 @@ "generic_if_problem_continues_contact_us": "", 
"generic_linked_file_compile_error": "", "generic_something_went_wrong": "", - "get_ai_assist": "", "get_collaborative_benefits": "", "get_discounted_plan": "", "get_error_assist": "", @@ -698,8 +686,6 @@ "go_next_page": "", "go_page": "", "go_prev_page": "", - "go_to_account_settings": "", - "go_to_code_location": "", "go_to_code_location_in_pdf": "", "go_to_overleaf": "", "go_to_pdf_location_in_code": "", @@ -857,7 +843,6 @@ "integrations": "", "integrations_like_github": "", "interested_in_cheaper_personal_plan": "", - "introducing_shorter_compile_timeout": "", "invalid_confirmation_code": "", "invalid_email": "", "invalid_file_name": "", @@ -994,7 +979,6 @@ "login_count": "", "login_to_accept_invitation": "", "login_with_service": "", - "logs": "", "logs_and_output_files": "", "looking_multiple_licenses": "", "looks_like_youre_at": "", @@ -1065,7 +1049,6 @@ "more_compile_time": "", "more_editor_toolbar_item": "", "more_info": "", - "more_logs_and_files": "", "more_options": "", "my_library": "", "n_items": "", @@ -1089,7 +1072,6 @@ "neither_agree_nor_disagree": "", "new_compile_domain_notice": "", "new_create_tables_and_equations": "", - "new_error_logs_panel": "", "new_file": "", "new_folder": "", "new_font_open_dyslexic": "", @@ -1159,11 +1141,10 @@ "on_free_plan_upgrade_to_access_features": "", "one_step_away_from_professional_features": "", "only_group_admin_or_managers_can_delete_your_account_1": "", + "only_group_admin_or_managers_can_delete_your_account_2": "", "only_group_admin_or_managers_can_delete_your_account_3": "", - "only_group_admin_or_managers_can_delete_your_account_6": "", - "only_group_admin_or_managers_can_delete_your_account_7": "", - "only_group_admin_or_managers_can_delete_your_account_8": "", - "only_group_admin_or_managers_can_delete_your_account_9": "", + "only_group_admin_or_managers_can_delete_your_account_4": "", + "only_group_admin_or_managers_can_delete_your_account_5": "", "only_importer_can_refresh": "", "open_action_menu": "", 
"open_advanced_reference_search": "", @@ -1179,6 +1160,7 @@ "organization_name": "", "organize_tags": "", "other": "", + "other_causes_of_compile_timeouts": "", "other_logs_and_files": "", "other_output_files": "", "our_team_will_get_back_to_you_shortly": "", @@ -1326,7 +1308,6 @@ "project_ownership_transfer_confirmation_2": "", "project_renamed_or_deleted": "", "project_renamed_or_deleted_detail": "", - "project_search": "", "project_search_file_count": "", "project_search_file_count_plural": "", "project_search_result_count": "", @@ -1362,8 +1343,6 @@ "reactivate_subscription": "", "read_lines_from_path": "", "read_more": "", - "read_more_about_compile_timeout_changes": "", - "read_more_about_fix_prevent_timeout": "", "read_more_about_free_compile_timeouts_servers": "", "read_only_dropbox_sync_message": "", "read_only_token": "", @@ -1419,7 +1398,6 @@ "remove_secondary_email_addresses": "", "remove_sso_login_option": "", "remove_tag": "", - "remove_user": "", "removed_from_project": "", "removing": "", "rename": "", @@ -1440,6 +1418,7 @@ "resend": "", "resend_confirmation_code": "", "resend_confirmation_email": "", + "resend_email": "", "resend_group_invite": "", "resend_link_sso": "", "resend_managed_user_invite": "", @@ -1519,7 +1498,6 @@ "search_whole_word": "", "search_within_selection": "", "searched_path_for_lines_containing": "", - "searching_all_project_files_is_now_available": "", "security": "", "see_suggestions_from_collaborators": "", "select_a_column_or_a_merged_cell_to_align": "", @@ -1561,6 +1539,7 @@ "send_message": "", "send_request": "", "sending": "", + "sent": "", "server_error": "", "server_pro_license_entitlement_line_1": "", "server_pro_license_entitlement_line_2": "", @@ -1581,8 +1560,6 @@ "sharelatex_beta_program": "", "shortcut_to_open_advanced_reference_search": "", "show_all_projects": "", - "show_breadcrumbs": "", - "show_breadcrumbs_in_toolbar": "", "show_document_preamble": "", "show_equation_preview": "", "show_file_tree": "", @@ 
-1678,7 +1655,6 @@ "start_a_free_trial": "", "start_by_adding_your_email": "", "start_by_fixing_the_first_error_in_your_doc": "", - "start_by_fixing_the_first_error_in_your_document": "", "start_free_trial": "", "start_free_trial_without_exclamation": "", "start_the_conversation_by_saying_hello_or_sharing_an_update": "", @@ -1713,7 +1689,6 @@ "suggest_a_different_fix": "", "suggest_fix": "", "suggested": "", - "suggested_code": "", "suggested_fix_for_error_in_path": "", "suggestion_applied": "", "suggests_code_completions_while_typing": "", @@ -1796,12 +1771,6 @@ "there_is_an_unrecoverable_latex_error": "", "there_was_a_problem_restoring_the_project_please_try_again_in_a_few_moments_or_contact_us": "", "they_lose_access_to_account": "", - "they_will_be_removed_from_the_group": "", - "they_will_continue_to_have_access_to_any_projects_shared_with_them": "", - "they_will_no_longer_be_a_managed_user": "", - "they_will_retain_ownership_of_projects_currently_owned_by_them_and_collaborators_will_become_read_only": "", - "they_will_retain_their_existing_account_on_the_free_plan": "", - "they_wont_be_able_to_log_in_with_sso_they_will_need_to_set_password": "", "this_action_cannot_be_reversed": "", "this_action_cannot_be_undone": "", "this_address_will_be_shown_on_the_invoice": "", @@ -1813,7 +1782,6 @@ "this_is_a_new_feature": "", "this_is_the_file_that_references_pulled_from_your_reference_manager_will_be_added_to": "", "this_project_already_has_maximum_collaborators": "", - "this_project_compiled_but_soon_might_not": "", "this_project_contains_a_file_called_output": "", "this_project_exceeded_collaborator_limit": "", "this_project_exceeded_compile_timeout_limit_on_free_plan": "", @@ -1998,7 +1966,7 @@ "updating": "", "upgrade": "", "upgrade_cc_btn": "", - "upgrade_for_more_compile_time": "", + "upgrade_for_12x_more_compile_time": "", "upgrade_my_plan": "", "upgrade_now": "", "upgrade_plan": "", @@ -2029,7 +1997,6 @@ "user_deletion_error": "", 
"user_deletion_password_reset_tip": "", "user_first_name_attribute": "", - "user_has_left_organization_and_need_to_transfer_their_projects": "", "user_last_name_attribute": "", "user_sessions": "", "using_latex": "", @@ -2096,8 +2063,8 @@ "were_making_some_changes_to_project_sharing_this_means_you_will_be_visible": "", "were_performing_maintenance": "", "were_redesigning_our_editor_to_make_it_easier_to_use": "", - "were_reducing_compile_timeout": "", - "what_did_you_find_most_helpful": "", + "weve_recently_reduced_the_compile_timeout_limit_which_may_have_affected_this_project": "", + "weve_recently_reduced_the_compile_timeout_limit_which_may_have_affected_your_project": "", "what_do_you_need_help_with": "", "what_does_this_mean": "", "what_does_this_mean_for_you": "", @@ -2169,7 +2136,6 @@ "you_have_been_removed_from_this_project_and_will_be_redirected_to_project_dashboard": "", "you_have_x_licenses_and_your_plan_supports_up_to_y": "", "you_have_x_licenses_on_your_subscription": "", - "you_may_be_able_to_fix_issues_to_speed_up_the_compile": "", "you_need_to_configure_your_sso_settings": "", "you_unpaused_your_subscription": "", "you_will_be_able_to_reassign_subscription": "", @@ -2204,7 +2170,6 @@ "your_plan_is_limited_to_n_editors": "", "your_plan_is_limited_to_n_editors_plural": "", "your_premium_plan_is_paused": "", - "your_project_compiled_but_soon_might_not": "", "your_project_exceeded_collaborator_limit": "", "your_project_exceeded_compile_timeout_limit_on_free_plan": "", "your_project_near_compile_timeout_limit": "", diff --git a/services/web/frontend/fonts/material-symbols/MaterialSymbolsRoundedUnfilledPartialSlice.woff2 b/services/web/frontend/fonts/material-symbols/MaterialSymbolsRoundedUnfilledPartialSlice.woff2 index a507329c8e..df942df176 100644 Binary files a/services/web/frontend/fonts/material-symbols/MaterialSymbolsRoundedUnfilledPartialSlice.woff2 and b/services/web/frontend/fonts/material-symbols/MaterialSymbolsRoundedUnfilledPartialSlice.woff2 
differ diff --git a/services/web/frontend/fonts/material-symbols/unfilled-symbols.mjs b/services/web/frontend/fonts/material-symbols/unfilled-symbols.mjs index 222be1fd36..baefac05aa 100644 --- a/services/web/frontend/fonts/material-symbols/unfilled-symbols.mjs +++ b/services/web/frontend/fonts/material-symbols/unfilled-symbols.mjs @@ -4,32 +4,26 @@ // You may need to hard reload your browser window to see the changes. export default /** @type {const} */ ([ - 'auto_delete', 'book_5', 'brush', 'code', - 'content_copy', 'create_new_folder', 'delete', 'description', - 'error', 'experiment', 'forum', 'help', 'image', 'info', 'integration_instructions', - 'lightbulb', 'note_add', 'picture_as_pdf', 'rate_review', 'report', - 'search', 'settings', 'space_dashboard', 'table_chart', - 'thumb_down', - 'thumb_up', 'upload_file', 'web_asset', + 'error', ]) diff --git a/services/web/frontend/js/features/chat/context/chat-context.tsx b/services/web/frontend/js/features/chat/context/chat-context.tsx index 2ba0ff5f5d..9feca60579 100644 --- a/services/web/frontend/js/features/chat/context/chat-context.tsx +++ b/services/web/frontend/js/features/chat/context/chat-context.tsx @@ -193,7 +193,7 @@ export const ChatContext = createContext< >(undefined) export const ChatProvider: FC = ({ children }) => { - const chatEnabled = getMeta('ol-capabilities')?.includes('chat') + const chatEnabled = getMeta('ol-chatEnabled') const clientId = useRef() if (clientId.current === undefined) { diff --git a/services/web/frontend/js/features/contact-form/index.js b/services/web/frontend/js/features/contact-form/index.js index 51aff806e3..0b4a4898aa 100644 --- a/services/web/frontend/js/features/contact-form/index.js +++ b/services/web/frontend/js/features/contact-form/index.js @@ -23,7 +23,7 @@ document }) document.querySelectorAll('[data-ol-contact-form]').forEach(el => { - el.addEventListener('submit', function () { + el.addEventListener('submit', function (e) { const emailValue = 
document.querySelector( '[data-ol-contact-form-email-input]' ).value diff --git a/services/web/frontend/js/features/contact-form/search.js b/services/web/frontend/js/features/contact-form/search.js index 1787a068be..10e2ab2f63 100644 --- a/services/web/frontend/js/features/contact-form/search.js +++ b/services/web/frontend/js/features/contact-form/search.js @@ -47,9 +47,8 @@ export function setupSearch(formEl) { const iconEl = document.createElement('i') iconEl.className = 'material-symbols dropdown-item-trailing-icon' - iconEl.textContent = 'open_in_new' + iconEl.innerText = 'open_in_new' iconEl.setAttribute('aria-hidden', 'true') - iconEl.translate = false linkEl.append(iconEl) resultsEl.append(liEl) diff --git a/services/web/frontend/js/features/editor-left-menu/context/project-settings-context.tsx b/services/web/frontend/js/features/editor-left-menu/context/project-settings-context.tsx index e5cd576ba1..e40c4c6872 100644 --- a/services/web/frontend/js/features/editor-left-menu/context/project-settings-context.tsx +++ b/services/web/frontend/js/features/editor-left-menu/context/project-settings-context.tsx @@ -27,7 +27,6 @@ type ProjectSettingsSetterContextValue = { setLineHeight: (lineHeight: UserSettings['lineHeight']) => void setPdfViewer: (pdfViewer: UserSettings['pdfViewer']) => void setMathPreview: (mathPreview: UserSettings['mathPreview']) => void - setBreadcrumbs: (breadcrumbs: UserSettings['breadcrumbs']) => void } type ProjectSettingsContextValue = Partial & @@ -75,8 +74,6 @@ export const ProjectSettingsProvider: FC = ({ setPdfViewer, mathPreview, setMathPreview, - breadcrumbs, - setBreadcrumbs, } = useUserWideSettings() useProjectWideSettingsSocketListener() @@ -113,8 +110,6 @@ export const ProjectSettingsProvider: FC = ({ setPdfViewer, mathPreview, setMathPreview, - breadcrumbs, - setBreadcrumbs, }), [ compiler, @@ -147,8 +142,6 @@ export const ProjectSettingsProvider: FC = ({ setPdfViewer, mathPreview, setMathPreview, - breadcrumbs, - 
setBreadcrumbs, ] ) diff --git a/services/web/frontend/js/features/editor-left-menu/hooks/use-root-doc-id.tsx b/services/web/frontend/js/features/editor-left-menu/hooks/use-root-doc-id.tsx index 8a704f87ac..ca2e85841f 100644 --- a/services/web/frontend/js/features/editor-left-menu/hooks/use-root-doc-id.tsx +++ b/services/web/frontend/js/features/editor-left-menu/hooks/use-root-doc-id.tsx @@ -6,7 +6,7 @@ import useSaveProjectSettings from './use-save-project-settings' export default function useRootDocId() { const [rootDocId] = - useScopeValue('project.rootDocId') + useScopeValue('project.rootDoc_id') const { permissionsLevel } = useEditorContext() const saveProjectSettings = useSaveProjectSettings() diff --git a/services/web/frontend/js/features/editor-left-menu/hooks/use-save-project-settings.tsx b/services/web/frontend/js/features/editor-left-menu/hooks/use-save-project-settings.tsx index f34c506708..07a20a10fa 100644 --- a/services/web/frontend/js/features/editor-left-menu/hooks/use-save-project-settings.tsx +++ b/services/web/frontend/js/features/editor-left-menu/hooks/use-save-project-settings.tsx @@ -19,7 +19,13 @@ export default function useSaveProjectSettings() { await saveProjectSettings(projectId, { [key]: newSetting, }) - setProjectSettings({ ...projectSettings, [key]: newSetting }) + + // rootDocId is used in our tsx and our endpoint, but rootDoc_id is used in our project $scope, etc + // as we use both namings in many files, and convert back and forth, + // it's complicated to separate and choose one name for all usages + // TODO: make rootDocId or rootDoc_id consistent, and remove the need for this / other conversions + const settingsKey = key === 'rootDocId' ? 
'rootDoc_id' : key + setProjectSettings({ ...projectSettings, [settingsKey]: newSetting }) } } } diff --git a/services/web/frontend/js/features/editor-left-menu/hooks/use-user-wide-settings.tsx b/services/web/frontend/js/features/editor-left-menu/hooks/use-user-wide-settings.tsx index 978148721a..70202c9446 100644 --- a/services/web/frontend/js/features/editor-left-menu/hooks/use-user-wide-settings.tsx +++ b/services/web/frontend/js/features/editor-left-menu/hooks/use-user-wide-settings.tsx @@ -20,7 +20,6 @@ export default function useUserWideSettings() { lineHeight, pdfViewer, mathPreview, - breadcrumbs, } = userSettings const setOverallTheme = useSetOverallTheme() @@ -94,13 +93,6 @@ export default function useUserWideSettings() { [saveUserSettings] ) - const setBreadcrumbs = useCallback( - (breadcrumbs: UserSettings['breadcrumbs']) => { - saveUserSettings('breadcrumbs', breadcrumbs) - }, - [saveUserSettings] - ) - return { autoComplete, setAutoComplete, @@ -124,7 +116,5 @@ export default function useUserWideSettings() { setPdfViewer, mathPreview, setMathPreview, - breadcrumbs, - setBreadcrumbs, } } diff --git a/services/web/frontend/js/features/editor-navigation-toolbar/components/toolbar-header.tsx b/services/web/frontend/js/features/editor-navigation-toolbar/components/toolbar-header.tsx index 87bcbc0aac..4304768c48 100644 --- a/services/web/frontend/js/features/editor-navigation-toolbar/components/toolbar-header.tsx +++ b/services/web/frontend/js/features/editor-navigation-toolbar/components/toolbar-header.tsx @@ -80,7 +80,7 @@ const ToolbarHeader = React.memo(function ToolbarHeader({ openShareModal: () => void trackChangesVisible: boolean | undefined }) { - const chatEnabled = getMeta('ol-capabilities')?.includes('chat') + const chatEnabled = getMeta('ol-chatEnabled') const { t } = useTranslation() const shouldDisplayPublishButton = hasPublishPermissions && PublishButton diff --git a/services/web/frontend/js/features/event-tracking/search-events.ts 
b/services/web/frontend/js/features/event-tracking/search-events.ts index 630d07aeaa..cd9ff4b8ba 100644 --- a/services/web/frontend/js/features/event-tracking/search-events.ts +++ b/services/web/frontend/js/features/event-tracking/search-events.ts @@ -6,7 +6,7 @@ type SearchEventSegmentation = { searchType: 'full-project' } & ( | { method: 'keyboard' } - | { method: 'button'; location: 'toolbar' | 'search-form' | 'rail' } + | { method: 'button'; location: 'toolbar' | 'search-form' } )) | ({ searchType: 'document' diff --git a/services/web/frontend/js/features/file-tree/components/file-tree-create/modes/file-tree-upload-doc.tsx b/services/web/frontend/js/features/file-tree/components/file-tree-create/modes/file-tree-upload-doc.tsx index 909e1a1962..2ffd591032 100644 --- a/services/web/frontend/js/features/file-tree/components/file-tree-create/modes/file-tree-upload-doc.tsx +++ b/services/web/frontend/js/features/file-tree/components/file-tree-create/modes/file-tree-upload-doc.tsx @@ -176,6 +176,7 @@ export default function FileTreeUploadDoc() { // close the modal when all the uploads completed successfully .on('complete', result => { if (!result.failed.length) { + // $scope.$emit('done', { name: name }) cancel() } }) diff --git a/services/web/frontend/js/features/form-helpers/create-icon.js b/services/web/frontend/js/features/form-helpers/create-icon.js index 13b2a04bf3..fc26724bee 100644 --- a/services/web/frontend/js/features/form-helpers/create-icon.js +++ b/services/web/frontend/js/features/form-helpers/create-icon.js @@ -2,7 +2,6 @@ export default function createIcon(type) { const icon = document.createElement('span') icon.className = 'material-symbols' icon.setAttribute('aria-hidden', 'true') - icon.setAttribute('translate', 'no') icon.textContent = type return icon } diff --git a/services/web/frontend/js/features/group-management/components/members-table/dropdown-button.tsx 
b/services/web/frontend/js/features/group-management/components/members-table/dropdown-button.tsx index 9e7038363a..bd3b5ee10e 100644 --- a/services/web/frontend/js/features/group-management/components/members-table/dropdown-button.tsx +++ b/services/web/frontend/js/features/group-management/components/members-table/dropdown-button.tsx @@ -20,7 +20,6 @@ import getMeta from '@/utils/meta' import MaterialIcon from '@/shared/components/material-icon' import DropdownListItem from '@/features/ui/components/bootstrap-5/dropdown-list-item' import { Spinner } from 'react-bootstrap' -import { sendMB } from '@/infrastructure/event-tracking' type resendInviteResponse = { success: boolean @@ -29,7 +28,6 @@ type resendInviteResponse = { type ManagedUserDropdownButtonProps = { user: User openOffboardingModalForUser: (user: User) => void - openRemoveModalForUser: (user: User) => void openUnlinkUserModal: (user: User) => void groupId: string setGroupUserAlert: Dispatch> @@ -38,7 +36,6 @@ type ManagedUserDropdownButtonProps = { export default function DropdownButton({ user, openOffboardingModalForUser, - openRemoveModalForUser, openUnlinkUserModal, groupId, setGroupUserAlert, @@ -60,8 +57,7 @@ export default function DropdownButton({ const managedUsersActive = getMeta('ol-managedUsersActive') const groupSSOActive = getMeta('ol-groupSSOActive') - const userId = getMeta('ol-user_id') - const isUserGroupManager = getMeta('ol-isUserGroupManager') + const userPending = user.invite const isGroupSSOLinked = !userPending && user.enrollment?.sso?.some(sso => sso.groupId === groupId) @@ -173,15 +169,9 @@ export default function DropdownButton({ } const onDeleteUserClick = () => { - sendMB('delete-managed-user-selected') openOffboardingModalForUser(user) } - const onReleaseUserClick = () => { - sendMB('remove-managed-user-selected') - openRemoveModalForUser(user) - } - const onRemoveFromGroup = () => { removeMember(user) } @@ -239,13 +229,10 @@ export default function DropdownButton({ ) } - 
if ( - isUserManaged && - !user.isEntityAdmin && - (!isUserGroupManager || userId !== user._id) - ) { + if (isUserManaged && !user.isEntityAdmin) { buttons.push( ) - buttons.push( - - {t('remove_user')} - - ) } else if (!isUserManaged) { buttons.push( {t('remove_from_group')} @@ -277,7 +256,7 @@ export default function DropdownButton({ if (buttons.length === 0) { buttons.push( - + void - openRemoveModalForUser: (user: User) => void openUnlinkUserModal: (user: User) => void groupId: string setGroupUserAlert: Dispatch> @@ -25,7 +24,6 @@ type ManagedUserRowProps = { export default function MemberRow({ user, openOffboardingModalForUser, - openRemoveModalForUser, openUnlinkUserModal, setGroupUserAlert, groupId, @@ -114,7 +112,6 @@ export default function MemberRow({ ( undefined ) - const [userToRemove, setUserToRemove] = useState(undefined) const [groupUserAlert, setGroupUserAlert] = useState(undefined) const [userToUnlink, setUserToUnlink] = useState(undefined) @@ -103,7 +101,6 @@ export default function MembersList({ groupId }: ManagedUsersListProps) { key={user.email} user={user} openOffboardingModalForUser={setUserToOffboard} - openRemoveModalForUser={setUserToRemove} openUnlinkUserModal={setUserToUnlink} setGroupUserAlert={setGroupUserAlert} groupId={groupId} @@ -119,13 +116,6 @@ export default function MembersList({ groupId }: ManagedUsersListProps) { onClose={() => setUserToOffboard(undefined)} /> )} - {userToRemove && ( - setUserToRemove(undefined)} - /> - )} {userToUnlink && ( ) => { + const handleDeleteUserSubmit = (event: any) => { event.preventDefault() - sendMB('delete-managed-user-confirmed') runAsync( postJSON(`/manage/groups/${groupId}/offboardManagedUser/${user._id}`, { body: { diff --git a/services/web/frontend/js/features/group-management/components/members-table/remove-managed-user-modal.tsx b/services/web/frontend/js/features/group-management/components/members-table/remove-managed-user-modal.tsx deleted file mode 100644 index c3c6f8caa4..0000000000 
--- a/services/web/frontend/js/features/group-management/components/members-table/remove-managed-user-modal.tsx +++ /dev/null @@ -1,138 +0,0 @@ -import { User } from '../../../../../../types/group-management/user' -import { useState } from 'react' -import useAsync from '@/shared/hooks/use-async' -import { useTranslation, Trans } from 'react-i18next' -import { useLocation } from '@/shared/hooks/use-location' -import { FetchError, postJSON } from '@/infrastructure/fetch-json' -import { debugConsole } from '@/utils/debugging' -import OLModal, { - OLModalBody, - OLModalFooter, - OLModalHeader, - OLModalTitle, -} from '@/features/ui/components/ol/ol-modal' -import OLFormGroup from '@/features/ui/components/ol/ol-form-group' -import OLButton from '@/features/ui/components/ol/ol-button' -import OLNotification from '@/features/ui/components/ol/ol-notification' -import OLFormControl from '@/features/ui/components/ol/ol-form-control' -import OLFormLabel from '@/features/ui/components/ol/ol-form-label' -import { sendMB } from '@/infrastructure/event-tracking' - -type RemoveManagedUserModalProps = { - user: User - groupId: string - onClose: () => void -} - -export default function RemoveManagedUserModal({ - user, - groupId, - onClose, -}: RemoveManagedUserModalProps) { - const { t } = useTranslation() - const location = useLocation() - const { isLoading, isSuccess, error, setError, runAsync } = useAsync< - any, - any - >() - const [suppliedEmail, setSuppliedEmail] = useState() - const shouldEnableRemoveUserButton = suppliedEmail === user.email - const userFullName = user.last_name - ? 
`${user.first_name || ''} ${user.last_name || ''}` - : user.first_name - - const handleReleaseUserSubmit = (event: React.FormEvent) => { - event.preventDefault() - sendMB('remove-managed-user-confirmed') - runAsync( - postJSON(`/manage/groups/${groupId}/release-managed-user/${user._id}`, { - body: { - verificationEmail: suppliedEmail, - }, - }) - .then(() => { - location.reload() - }) - .catch(err => { - setError( - err instanceof FetchError ? err.getUserFacingMessage() : err.message - ) - debugConsole.error(err) - }) - ) - } - - return ( - -
- - {t('remove_user')} - - -

- {t('about_to_remove_user_preamble', { - userName: userFullName, - userEmail: user.email, - })} -

-
    -
  • {t('they_will_be_removed_from_the_group')}
  • -
  • {t('they_will_no_longer_be_a_managed_user')}
  • -
  • - {t('they_will_retain_their_existing_account_on_the_free_plan')} -
  • -
  • - {t( - 'they_will_retain_ownership_of_projects_currently_owned_by_them_and_collaborators_will_become_read_only' - )} -
  • -
  • - {t( - 'they_will_continue_to_have_access_to_any_projects_shared_with_them' - )} -
  • -
  • - {t( - 'they_wont_be_able_to_log_in_with_sso_they_will_need_to_set_password' - )} -
  • -
-

- ]} // eslint-disable-line react/jsx-key - /> -

- - - {t('confirm_remove_user_type_email_address', { - userName: userFullName, - })} - - setSuppliedEmail(e.target.value)} - /> - - {error && ( - - )} -
- - - {t('cancel')} - - - {t('remove_user')} - - -
-
- ) -} diff --git a/services/web/frontend/js/features/history/components/change-list/dropdown/actions-dropdown.tsx b/services/web/frontend/js/features/history/components/change-list/dropdown/actions-dropdown.tsx index df31a6c58f..9029260057 100644 --- a/services/web/frontend/js/features/history/components/change-list/dropdown/actions-dropdown.tsx +++ b/services/web/frontend/js/features/history/components/change-list/dropdown/actions-dropdown.tsx @@ -2,21 +2,20 @@ import React, { ReactNode } from 'react' import { Dropdown, DropdownMenu, - DropdownToggle, } from '@/features/ui/components/bootstrap-5/dropdown-menu' -import OLTooltip from '@/features/ui/components/ol/ol-tooltip' +import DropdownToggleWithTooltip from '@/features/ui/components/bootstrap-5/dropdown-toggle-with-tooltip' type ActionDropdownProps = { id: string children: React.ReactNode isOpened: boolean iconTag: ReactNode - tooltipDescription: string + toolTipDescription: string setIsOpened: (isOpened: boolean) => void } function ActionsDropdown(props: ActionDropdownProps) { - const { id, children, isOpened, iconTag, setIsOpened, tooltipDescription } = + const { id, children, isOpened, iconTag, setIsOpened, toolTipDescription } = props return ( setIsOpened(open)} > - + {iconTag} + {children} diff --git a/services/web/frontend/js/features/history/components/change-list/dropdown/compare-version-dropdown.tsx b/services/web/frontend/js/features/history/components/change-list/dropdown/compare-version-dropdown.tsx index 11967e3302..91f0bf991a 100644 --- a/services/web/frontend/js/features/history/components/change-list/dropdown/compare-version-dropdown.tsx +++ b/services/web/frontend/js/features/history/components/change-list/dropdown/compare-version-dropdown.tsx @@ -21,7 +21,7 @@ function CompareVersionDropdown({ id={id} isOpened={isOpened} setIsOpened={setIsOpened} - tooltipDescription={t('compare')} + toolTipDescription={t('compare')} iconTag={ {permissions.labelVersion && ( - + )} diff --git 
a/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/add-label.tsx b/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/add-label.tsx index 1381b620d4..882bb9a439 100644 --- a/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/add-label.tsx +++ b/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/add-label.tsx @@ -4,12 +4,18 @@ import OLDropdownMenuItem from '@/features/ui/components/ol/ol-dropdown-menu-ite import OLTagIcon from '@/features/ui/components/ol/icons/ol-tag-icon' import AddLabelModal from '../../add-label-modal' -type AddLabelProps = { +type DownloadProps = { + projectId: string version: number closeDropdown: () => void } -function AddLabel({ version, closeDropdown, ...props }: AddLabelProps) { +function AddLabel({ + version, + projectId, + closeDropdown, + ...props +}: DownloadProps) { const { t } = useTranslation() const [showModal, setShowModal] = useState(false) diff --git a/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/compare-items.tsx b/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/compare-items.tsx index dd236ed98b..78ba0aae75 100644 --- a/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/compare-items.tsx +++ b/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/compare-items.tsx @@ -34,7 +34,7 @@ function CompareItems({ toVTimestamp: selRange.toVTimestamp, }} closeDropdown={closeDropdown} - tooltipDescription={t('history_compare_from_this_version')} + toolTipDescription={t('history_compare_from_this_version')} icon={ void } function Compare({ comparisonRange, closeDropdown, - tooltipDescription, + toolTipDescription, icon, }: CompareProps) { const { setSelection } = useHistoryContext() @@ -32,12 +32,12 @@ function Compare({ return ( diff --git 
a/services/web/frontend/js/features/history/components/file-tree/history-file-tree-doc.tsx b/services/web/frontend/js/features/history/components/file-tree/history-file-tree-doc.tsx index e3543ef527..3b788eb046 100644 --- a/services/web/frontend/js/features/history/components/file-tree/history-file-tree-doc.tsx +++ b/services/web/frontend/js/features/history/components/file-tree/history-file-tree-doc.tsx @@ -1,12 +1,9 @@ import { memo } from 'react' import classNames from 'classnames' import HistoryFileTreeItem from './history-file-tree-item' -import iconTypeFromName, { - newEditorIconTypeFromName, -} from '../../../file-tree/util/icon-type-from-name' +import iconTypeFromName from '../../../file-tree/util/icon-type-from-name' import type { FileDiff } from '../../services/types/file' import MaterialIcon from '@/shared/components/material-icon' -import { useIsNewEditorEnabled } from '@/features/ide-redesign/utils/new-editor-utils' type HistoryFileTreeDocProps = { file: FileDiff @@ -23,16 +20,6 @@ function HistoryFileTreeDoc({ onClick, onKeyDown, }: HistoryFileTreeDocProps) { - const newEditor = useIsNewEditorEnabled() - const icon = newEditor ? ( - - ) : ( - - ) return (
  • + } />
  • ) diff --git a/services/web/frontend/js/features/history/components/file-tree/history-file-tree-folder.tsx b/services/web/frontend/js/features/history/components/file-tree/history-file-tree-folder.tsx index 44cb7f2921..6c2c912f8c 100644 --- a/services/web/frontend/js/features/history/components/file-tree/history-file-tree-folder.tsx +++ b/services/web/frontend/js/features/history/components/file-tree/history-file-tree-folder.tsx @@ -6,7 +6,6 @@ import HistoryFileTreeFolderList from './history-file-tree-folder-list' import type { HistoryDoc, HistoryFileTree } from '../../utils/file-tree' import MaterialIcon from '@/shared/components/material-icon' -import { useIsNewEditorEnabled } from '@/features/ide-redesign/utils/new-editor-utils' type HistoryFileTreeFolderProps = { name: string @@ -36,7 +35,6 @@ function HistoryFileTreeFolder({ docs, }: HistoryFileTreeFolderProps) { const { t } = useTranslation() - const newEditor = useIsNewEditorEnabled() const [expanded, setExpanded] = useState(() => { return hasChanges({ name, folders, docs }) @@ -54,12 +52,10 @@ function HistoryFileTreeFolder({ className="file-tree-expand-icon" /> - {!newEditor && ( - - )} + ) @@ -83,11 +79,7 @@ function HistoryFileTreeFolder({ {expanded ? 
( - + ) : null} ) diff --git a/services/web/frontend/js/features/history/extensions/highlights.ts b/services/web/frontend/js/features/history/extensions/highlights.ts index 1f81f82e74..ce274cf724 100644 --- a/services/web/frontend/js/features/history/extensions/highlights.ts +++ b/services/web/frontend/js/features/history/extensions/highlights.ts @@ -238,7 +238,7 @@ class EmptyLineAdditionMarkerWidget extends WidgetType { super() } - toDOM(): HTMLElement { + toDOM(view: EditorView): HTMLElement { const element = document.createElement('span') element.classList.add( 'ol-cm-empty-line-addition-marker', @@ -255,7 +255,7 @@ class EmptyLineDeletionMarkerWidget extends WidgetType { super() } - toDOM(): HTMLElement { + toDOM(view: EditorView): HTMLElement { const element = document.createElement('span') element.classList.add( 'ol-cm-empty-line-deletion-marker', @@ -297,7 +297,7 @@ class ChangeGutterMarker extends GutterMarker { super() } - toDOM() { + toDOM(view: EditorView) { const el = document.createElement('div') el.className = 'ol-cm-changed-line-gutter' el.style.setProperty('--hue', this.hue.toString()) diff --git a/services/web/frontend/js/features/ide-react/components/layout/main-layout.tsx b/services/web/frontend/js/features/ide-react/components/layout/main-layout.tsx index 93382d613a..b0a65e12bb 100644 --- a/services/web/frontend/js/features/ide-react/components/layout/main-layout.tsx +++ b/services/web/frontend/js/features/ide-react/components/layout/main-layout.tsx @@ -47,8 +47,7 @@ export const MainLayout: FC = () => { handlePaneExpand: handleChatExpand, } = useChatPane() - const chatEnabled = - getMeta('ol-capabilities')?.includes('chat') && !isRestrictedTokenMember + const chatEnabled = getMeta('ol-chatEnabled') && !isRestrictedTokenMember const { t } = useTranslation() diff --git a/services/web/frontend/js/features/ide-react/context/command-registry-context.tsx b/services/web/frontend/js/features/ide-react/context/command-registry-context.tsx index 
ff54c21f2a..e8bec19b8b 100644 --- a/services/web/frontend/js/features/ide-react/context/command-registry-context.tsx +++ b/services/web/frontend/js/features/ide-react/context/command-registry-context.tsx @@ -1,11 +1,4 @@ -import { isMac } from '@/shared/utils/os' -import { - createContext, - useCallback, - useContext, - useMemo, - useState, -} from 'react' +import { createContext, useCallback, useContext, useState } from 'react' type CommandInvocationContext = { location?: string @@ -17,21 +10,17 @@ export type Command = { handler?: (context: CommandInvocationContext) => void href?: string disabled?: boolean + // TODO: Keybinding? } const CommandRegistryContext = createContext( undefined ) -export type Shortcut = { key: string } - -export type Shortcuts = Record - type CommandRegistry = { registry: Map register: (...elements: Command[]) => void unregister: (...id: string[]) => void - shortcuts: Shortcuts } export const CommandRegistryProvider: React.FC = ({ @@ -54,35 +43,8 @@ export const CommandRegistryProvider: React.FC = ({ ) }, []) - // NOTE: This is where we'd add functionality for customising shortcuts. - const shortcuts: Record = useMemo( - () => ({ - undo: [ - { - key: 'Mod-z', - }, - ], - redo: [ - { - key: 'Mod-y', - }, - { - key: 'Mod-Shift-Z', - }, - ], - find: [{ key: 'Mod-f' }], - 'select-all': [{ key: 'Mod-a' }], - 'insert-comment': [{ key: 'Mod-Shift-C' }], - 'format-bold': [{ key: 'Mod-b' }], - 'format-italics': [{ key: 'Mod-i' }], - }), - [] - ) - return ( - + {children} ) @@ -97,92 +59,3 @@ export const useCommandRegistry = (): CommandRegistry => { } return context } - -function parseShortcut(shortcut: Shortcut) { - // Based on KeyBinding type of CodeMirror 6 - let alt = false - let ctrl = false - let shift = false - let meta = false - - let character = null - // isMac ? shortcut.mac : shortcut.key etc. - const shortcutString = shortcut.key ?? '' - const keys = shortcutString.split(/-(?!$)/) ?? 
[] - - for (let i = 0; i < keys.length; i++) { - const isLast = i === keys.length - 1 - const key = keys[i] - if (!key) { - throw new Error('Empty key in shortcut: ' + shortcutString) - } - if (key === 'Alt' || (!isLast && key === 'a')) { - alt = true - } else if ( - key === 'Ctrl' || - key === 'Control' || - (!isLast && key === 'c') - ) { - ctrl = true - } else if (key === 'Shift' || (!isLast && key === 's')) { - shift = true - } else if (key === 'Meta' || key === 'Cmd' || (!isLast && key === 'm')) { - meta = true - } else if (key === 'Mod') { - if (isMac) { - meta = true - } else { - ctrl = true - } - } else { - if (key === 'Space') { - character = ' ' - } - if (!isLast) { - throw new Error( - 'Character key must be last in shortcut: ' + shortcutString - ) - } - if (key.length !== 1) { - throw new Error(`Invalid key '${key}' in shortcut: ${shortcutString}`) - } - if (character) { - throw new Error('Multiple characters in shortcut: ' + shortcutString) - } - character = key - } - } - if (!character) { - throw new Error('No character in shortcut: ' + shortcutString) - } - - return { - alt, - ctrl, - shift, - meta, - character, - } -} - -export const formatShortcut = (shortcut: Shortcut): string => { - const { alt, ctrl, shift, meta, character } = parseShortcut(shortcut) - - if (isMac) { - return [ - ctrl ? '⌃' : '', - alt ? '⌥' : '', - shift ? '⇧' : '', - meta ? '⌘' : '', - character.toUpperCase(), - ].join('') - } - - return [ - ctrl ? 'Ctrl' : '', - shift ? 'Shift' : '', - meta ? 'Meta' : '', - alt ? 
'Alt' : '', - character.toUpperCase(), - ].join(' ') -} diff --git a/services/web/frontend/js/features/ide-react/context/editor-manager-context.tsx b/services/web/frontend/js/features/ide-react/context/editor-manager-context.tsx index e830d7ec1a..e1bb49c39c 100644 --- a/services/web/frontend/js/features/ide-react/context/editor-manager-context.tsx +++ b/services/web/frontend/js/features/ide-react/context/editor-manager-context.tsx @@ -18,7 +18,6 @@ import { useConnectionContext } from '@/features/ide-react/context/connection-co import { debugConsole } from '@/utils/debugging' import { DocumentContainer } from '@/features/ide-react/editor/document-container' import { useLayoutContext } from '@/shared/context/layout-context' -import { useUserContext } from '@/shared/context/user-context' import { GotoLineOptions } from '@/features/ide-react/types/goto-line-options' import { Doc } from '../../../../../types/doc' import { useFileTreeData } from '@/shared/context/file-tree-data-context' @@ -100,7 +99,6 @@ export const EditorManagerProvider: FC = ({ const { view, setView } = useLayoutContext() const { showGenericMessageModal, genericModalVisible, showOutOfSyncModal } = useModalsContext() - const { id: userId } = useUserContext() const [showSymbolPalette, setShowSymbolPalette] = useScopeValue( 'editor.showSymbolPalette' @@ -311,7 +309,7 @@ export const EditorManagerProvider: FC = ({ const tryToggle = () => { const saved = doc.getInflightOp() == null && doc.getPendingOp() == null if (saved) { - doc.setTrackChangesUserId(want ? 
userId : null) + doc.setTrackingChanges(want) setTrackChanges(want) } else { syncTimeoutRef.current = window.setTimeout(tryToggle, 100) @@ -320,7 +318,7 @@ export const EditorManagerProvider: FC = ({ tryToggle() }, - [setTrackChanges, userId] + [setTrackChanges] ) const doOpenNewDocument = useCallback( diff --git a/services/web/frontend/js/features/ide-react/context/ide-react-context.tsx b/services/web/frontend/js/features/ide-react/context/ide-react-context.tsx index 51ecbdc6c9..bb3d0c1a3c 100644 --- a/services/web/frontend/js/features/ide-react/context/ide-react-context.tsx +++ b/services/web/frontend/js/features/ide-react/context/ide-react-context.tsx @@ -16,6 +16,7 @@ import { } from '@/features/ide-react/create-ide-event-emitter' import { JoinProjectPayload } from '@/features/ide-react/connection/join-project-payload' import { useConnectionContext } from '@/features/ide-react/context/connection-context' +import { getMockIde } from '@/shared/context/mock/mock-ide' import { populateEditorScope } from '@/features/ide-react/scope-adapters/editor-manager-context-adapter' import { postJSON } from '@/infrastructure/fetch-json' import { ReactScopeEventEmitter } from '@/features/ide-react/scope-event-emitter/react-scope-event-emitter' @@ -127,11 +128,10 @@ export const IdeReactProvider: FC = ({ children }) => { // Populate scope values when joining project, then fire project:joined event useEffect(() => { function handleJoinProjectResponse({ - project: { rootDoc_id: rootDocId, ..._project }, + project, permissionsLevel, }: JoinProjectPayload) { - const project = { ..._project, rootDocId } - scopeStore.set('project', project) + scopeStore.set('project', { rootDoc_id: null, ...project }) scopeStore.set('permissionsLevel', permissionsLevel) // Make watchers update immediately scopeStore.flushUpdates() @@ -157,11 +157,11 @@ export const IdeReactProvider: FC = ({ children }) => { const ide = useMemo(() => { return { - _id: projectId, + ...getMockIde(), socket, reportError, 
} - }, [projectId, socket, reportError]) + }, [socket, reportError]) const value = useMemo( () => ({ diff --git a/services/web/frontend/js/features/ide-react/context/online-users-context.tsx b/services/web/frontend/js/features/ide-react/context/online-users-context.tsx index 1195f9ae7c..1dba40e6d7 100644 --- a/services/web/frontend/js/features/ide-react/context/online-users-context.tsx +++ b/services/web/frontend/js/features/ide-react/context/online-users-context.tsx @@ -95,7 +95,7 @@ export const OnlineUsersProvider: FC = ({ for (const [clientId, user] of Object.entries(onlineUsers)) { const decoratedUser = { ...user } const docId = user.doc_id - if (docId && fileTreeData) { + if (docId) { decoratedUser.doc = findDocEntityById(fileTreeData, docId) } diff --git a/services/web/frontend/js/features/ide-react/context/snapshot-context.tsx b/services/web/frontend/js/features/ide-react/context/snapshot-context.tsx index 817e03fe86..70f170a8b0 100644 --- a/services/web/frontend/js/features/ide-react/context/snapshot-context.tsx +++ b/services/web/frontend/js/features/ide-react/context/snapshot-context.tsx @@ -24,14 +24,10 @@ export const StubSnapshotUtils = { throw new Error('not implemented') } }, - // unused vars kept to document the interface - // eslint-disable-next-line @typescript-eslint/no-unused-vars buildFileTree(snapshot: Snapshot): Folder { throw new Error('not implemented') }, - // unused vars kept to document the interface - // eslint-disable-next-line @typescript-eslint/no-unused-vars - createFolder(id: string, name: string): Folder { + createFolder(_id: string, name: string): Folder { throw new Error('not implemented') }, } diff --git a/services/web/frontend/js/features/ide-react/editor/document-container.ts b/services/web/frontend/js/features/ide-react/editor/document-container.ts index 28bcb955d1..fee359f146 100644 --- a/services/web/frontend/js/features/ide-react/editor/document-container.ts +++ 
b/services/web/frontend/js/features/ide-react/editor/document-container.ts @@ -196,13 +196,9 @@ export class DocumentContainer extends EventEmitter { return this.doc?.hasBufferedOps() } - setTrackChangesUserId(userId: string | null) { - this.track_changes_as = userId + setTrackingChanges(track_changes: boolean) { if (this.doc) { - this.doc.setTrackChangesUserId(userId) - } - if (this.cm6) { - this.cm6.setTrackChangesUserId(userId) + this.doc.track_changes = track_changes } } @@ -599,7 +595,7 @@ export class DocumentContainer extends EventEmitter { this.doc.on('remoteop', (...ops: AnyOperation[]) => { return this.trigger('remoteop', ...ops) }) - this.doc.on('op:sent', () => { + this.doc.on('op:sent', (op: AnyOperation) => { return this.trigger('op:sent') }) this.doc.on('op:acknowledged', (op: AnyOperation) => { @@ -609,7 +605,7 @@ export class DocumentContainer extends EventEmitter { }) return this.trigger('op:acknowledged') }) - this.doc.on('op:timeout', () => { + this.doc.on('op:timeout', (op: AnyOperation) => { this.trigger('op:timeout') return this.onError(new Error('op timed out')) }) diff --git a/services/web/frontend/js/features/ide-react/editor/share-js-doc.ts b/services/web/frontend/js/features/ide-react/editor/share-js-doc.ts index 5b362299d2..96e866afec 100644 --- a/services/web/frontend/js/features/ide-react/editor/share-js-doc.ts +++ b/services/web/frontend/js/features/ide-react/editor/share-js-doc.ts @@ -12,20 +12,17 @@ import { Message, ShareJsConnectionState, ShareJsOperation, + ShareJsTextType, TrackChangesIdSeeds, } from '@/features/ide-react/editor/types/document' import { EditorFacade } from '@/features/source-editor/extensions/realtime' import { recordDocumentFirstChangeEvent } from '@/features/event-tracking/document-first-change-event' import getMeta from '@/utils/meta' -import { historyOTType } from './share-js-history-ot-type' +import { HistoryOTType } from './share-js-history-ot-type' +import { StringFileData } from 
'overleaf-editor-core/index' import { - StringFileData, - TrackedChangeList, - EditOperationBuilder, -} from 'overleaf-editor-core' -import { - StringFileRawData, RawEditOperation, + StringFileRawData, } from 'overleaf-editor-core/lib/types' // All times below are in milliseconds @@ -71,17 +68,19 @@ export class ShareJsDoc extends EventEmitter { readonly type: OTType = 'sharejs-text-ot' ) { super() - let sharejsType + let sharejsType: ShareJsTextType = sharejs.types.text // Decode any binary bits of data let snapshot: string | StringFileData if (this.type === 'history-ot') { snapshot = StringFileData.fromRaw( docLines as unknown as StringFileRawData ) - sharejsType = historyOTType + sharejsType = new HistoryOTType(snapshot) as ShareJsTextType< + StringFileData, + RawEditOperation[] + > } else { snapshot = docLines.map(line => decodeUtf8(line)).join('\n') - sharejsType = sharejs.types.text } this.connection = { @@ -160,18 +159,6 @@ export class ShareJsDoc extends EventEmitter { this.removeCarriageReturnCharFromShareJsDoc() } - setTrackChangesUserId(userId: string | null) { - this.track_changes = userId != null - } - - getTrackedChanges() { - if (this._doc.otType === 'history-ot') { - return this._doc.snapshot.getTrackedChanges() as TrackedChangeList - } else { - return null - } - } - private removeCarriageReturnCharFromShareJsDoc() { const doc = this._doc let nextPos @@ -266,15 +253,7 @@ export class ShareJsDoc extends EventEmitter { // issues are resolved. 
processUpdateFromServer(message: Message) { try { - if (this.type === 'history-ot' && message.op != null) { - const ops = message.op as RawEditOperation[] - this._doc._onMessage({ - ...message, - op: ops.map(EditOperationBuilder.fromJSON), - }) - } else { - this._doc._onMessage(message) - } + this._doc._onMessage(message) } catch (error) { // Version mismatches are thrown as errors debugConsole.log(error) diff --git a/services/web/frontend/js/features/ide-react/editor/share-js-history-ot-type.ts b/services/web/frontend/js/features/ide-react/editor/share-js-history-ot-type.ts index 81243bb8c7..fde66d89a1 100644 --- a/services/web/frontend/js/features/ide-react/editor/share-js-history-ot-type.ts +++ b/services/web/frontend/js/features/ide-react/editor/share-js-history-ot-type.ts @@ -1,79 +1,49 @@ +import EventEmitter from '@/utils/EventEmitter' import { - EditOperation, + EditOperationBuilder, EditOperationTransformer, + InsertOp, + RemoveOp, + RetainOp, StringFileData, + TextOperation, } from 'overleaf-editor-core' -import { ShareDoc } from '../../../../../types/share-doc' +import { RawEditOperation } from 'overleaf-editor-core/lib/types' -type Api = { - otType: 'history-ot' - trackChangesUserId: string | null +export class HistoryOTType extends EventEmitter { + // stub interface, these are actually on the Doc + api: HistoryOTType + snapshot: StringFileData - getText(): string - getLength(): number -} + constructor(snapshot: StringFileData) { + super() + this.api = this + this.snapshot = snapshot + } -const api: Api & ThisType = { - otType: 'history-ot', - trackChangesUserId: null, + transformX(raw1: RawEditOperation[], raw2: RawEditOperation[]) { + const [a, b] = EditOperationTransformer.transform( + EditOperationBuilder.fromJSON(raw1[0]), + EditOperationBuilder.fromJSON(raw2[0]) + ) + return [[a.toJSON()], [b.toJSON()]] + } - getText() { - return this.snapshot.getContent({ filterTrackedDeletes: true }) - }, - - getLength() { - return 
this.snapshot.getStringLength() - }, -} - -export const historyOTType = { - api, - - transformX(ops1: EditOperation[], ops2: EditOperation[]) { - // Dynamic programming algorithm: gradually transform both sides in a nested - // loop. - const left = [...ops1] - const right = [...ops2] - for (let i = 0; i < left.length; i++) { - for (let j = 0; j < right.length; j++) { - // At this point: - // left[0..i] is ops1[0..i] rebased over ops2[0..j-1] - // right[0..j] is ops2[0..j] rebased over ops1[0..i-1] - const [a, b] = EditOperationTransformer.transform(left[i], right[j]) - left[i] = a - right[j] = b - } - } - return [left, right] - }, - - apply(snapshot: StringFileData, ops: EditOperation[]) { + apply(snapshot: StringFileData, rawEditOperation: RawEditOperation[]) { + const operation = EditOperationBuilder.fromJSON(rawEditOperation[0]) const afterFile = StringFileData.fromRaw(snapshot.toRaw()) - for (const op of ops) { - afterFile.edit(op) - } + afterFile.edit(operation) + this.snapshot = afterFile return afterFile - }, + } - compose(ops1: EditOperation[], ops2: EditOperation[]) { - const ops = [...ops1, ...ops2] - let currentOp = ops.shift() - if (currentOp === undefined) { - // No ops to process - return [] - } - const result = [] - for (const op of ops) { - if (currentOp.canBeComposedWith(op)) { - currentOp = currentOp.compose(op) - } else { - result.push(currentOp) - currentOp = op - } - } - result.push(currentOp) - return result - }, + compose(op1: RawEditOperation[], op2: RawEditOperation[]) { + return [ + EditOperationBuilder.fromJSON(op1[0]) + .compose(EditOperationBuilder.fromJSON(op2[0])) + .toJSON(), + ] + } // Do not provide normalize, used by submitOp to fixup bad input. // normalize(op: TextOperation) {} @@ -81,4 +51,83 @@ export const historyOTType = { // Do not provide invert, only needed for reverting a rejected update. // We are displaying an out-of-sync modal when an op is rejected. 
// invert(op: TextOperation) {} + + // API + insert(pos: number, text: string, fromUndo: boolean) { + const old = this.getText() + const op = new TextOperation() + op.retain(pos) + op.insert(text) + op.retain(old.length - pos) + this.submitOp([op.toJSON()]) + } + + del(pos: number, length: number, fromUndo: boolean) { + const old = this.getText() + const op = new TextOperation() + op.retain(pos) + op.remove(length) + op.retain(old.length - pos - length) + this.submitOp([op.toJSON()]) + } + + getText() { + return this.snapshot.getContent({ filterTrackedDeletes: true }) + } + + getLength() { + return this.getText().length + } + + _register() { + this.on( + 'remoteop', + (rawEditOperation: RawEditOperation[], oldSnapshot: StringFileData) => { + const operation = EditOperationBuilder.fromJSON(rawEditOperation[0]) + if (operation instanceof TextOperation) { + const str = oldSnapshot.getContent() + if (str.length !== operation.baseLength) + throw new TextOperation.ApplyError( + "The operation's base length must be equal to the string's length.", + operation, + str + ) + + let outputCursor = 0 + let inputCursor = 0 + for (const op of operation.ops) { + if (op instanceof RetainOp) { + inputCursor += op.length + outputCursor += op.length + } else if (op instanceof InsertOp) { + this.emit( + 'insert', + outputCursor, + op.insertion, + op.insertion.length + ) + outputCursor += op.insertion.length + } else if (op instanceof RemoveOp) { + this.emit( + 'delete', + outputCursor, + str.slice(inputCursor, inputCursor + op.length) + ) + inputCursor += op.length + } + } + + if (inputCursor !== str.length) + throw new TextOperation.ApplyError( + "The operation didn't operate on the whole string.", + operation, + str + ) + } + } + ) + } + + // stub-interface, provided by sharejs.Doc + submitOp(op: RawEditOperation[]) {} } diff --git a/services/web/frontend/js/features/ide-react/editor/types/document.ts b/services/web/frontend/js/features/ide-react/editor/types/document.ts index 
f6e5f6aebb..fbed3ab8f1 100644 --- a/services/web/frontend/js/features/ide-react/editor/types/document.ts +++ b/services/web/frontend/js/features/ide-react/editor/types/document.ts @@ -1,6 +1,5 @@ import { StringFileData } from 'overleaf-editor-core' import { AnyOperation } from '../../../../../../types/change' -import { RawEditOperation } from 'overleaf-editor-core/lib/types' export type Version = number @@ -37,5 +36,4 @@ export type Message = { doc?: string snapshot?: string | StringFileData type?: ShareJsTextType - op?: AnyOperation[] | RawEditOperation[] } diff --git a/services/web/frontend/js/features/ide-redesign/components/breadcrumbs.tsx b/services/web/frontend/js/features/ide-redesign/components/breadcrumbs.tsx index 9949b98c7f..f148e0142e 100644 --- a/services/web/frontend/js/features/ide-redesign/components/breadcrumbs.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/breadcrumbs.tsx @@ -1,7 +1,4 @@ -import { - findInTree, - findInTreeOrThrow, -} from '@/features/file-tree/util/find-in-tree' +import { findInTreeOrThrow } from '@/features/file-tree/util/find-in-tree' import { useFileTreeOpenContext } from '@/features/ide-react/context/file-tree-open-context' import { useOutlineContext } from '@/features/ide-react/context/outline-context' import useNestedOutline from '@/features/outline/hooks/use-nested-outline' @@ -42,41 +39,35 @@ export default function Breadcrumbs() { const { highlightedLine, canShowOutline } = useOutlineContext() const folderHierarchy = useMemo(() => { - if (openEntity?.type !== 'doc' || !fileTreeData) { + if (!openEntity || !fileTreeData) { return [] } - try { - return openEntity.path - .filter(id => id !== fileTreeData._id) // Filter out the root folder - .map(id => { - return findInTreeOrThrow(fileTreeData, id)?.entity - }) - } catch { - // If any of the folders in the path are not found, the entire hierarchy - // is invalid. 
- return [] - } + return openEntity.path + .filter(id => id !== fileTreeData._id) // Filter out the root folder + .map(id => { + return findInTreeOrThrow(fileTreeData, id)?.entity + }) }, [openEntity, fileTreeData]) const fileName = useMemo(() => { // NOTE: openEntity.entity.name may not always be accurate, so we read it // from the file tree data instead. - if (openEntity?.type !== 'doc' || !fileTreeData) { + if (!openEntity || !fileTreeData) { return undefined } - return findInTree(fileTreeData, openEntity.entity._id)?.entity.name + return findInTreeOrThrow(fileTreeData, openEntity.entity._id)?.entity.name }, [fileTreeData, openEntity]) const outlineHierarchy = useMemo(() => { - if (openEntity?.type !== 'doc' || !canShowOutline || !outline) { + if (!canShowOutline || !outline) { return [] } return constructOutlineHierarchy(outline.items, highlightedLine) - }, [outline, highlightedLine, canShowOutline, openEntity]) + }, [outline, highlightedLine, canShowOutline]) - if (openEntity?.type !== 'doc' || !fileTreeData) { + if (!openEntity || !fileTreeData) { return null } diff --git a/services/web/frontend/js/features/ide-redesign/components/chat/chat.tsx b/services/web/frontend/js/features/ide-redesign/components/chat/chat.tsx index 54d098c6c8..9ebe33e065 100644 --- a/services/web/frontend/js/features/ide-redesign/components/chat/chat.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/chat/chat.tsx @@ -9,8 +9,8 @@ import { useUserContext } from '@/shared/context/user-context' import { lazy, Suspense, useEffect } from 'react' import { useTranslation } from 'react-i18next' import classNames from 'classnames' +import { RailPanelHeader } from '../rail' import { RailIndicator } from '../rail-indicator' -import RailPanelHeader from '../rail-panel-header' const MessageList = lazy(() => import('../../../chat/components/message-list')) diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs-header.tsx 
b/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs-header.tsx deleted file mode 100644 index 2f3a54b095..0000000000 --- a/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs-header.tsx +++ /dev/null @@ -1,98 +0,0 @@ -import { useTranslation } from 'react-i18next' -import RailPanelHeader from '../rail-panel-header' -import OLIconButton from '@/features/ui/components/ol/ol-icon-button' -import { useDetachCompileContext as useCompileContext } from '@/shared/context/detach-compile-context' -import { - Dropdown, - DropdownMenu, - DropdownToggle, -} from '@/features/ui/components/bootstrap-5/dropdown-menu' -import PdfFileList from '@/features/pdf-preview/components/pdf-file-list' -import { forwardRef } from 'react' -import OLTooltip from '@/features/ui/components/ol/ol-tooltip' - -export default function ErrorLogsHeader() { - const { t } = useTranslation() - - return ( - , - , - ]} - /> - ) -} - -const ClearCacheButton = () => { - const { compiling, clearCache, clearingCache } = useCompileContext() - const { t } = useTranslation() - - return ( - - clearCache()} - className="rail-panel-header-button-subdued" - icon="auto_delete" - isLoading={clearingCache} - disabled={clearingCache || compiling} - accessibilityLabel={t('clear_cached_files')} - size="sm" - /> - - ) -} - -const DownloadFileDropdown = () => { - const { fileList } = useCompileContext() - - const { t } = useTranslation() - - return ( - - - {t('other_logs_and_files')} - - {fileList && ( - - - - )} - - ) -} - -const DownloadFileDropdownToggleButton = forwardRef< - HTMLButtonElement, - { onClick: React.MouseEventHandler } ->(function DownloadFileDropdownToggleButton({ onClick }, ref) { - const { compiling, fileList } = useCompileContext() - const { t } = useTranslation() - - return ( - - - - ) -}) diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs-panel.tsx 
b/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs-panel.tsx deleted file mode 100644 index 2cff048256..0000000000 --- a/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs-panel.tsx +++ /dev/null @@ -1,14 +0,0 @@ -import { PdfPreviewProvider } from '@/features/pdf-preview/components/pdf-preview-provider' -import ErrorLogs from './error-logs' -import ErrorLogsHeader from './error-logs-header' - -export default function ErrorLogsPanel() { - return ( - -
    - - -
    -
    - ) -} diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs.tsx deleted file mode 100644 index a6a62e998a..0000000000 --- a/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs.tsx +++ /dev/null @@ -1,142 +0,0 @@ -import { useTranslation } from 'react-i18next' -import { ElementType, memo, useMemo, useState } from 'react' -import { usePdfPreviewContext } from '@/features/pdf-preview/components/pdf-preview-provider' -import StopOnFirstErrorPrompt from '@/features/pdf-preview/components/stop-on-first-error-prompt' -import PdfPreviewError from '@/features/pdf-preview/components/pdf-preview-error' -import PdfValidationIssue from '@/features/pdf-preview/components/pdf-validation-issue' -import PdfLogsEntries from '@/features/pdf-preview/components/pdf-logs-entries' -import PdfPreviewErrorBoundaryFallback from '@/features/pdf-preview/components/pdf-preview-error-boundary-fallback' -import withErrorBoundary from '@/infrastructure/error-boundary' -import { useDetachCompileContext as useCompileContext } from '@/shared/context/detach-compile-context' -import { Nav, NavLink, TabContainer, TabContent } from 'react-bootstrap' -import { LogEntry as LogEntryData } from '@/features/pdf-preview/util/types' -import LogEntry from './log-entry' -import importOverleafModules from '../../../../../macros/import-overleaf-module.macro' - -const logsComponents: Array<{ - import: { default: ElementType } - path: string -}> = importOverleafModules('errorLogsComponents') - -type ErrorLogTab = { - key: string - label: string - entries: LogEntryData[] | undefined -} - -function ErrorLogs() { - const { error, logEntries, rawLog, validationIssues, stoppedOnFirstError } = - useCompileContext() - - const tabs = useMemo(() => { - return [ - { key: 'all', label: 'All', entries: logEntries?.all }, - { key: 'errors', label: 'Errors', entries: 
logEntries?.errors }, - { key: 'warnings', label: 'Warnings', entries: logEntries?.warnings }, - { key: 'info', label: 'Info', entries: logEntries?.typesetting }, - ] - }, [logEntries]) - - const { loadingError } = usePdfPreviewContext() - - const { t } = useTranslation() - - const [activeTab, setActiveTab] = useState('all') - - const entries = useMemo(() => { - return tabs.find(tab => tab.key === activeTab)?.entries || [] - }, [activeTab, tabs]) - - const includeErrors = activeTab === 'all' || activeTab === 'errors' - const includeWarnings = activeTab === 'all' || activeTab === 'warnings' - - return ( - - - {logsComponents.map(({ import: { default: Component }, path }) => ( - - ))} - -
    - {stoppedOnFirstError && includeErrors && } - - {loadingError && ( - - )} - - {error && ( - - )} - - {includeErrors && - validationIssues && - Object.entries(validationIssues).map(([name, issue]) => ( - - ))} - - {entries && ( - 0} - /> - )} - - {rawLog && activeTab === 'all' && ( - - )} -
    -
    -
    - ) -} - -function formatErrorNumber(num: number | undefined) { - if (num === undefined) { - return undefined - } - - if (num > 99) { - return '99+' - } - - return Math.floor(num).toString() -} - -const TabHeader = ({ tab, active }: { tab: ErrorLogTab; active: boolean }) => { - return ( - - {tab.label} -
    - {/* TODO: it would be nice if this number included custom errors */} - {formatErrorNumber(tab.entries?.length)} -
    -
    - ) -} - -export default withErrorBoundary(memo(ErrorLogs), () => ( - -)) diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/log-entry-header.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/log-entry-header.tsx deleted file mode 100644 index ce43af3744..0000000000 --- a/services/web/frontend/js/features/ide-redesign/components/error-logs/log-entry-header.tsx +++ /dev/null @@ -1,167 +0,0 @@ -import classNames from 'classnames' -import { useState, useRef, MouseEventHandler, ElementType } from 'react' -import { useTranslation } from 'react-i18next' -import OLTooltip from '@/features/ui/components/ol/ol-tooltip' -import { - ErrorLevel, - SourceLocation, - LogEntry as LogEntryData, -} from '@/features/pdf-preview/util/types' -import useResizeObserver from '@/features/preview/hooks/use-resize-observer' -import OLIconButton from '@/features/ui/components/ol/ol-icon-button' -import importOverleafModules from '../../../../../macros/import-overleaf-module.macro' -import MaterialIcon from '@/shared/components/material-icon' - -const actionComponents = importOverleafModules( - 'pdfLogEntryHeaderActionComponents' -) as { - import: { default: ElementType } - path: string -}[] - -function LogEntryHeader({ - sourceLocation, - level, - headerTitle, - logType, - showSourceLocationLink = true, - onSourceLocationClick, - collapsed, - onToggleCollapsed, - id, - logEntry, - actionButtonsOverride, - openCollapseIconOverride, -}: { - headerTitle: string | React.ReactNode - level: ErrorLevel - logType?: string - sourceLocation?: SourceLocation - showSourceLocationLink?: boolean - onSourceLocationClick?: MouseEventHandler - collapsed: boolean - onToggleCollapsed: () => void - id?: string - logEntry?: LogEntryData - actionButtonsOverride?: React.ReactNode - openCollapseIconOverride?: string -}) { - const { t } = useTranslation() - const logLocationSpanRef = useRef(null) - const [locationSpanOverflown, setLocationSpanOverflown] = 
useState(false) - - useResizeObserver( - logLocationSpanRef, - locationSpanOverflown, - checkLocationSpanOverflow - ) - - const file = sourceLocation ? sourceLocation.file : null - const line = sourceLocation ? sourceLocation.line : null - const logEntryHeaderTextClasses = classNames('log-entry-header-text', { - 'log-entry-header-text-error': level === 'error', - 'log-entry-header-text-warning': level === 'warning', - 'log-entry-header-text-info': level === 'info' || level === 'typesetting', - 'log-entry-header-text-success': level === 'success', - 'log-entry-header-text-raw': level === 'raw', - }) - - function checkLocationSpanOverflow(observedElement: ResizeObserverEntry) { - const spanEl = observedElement.target - const isOverflowing = spanEl.scrollWidth > spanEl.clientWidth - setLocationSpanOverflown(isOverflowing) - } - - const locationText = - showSourceLocationLink && file ? `${file}${line ? `, ${line}` : ''}` : null - - // Because we want an ellipsis on the left-hand side (e.g. "...longfilename.tex"), the - // `log-entry-location` class has text laid out from right-to-left using the CSS - // rule `direction: rtl;`. - // This works most of the times, except when the first character of the filename is considered - // a punctuation mark, like `/` (e.g. `/foo/bar/baz.sty`). In this case, because of - // right-to-left writing rules, the punctuation mark is moved to the right-side of the string, - // resulting in `...bar/baz.sty/` instead of `...bar/baz.sty`. - // To avoid this edge-case, we wrap the `logLocationLinkText` in two directional formatting - // characters: - // * \u202A LEFT-TO-RIGHT EMBEDDING Treat the following text as embedded left-to-right. - // * \u202C POP DIRECTIONAL FORMATTING End the scope of the last LRE, RLE, RLO, or LRO. - // This essentially tells the browser that, althought the text is laid out from right-to-left, - // the wrapped portion of text should follow left-to-right writing rules. - const formattedLocationText = locationText ? 
( - - {`\u202A${locationText}\u202C`} - - ) : null - - const headerTitleText = logType ? `${logType} ${headerTitle}` : headerTitle - - return ( -
    - - - - - {actionButtonsOverride ?? ( -
    - {showSourceLocationLink && ( - - - - )} - {actionComponents.map(({ import: { default: Component }, path }) => ( - - ))} -
    - )} -
    - ) -} - -export default LogEntryHeader diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/log-entry.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/log-entry.tsx deleted file mode 100644 index a7539450ce..0000000000 --- a/services/web/frontend/js/features/ide-redesign/components/error-logs/log-entry.tsx +++ /dev/null @@ -1,140 +0,0 @@ -import { - Dispatch, - MouseEventHandler, - useCallback, - memo, - SetStateAction, - useState, -} from 'react' -import HumanReadableLogsHints from '../../../../ide/human-readable-logs/HumanReadableLogsHints' -import { sendMB } from '@/infrastructure/event-tracking' -import { - ErrorLevel, - LogEntry as LogEntryData, - SourceLocation, -} from '@/features/pdf-preview/util/types' -import LogEntryHeader from './log-entry-header' -import PdfLogEntryContent from '@/features/pdf-preview/components/pdf-log-entry-content' -import classNames from 'classnames' - -type LogEntryProps = { - headerTitle: string | React.ReactNode - level: ErrorLevel - ruleId?: string - rawContent?: string - logType?: string - formattedContent?: React.ReactNode - extraInfoURL?: string | null - sourceLocation?: SourceLocation - showSourceLocationLink?: boolean - entryAriaLabel?: string - contentDetails?: string[] - onSourceLocationClick?: (sourceLocation: SourceLocation) => void - index?: number - logEntry?: LogEntryData - id?: string - alwaysExpandRawContent?: boolean - className?: string - actionButtonsOverride?: React.ReactNode - openCollapseIconOverride?: string -} - -function LogEntry(props: LogEntryProps) { - const [collapsed, setCollapsed] = useState(true) - - return ( - - ) -} - -export function ControlledLogEntry({ - ruleId, - headerTitle, - rawContent, - logType, - formattedContent, - extraInfoURL, - level, - sourceLocation, - showSourceLocationLink = true, - entryAriaLabel = undefined, - contentDetails, - onSourceLocationClick, - index, - logEntry, - id, - alwaysExpandRawContent = false, - 
className, - collapsed, - setCollapsed, - actionButtonsOverride, - openCollapseIconOverride, -}: LogEntryProps & { - collapsed: boolean - setCollapsed: Dispatch> -}) { - if (ruleId && HumanReadableLogsHints[ruleId]) { - const hint = HumanReadableLogsHints[ruleId] - formattedContent = hint.formattedContent(contentDetails) - extraInfoURL = hint.extraInfoURL - } - - const handleLogEntryLinkClick: MouseEventHandler = - useCallback( - event => { - event.preventDefault() - - if (onSourceLocationClick && sourceLocation) { - onSourceLocationClick(sourceLocation) - - const parts = sourceLocation?.file?.split('.') - const extension = - parts?.length && parts?.length > 1 ? parts.pop() : '' - sendMB('log-entry-link-click', { level, ruleId, extension }) - } - }, - [level, onSourceLocationClick, ruleId, sourceLocation] - ) - - return ( -
    - setCollapsed(collapsed => !collapsed)} - id={id} - logEntry={logEntry} - actionButtonsOverride={actionButtonsOverride} - openCollapseIconOverride={openCollapseIconOverride} - /> -
    - -
    - ) -} - -export default memo(LogEntry) diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/new-error-logs-promo.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/new-error-logs-promo.tsx deleted file mode 100644 index 1589fa819d..0000000000 --- a/services/web/frontend/js/features/ide-redesign/components/error-logs/new-error-logs-promo.tsx +++ /dev/null @@ -1,59 +0,0 @@ -import Close from '@/shared/components/close' -import { useEditorContext } from '@/shared/context/editor-context' -import useTutorial from '@/shared/hooks/promotions/use-tutorial' -import { useCallback, useEffect } from 'react' -import { Overlay, Popover } from 'react-bootstrap' -import { useTranslation } from 'react-i18next' - -const TUTORIAL_KEY = 'new-error-logs-promo' -const EVENT_DATA = { name: 'new-error-logs-promotion' } - -export default function NewErrorLogsPromo({ - target, -}: { - target: HTMLElement | null -}) { - const { t } = useTranslation() - - const { inactiveTutorials } = useEditorContext() - const { showPopup, tryShowingPopup, hideUntilReload, completeTutorial } = - useTutorial(TUTORIAL_KEY, EVENT_DATA) - - useEffect(() => { - if (!inactiveTutorials.includes(TUTORIAL_KEY)) { - tryShowingPopup() - } - }, [tryShowingPopup, inactiveTutorials]) - - const onHide = useCallback(() => { - hideUntilReload() - }, [hideUntilReload]) - - const onClose = useCallback(() => { - completeTutorial({ - action: 'complete', - event: 'promo-dismiss', - }) - }, [completeTutorial]) - - if (!target) { - return null - } - - return ( - - - - {t('error_logs_have_had_an_update')} - - - - - ) -} diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/old-error-pane.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/old-error-pane.tsx deleted file mode 100644 index 7794747d30..0000000000 --- a/services/web/frontend/js/features/ide-redesign/components/error-logs/old-error-pane.tsx +++ /dev/null @@ -1,10 
+0,0 @@ -import PdfLogsViewer from '@/features/pdf-preview/components/pdf-logs-viewer' -import { PdfPreviewProvider } from '@/features/pdf-preview/components/pdf-preview-provider' - -export default function OldErrorPane() { - return ( - - - - ) -} diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/error-indicator.tsx b/services/web/frontend/js/features/ide-redesign/components/errors.tsx similarity index 56% rename from services/web/frontend/js/features/ide-redesign/components/error-logs/error-indicator.tsx rename to services/web/frontend/js/features/ide-redesign/components/errors.tsx index 7b721a1d51..2313022d3c 100644 --- a/services/web/frontend/js/features/ide-redesign/components/error-logs/error-indicator.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/errors.tsx @@ -1,7 +1,9 @@ +import PdfLogsViewer from '@/features/pdf-preview/components/pdf-logs-viewer' +import { PdfPreviewProvider } from '@/features/pdf-preview/components/pdf-preview-provider' import { useDetachCompileContext as useCompileContext } from '@/shared/context/detach-compile-context' -import { RailIndicator } from '../rail-indicator' +import { RailIndicator } from './rail-indicator' -export default function ErrorIndicator() { +export const ErrorIndicator = () => { const { logEntries } = useCompileContext() if (!logEntries) { @@ -23,3 +25,11 @@ export default function ErrorIndicator() { /> ) } + +export const ErrorPane = () => { + return ( + + + + ) +} diff --git a/services/web/frontend/js/features/ide-redesign/components/full-project-search-panel.tsx b/services/web/frontend/js/features/ide-redesign/components/full-project-search-panel.tsx deleted file mode 100644 index 926341ce89..0000000000 --- a/services/web/frontend/js/features/ide-redesign/components/full-project-search-panel.tsx +++ /dev/null @@ -1,19 +0,0 @@ -import { ElementType } from 'react' -import importOverleafModules from '../../../../macros/import-overleaf-module.macro' - -const 
componentModule = importOverleafModules('fullProjectSearchPanel')[0] as - | { - import: { default: ElementType } - path: string - } - | undefined - -export const FullProjectSearchPanel = () => { - if (!componentModule) { - return null - } - const FullProjectSearch = componentModule.import.default - return -} - -export const hasFullProjectSearch = Boolean(componentModule) diff --git a/services/web/frontend/js/features/ide-redesign/components/integrations-panel/integrations-panel.tsx b/services/web/frontend/js/features/ide-redesign/components/integrations-panel/integrations-panel.tsx index e477602e3e..d1e4358907 100644 --- a/services/web/frontend/js/features/ide-redesign/components/integrations-panel/integrations-panel.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/integrations-panel/integrations-panel.tsx @@ -1,7 +1,7 @@ import { ElementType } from 'react' import importOverleafModules from '../../../../../macros/import-overleaf-module.macro' +import { RailPanelHeader } from '../rail' import { useTranslation } from 'react-i18next' -import RailPanelHeader from '../rail-panel-header' const integrationPanelComponents = importOverleafModules( 'integrationPanelComponents' diff --git a/services/web/frontend/js/features/ide-redesign/components/main-layout.tsx b/services/web/frontend/js/features/ide-redesign/components/main-layout.tsx index 8ec00a397e..2c422af279 100644 --- a/services/web/frontend/js/features/ide-redesign/components/main-layout.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/main-layout.tsx @@ -56,9 +56,6 @@ export default function MainLayout() { {pdfLayout === 'sideBySide' && ( -
    + )} diff --git a/services/web/frontend/js/features/ide-redesign/components/pdf-preview/pdf-code-check-failed-banner.tsx b/services/web/frontend/js/features/ide-redesign/components/pdf-preview/pdf-code-check-failed-banner.tsx deleted file mode 100644 index ea7c9894d7..0000000000 --- a/services/web/frontend/js/features/ide-redesign/components/pdf-preview/pdf-code-check-failed-banner.tsx +++ /dev/null @@ -1,22 +0,0 @@ -import MaterialIcon from '@/shared/components/material-icon' -import { useTranslation } from 'react-i18next' -import { useDetachCompileContext as useCompileContext } from '@/shared/context/detach-compile-context' - -export default function PdfCodeCheckFailedBanner() { - const { t } = useTranslation() - - const { codeCheckFailed } = useCompileContext() - - if (!codeCheckFailed) { - return null - } - - return ( -
    -
    - - {t('code_check_failed_explanation')} -
    -
    - ) -} diff --git a/services/web/frontend/js/features/ide-redesign/components/pdf-preview/pdf-error-state.tsx b/services/web/frontend/js/features/ide-redesign/components/pdf-preview/pdf-error-state.tsx index a4f53ae614..ef77c0fa5d 100644 --- a/services/web/frontend/js/features/ide-redesign/components/pdf-preview/pdf-error-state.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/pdf-preview/pdf-error-state.tsx @@ -5,37 +5,31 @@ import { useRailContext } from '../../contexts/rail-context' import { usePdfPreviewContext } from '@/features/pdf-preview/components/pdf-preview-provider' import { useDetachCompileContext as useCompileContext } from '@/shared/context/detach-compile-context' import { useIsNewEditorEnabled } from '../../utils/new-editor-utils' -import { upgradePlan } from '@/main/account-upgrade' -import classNames from 'classnames' function PdfErrorState() { const { loadingError } = usePdfPreviewContext() // TODO ide-redesign-cleanup: rename showLogs to something else and check usages - const { hasShortCompileTimeout, error, showLogs } = useCompileContext() + const { showLogs } = useCompileContext() + const { t } = useTranslation() + const { openTab: openRailTab } = useRailContext() const newEditor = useIsNewEditorEnabled() if (!newEditor || (!loadingError && !showLogs)) { return null } - if (hasShortCompileTimeout && error === 'timedout') { - return - } - - return -} - -const GeneralErrorState = () => { - const { t } = useTranslation() - const { openTab: openRailTab } = useRailContext() - return ( - +
    +
    + +
    +
    +

    {t('pdf_couldnt_compile')}

    +

    + {t('we_are_unable_to_generate_the_pdf_at_this_time')} +

    +
    { > {t('check_logs')} - } - extraContent={ -
    -
    - - {t('why_might_this_happen')} -
    -
      -
    • {t('there_is_an_unrecoverable_latex_error')}
    • -
    • {t('the_document_environment_contains_no_content')}
    • -
    • {t('this_project_contains_a_file_called_output')}
    • -
    -
    - } - /> - ) -} - -const CompileTimeoutErrorState = () => { - const { t } = useTranslation() - - return ( - upgradePlan('compile-timeout')} - > - {t('upgrade')} - - } - /> - ) -} - -const ErrorState = ({ - title, - description, - iconType, - actions, - iconClassName, - extraContent, -}: { - title: string - description: string - iconType: string - actions: React.ReactNode - iconClassName?: string - extraContent?: React.ReactNode -}) => { - return ( -
    -
    -
    - -
    -
    -

    {title}

    -

    {description}

    -
    - {actions}
    - {extraContent} +
    +
    + + {t('why_might_this_happen')} +
    +
      +
    • {t('there_is_an_unrecoverable_latex_error')}
    • +
    • {t('the_document_environment_contains_no_content')}
    • +
    • {t('this_project_contains_a_file_called_output')}
    • +
    +
    ) } + export default PdfErrorState diff --git a/services/web/frontend/js/features/ide-redesign/components/rail-panel-header.tsx b/services/web/frontend/js/features/ide-redesign/components/rail-panel-header.tsx deleted file mode 100644 index 94ac2f42af..0000000000 --- a/services/web/frontend/js/features/ide-redesign/components/rail-panel-header.tsx +++ /dev/null @@ -1,31 +0,0 @@ -import { useTranslation } from 'react-i18next' -import { useRailContext } from '../contexts/rail-context' -import OLIconButton from '@/features/ui/components/ol/ol-icon-button' -import React from 'react' - -export default function RailPanelHeader({ - title, - actions, -}: { - title: string - actions?: React.ReactNode[] -}) { - const { t } = useTranslation() - const { handlePaneCollapse } = useRailContext() - return ( -
    -

    {title}

    - -
    - {actions} - -
    -
    - ) -} diff --git a/services/web/frontend/js/features/ide-redesign/components/rail.tsx b/services/web/frontend/js/features/ide-redesign/components/rail.tsx index f3c741155c..d6e1112536 100644 --- a/services/web/frontend/js/features/ide-redesign/components/rail.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/rail.tsx @@ -1,11 +1,4 @@ -import { - FC, - forwardRef, - ReactElement, - useCallback, - useMemo, - useRef, -} from 'react' +import { FC, ReactElement, useCallback, useMemo } from 'react' import { useTranslation } from 'react-i18next' import { Nav, NavLink, Tab, TabContainer } from 'react-bootstrap' import MaterialIcon, { @@ -13,7 +6,7 @@ import MaterialIcon, { } from '@/shared/components/material-icon' import { Panel } from 'react-resizable-panels' import { useLayoutContext } from '@/shared/context/layout-context' -import ErrorIndicator from './error-logs/error-indicator' +import { ErrorIndicator, ErrorPane } from './errors' import { RailModalKey, RailTabKey, @@ -41,17 +34,6 @@ import OLTooltip from '@/features/ui/components/ol/ol-tooltip' import OLIconButton from '@/features/ui/components/ol/ol-icon-button' import { useChatContext } from '@/features/chat/context/chat-context' import { useEditorAnalytics } from '@/shared/hooks/use-editor-analytics' -import { - FullProjectSearchPanel, - hasFullProjectSearch, -} from './full-project-search-panel' -import { sendSearchEvent } from '@/features/event-tracking/search-events' -import ErrorLogsPanel from './error-logs/error-logs-panel' -import { useDetachCompileContext as useCompileContext } from '@/shared/context/detach-compile-context' -import OldErrorPane from './error-logs/old-error-pane' -import { useFeatureFlag } from '@/shared/context/split-test-context' -import { useSurveyUrl } from '../hooks/use-survey-url' -import NewErrorLogsPromo from './error-logs/new-error-logs-promo' type RailElement = { icon: AvailableUnfilledIcon @@ -60,7 +42,6 @@ type RailElement = { indicator?: ReactElement 
title: string hide?: boolean - disabled?: boolean } type RailActionButton = { @@ -110,10 +91,6 @@ export const RailLayout = () => { togglePane, setResizing, } = useRailContext() - const { logEntries } = useCompileContext() - const errorLogsDisabled = !logEntries - - const errorsTabRef = useRef(null) const { view, setLeftMenuShown } = useLayoutContext() @@ -121,8 +98,6 @@ export const RailLayout = () => { const isHistoryView = view === 'history' - const newErrorlogs = useFeatureFlag('new-editor-error-logs-redesign') - const railTabs: RailElement[] = useMemo( () => [ { @@ -131,13 +106,6 @@ export const RailLayout = () => { title: t('file_tree'), component: , }, - { - key: 'full-project-search', - icon: 'search', - title: t('project_search'), - component: , - hide: !hasFullProjectSearch, - }, { key: 'integrations', icon: 'integration_instructions', @@ -156,18 +124,17 @@ export const RailLayout = () => { component: , indicator: , title: t('chat'), - hide: !getMeta('ol-capabilities')?.includes('chat'), + hide: !getMeta('ol-chatEnabled'), }, { key: 'errors', icon: 'report', title: t('error_log'), - component: newErrorlogs ? : , + component: , indicator: , - disabled: errorLogsDisabled, }, ], - [t, errorLogsDisabled, newErrorlogs] + [t] ) const railActions: RailAction[] = useMemo( @@ -203,17 +170,10 @@ export const RailLayout = () => { // Attempting to open a non-existent tab return } - const keyOrDefault = (key ?? 'file-tree') as RailTabKey + const keyOrDefault = key ?? 'file-tree' // Change the selected tab and make sure it's open - openTab(keyOrDefault) + openTab(keyOrDefault as RailTabKey) sendEvent('rail-click', { tab: keyOrDefault }) - if (keyOrDefault === 'full-project-search') { - sendSearchEvent('search-open', { - searchType: 'full-project', - method: 'button', - location: 'rail', - }) - } if (key === 'chat') { markMessagesAsRead() @@ -238,31 +198,24 @@ export const RailLayout = () => {
    { hidden: isHistoryView, })} > - + {railTabs .filter(({ hide }) => !hide) .map(({ key, component }) => ( @@ -313,17 +266,19 @@ export const RailLayout = () => { ) } -const RailTab = forwardRef< - HTMLAnchorElement, - { - icon: AvailableUnfilledIcon - eventKey: string - open: boolean - indicator?: ReactElement - title: string - disabled?: boolean - } ->(({ icon, eventKey, open, indicator, title, disabled = false }, ref) => { +const RailTab = ({ + icon, + eventKey, + open, + indicator, + title, +}: { + icon: AvailableUnfilledIcon + eventKey: string + open: boolean + indicator?: ReactElement + title: string +}) => { return ( {open ? ( ) -}) - -RailTab.displayName = 'RailTab' +} const RailActionElement = ({ action }: { action: RailAction }) => { const onActionClick = useCallback(() => { @@ -412,6 +363,23 @@ const RailActionElement = ({ action }: { action: RailAction }) => { } } +export const RailPanelHeader: FC<{ title: string }> = ({ title }) => { + const { t } = useTranslation() + const { handlePaneCollapse } = useRailContext() + return ( +
    +

    {title}

    + +
    + ) +} + const RailHelpDropdown = () => { const showSupport = getMeta('ol-showSupport') const { t } = useTranslation() @@ -422,8 +390,6 @@ const RailHelpDropdown = () => { const openContactUsModal = useCallback(() => { setActiveModal('contact-us') }, [setActiveModal]) - const surveyURL = useSurveyUrl() - return ( @@ -444,7 +410,7 @@ const RailHelpDropdown = () => { )} - ) -} diff --git a/services/web/frontend/js/features/ide-redesign/components/settings/editor-settings/editor-settings.tsx b/services/web/frontend/js/features/ide-redesign/components/settings/editor-settings/editor-settings.tsx index a58b0c101e..28dcef8a9b 100644 --- a/services/web/frontend/js/features/ide-redesign/components/settings/editor-settings/editor-settings.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/settings/editor-settings/editor-settings.tsx @@ -9,7 +9,6 @@ import PDFViewerSetting from './pdf-viewer-setting' import SpellCheckSetting from './spell-check-setting' import DictionarySetting from './dictionary-setting' import importOverleafModules from '../../../../../../macros/import-overleaf-module.macro' -import BreadcrumbsSetting from './breadcrumbs-setting' const [referenceSearchSettingModule] = importOverleafModules( 'referenceSearchSetting' @@ -34,7 +33,6 @@ export default function EditorSettings() { - diff --git a/services/web/frontend/js/features/ide-redesign/components/switcher-modal/modal.tsx b/services/web/frontend/js/features/ide-redesign/components/switcher-modal/modal.tsx index be3868ed89..6942674de5 100644 --- a/services/web/frontend/js/features/ide-redesign/components/switcher-modal/modal.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/switcher-modal/modal.tsx @@ -15,8 +15,6 @@ import Notification from '@/shared/components/notification' import { useSwitchEnableNewEditorState } from '../../hooks/use-switch-enable-new-editor-state' import { Trans, useTranslation } from 'react-i18next' import { useEditorAnalytics } from 
'@/shared/hooks/use-editor-analytics' -import { useFeatureFlag } from '@/shared/context/split-test-context' -import { useSurveyUrl } from '../../hooks/use-survey-url' export const IdeRedesignSwitcherModal = () => { const { t } = useTranslation() @@ -81,9 +79,6 @@ const SwitcherModalContentEnabled: FC = ({ // do nothing, we're already showing the error }) }, [setEditorRedesignStatus, hide, sendEvent]) - - const surveyURL = useSurveyUrl() - return ( <> @@ -109,7 +104,7 @@ const SwitcherModalContentEnabled: FC = ({ {t('cancel')} = ({ const SwitcherWhatsNew = () => { const { t } = useTranslation() - const newErrorlogs = useFeatureFlag('new-editor-error-logs-redesign') - return (

    {t('latest_updates')}

      - {newErrorlogs &&
    • {t('new_error_logs_panel')}
    • } -
    • {t('searching_all_project_files_is_now_available')}
    • {t('double_clicking_on_the_pdf_shows')}

    diff --git a/services/web/frontend/js/features/ide-redesign/components/toolbar/command-dropdown.tsx b/services/web/frontend/js/features/ide-redesign/components/toolbar/command-dropdown.tsx index 2dc696cdbf..e08cf8873a 100644 --- a/services/web/frontend/js/features/ide-redesign/components/toolbar/command-dropdown.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/toolbar/command-dropdown.tsx @@ -1,7 +1,5 @@ import { Command, - formatShortcut, - Shortcuts, useCommandRegistry, } from '@/features/ide-react/context/command-registry-context' import { @@ -16,10 +14,7 @@ import { MenuBarOption } from '@/shared/components/menu-bar/menu-bar-option' import { Fragment, useCallback, useMemo } from 'react' type CommandId = string -type TaggedCommand = Command & { - type: 'command' - shortcuts?: Shortcuts[CommandId] -} +type TaggedCommand = Command & { type: 'command' } type Entry = T | GroupStructure type GroupStructure = { id: string @@ -42,13 +37,13 @@ const CommandDropdown = ({ title: string id: string }) => { - const { registry, shortcuts } = useCommandRegistry() + const { registry } = useCommandRegistry() const populatedSections = useMemo( () => menu - .map(section => populateSectionOrGroup(section, registry, shortcuts)) + .map(section => populateSectionOrGroup(section, registry)) .filter(x => x.children.length > 0), - [menu, registry, shortcuts] + [menu, registry] ) if (populatedSections.length === 0) { @@ -81,8 +76,8 @@ export const CommandSection = ({ }: { section: MenuSectionStructure }) => { - const { registry, shortcuts } = useCommandRegistry() - const section = populateSectionOrGroup(sectionStructure, registry, shortcuts) + const { registry } = useCommandRegistry() + const section = populateSectionOrGroup(sectionStructure, registry) if (section.children.length === 0) { return null } @@ -113,9 +108,6 @@ const CommandDropdownChild = ({ item }: { item: Entry }) => { onClick={onClickHandler} href={item.href} disabled={item.disabled} - trailingIcon={ 
- item.shortcuts && {formatShortcut(item.shortcuts[0])} - } /> ) } else { @@ -135,8 +127,7 @@ function populateSectionOrGroup< T extends { children: Array> }, >( section: T, - registry: Map, - shortcuts: Shortcuts + registry: Map ): Omit & { children: Array> } { @@ -146,11 +137,7 @@ function populateSectionOrGroup< children: children .map(child => { if (typeof child !== 'string') { - const populatedChild = populateSectionOrGroup( - child, - registry, - shortcuts - ) + const populatedChild = populateSectionOrGroup(child, registry) if (populatedChild.children.length === 0) { // Skip empty groups return undefined @@ -159,11 +146,7 @@ function populateSectionOrGroup< } const command = registry.get(child) if (command) { - return { - ...command, - shortcuts: shortcuts[command.id], - type: 'command' as const, - } + return { ...command, type: 'command' as const } } return undefined }) diff --git a/services/web/frontend/js/features/ide-redesign/components/toolbar/duplicate-project.tsx b/services/web/frontend/js/features/ide-redesign/components/toolbar/duplicate-project.tsx deleted file mode 100644 index 74f868cc91..0000000000 --- a/services/web/frontend/js/features/ide-redesign/components/toolbar/duplicate-project.tsx +++ /dev/null @@ -1,48 +0,0 @@ -import EditorCloneProjectModalWrapper from '@/features/clone-project-modal/components/editor-clone-project-modal-wrapper' -import OLDropdownMenuItem from '@/features/ui/components/ol/ol-dropdown-menu-item' -import { useEditorAnalytics } from '@/shared/hooks/use-editor-analytics' -import { useLocation } from '@/shared/hooks/use-location' -import getMeta from '@/utils/meta' -import { useCallback, useState } from 'react' -import { useTranslation } from 'react-i18next' - -type ProjectCopyResponse = { - project_id: string -} - -export const DuplicateProject = () => { - const { sendEvent } = useEditorAnalytics() - const { t } = useTranslation() - const [showModal, setShowModal] = useState(false) - const location = useLocation() - 
const anonymous = getMeta('ol-anonymous') - - const openProject = useCallback( - ({ project_id: projectId }: ProjectCopyResponse) => { - location.assign(`/project/${projectId}`) - }, - [location] - ) - - const handleShowModal = useCallback(() => { - sendEvent('copy-project', { location: 'project-title-dropdown' }) - setShowModal(true) - }, [sendEvent]) - - if (anonymous) { - return null - } - - return ( - <> - - {t('copy')} - - setShowModal(false)} - openProject={openProject} - /> - - ) -} diff --git a/services/web/frontend/js/features/ide-redesign/components/toolbar/labs-actions.tsx b/services/web/frontend/js/features/ide-redesign/components/toolbar/labs-actions.tsx index 27630ca598..06a3a2ab4a 100644 --- a/services/web/frontend/js/features/ide-redesign/components/toolbar/labs-actions.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/toolbar/labs-actions.tsx @@ -4,7 +4,6 @@ import OLTooltip from '@/features/ui/components/ol/ol-tooltip' import MaterialIcon from '@/shared/components/material-icon' import { useCallback } from 'react' import { useTranslation } from 'react-i18next' -import { useSurveyUrl } from '../../hooks/use-survey-url' export const LabsActions = () => { const { t } = useTranslation() @@ -12,7 +11,6 @@ export const LabsActions = () => { const openEditorRedesignSwitcherModal = useCallback(() => { setShowSwitcherModal(true) }, [setShowSwitcherModal]) - const surveyURL = useSurveyUrl() return ( <>
    @@ -36,7 +34,7 @@ export const LabsActions = () => {
    { const { t } = useTranslation() @@ -171,16 +170,19 @@ export const ToolbarMenuBar = () => { [t] ) - const { mathPreview, setMathPreview, breadcrumbs, setBreadcrumbs } = - useProjectSettingsContext() + const { + userSettings: { mathPreview }, + setUserSettings, + } = useUserSettingsContext() const toggleMathPreview = useCallback(() => { - setMathPreview(!mathPreview) - }, [setMathPreview, mathPreview]) - - const toggleBreadcrumbs = useCallback(() => { - setBreadcrumbs(!breadcrumbs) - }, [setBreadcrumbs, breadcrumbs]) + setUserSettings(prev => { + return { + ...prev, + mathPreview: !prev.mathPreview, + } + }) + }, [setUserSettings]) const { setActiveModal } = useRailContext() const openKeyboardShortcutsModal = useCallback(() => { @@ -189,9 +191,6 @@ export const ToolbarMenuBar = () => { const openContactUsModal = useCallback(() => { setActiveModal('contact-us') }, [setActiveModal]) - - const surveyURL = useSurveyUrl() - return ( <> { Editor settings - - } - onClick={toggleBreadcrumbs} - /> { diff --git a/services/web/frontend/js/features/ide-redesign/components/toolbar/project-title.tsx b/services/web/frontend/js/features/ide-redesign/components/toolbar/project-title.tsx index 61e29023a0..68860da4ea 100644 --- a/services/web/frontend/js/features/ide-redesign/components/toolbar/project-title.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/toolbar/project-title.tsx @@ -13,7 +13,6 @@ import { DownloadProjectPDF, DownloadProjectZip } from './download-project' import { useCallback, useState } from 'react' import OLDropdownMenuItem from '@/features/ui/components/ol/ol-dropdown-menu-item' import EditableLabel from './editable-label' -import { DuplicateProject } from './duplicate-project' const [publishModalModules] = importOverleafModules('publishModal') const SubmitProjectButton = publishModalModules?.import.NewPublishToolbarButton @@ -74,7 +73,6 @@ export const ToolbarProjectTitle = () => { - { setIsRenaming(true) diff --git 
a/services/web/frontend/js/features/ide-redesign/components/toolbar/toolbar.tsx b/services/web/frontend/js/features/ide-redesign/components/toolbar/toolbar.tsx index 298e6f8e93..56c597451e 100644 --- a/services/web/frontend/js/features/ide-redesign/components/toolbar/toolbar.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/toolbar/toolbar.tsx @@ -12,8 +12,6 @@ import BackToEditorButton from '@/features/editor-navigation-toolbar/components/ import { useCallback } from 'react' import * as eventTracking from '../../../../infrastructure/event-tracking' import OLTooltip from '@/features/ui/components/ol/ol-tooltip' -import UpgradeButton from './upgrade-button' -import getMeta from '@/utils/meta' export const Toolbar = () => { const { view, setView } = useLayoutContext() @@ -73,7 +71,6 @@ const ToolbarButtons = () => { - {getMeta('ol-showUpgradePrompt') && }
    ) } diff --git a/services/web/frontend/js/features/ide-redesign/components/toolbar/upgrade-button.tsx b/services/web/frontend/js/features/ide-redesign/components/toolbar/upgrade-button.tsx deleted file mode 100644 index b3059de5af..0000000000 --- a/services/web/frontend/js/features/ide-redesign/components/toolbar/upgrade-button.tsx +++ /dev/null @@ -1,26 +0,0 @@ -import { useTranslation } from 'react-i18next' -import * as eventTracking from '../../../../infrastructure/event-tracking' -import OLButton from '@/features/ui/components/ol/ol-button' - -export default function UpgradeButton() { - const { t } = useTranslation() - - function handleClick() { - eventTracking.send('subscription-funnel', 'code-editor', 'upgrade') - eventTracking.sendMB('upgrade-button-click', { source: 'code-editor' }) - } - - return ( -
    - - {t('upgrade')} - -
    - ) -} diff --git a/services/web/frontend/js/features/ide-redesign/contexts/rail-context.tsx b/services/web/frontend/js/features/ide-redesign/contexts/rail-context.tsx index c52671b42b..c02d17fb9b 100644 --- a/services/web/frontend/js/features/ide-redesign/contexts/rail-context.tsx +++ b/services/web/frontend/js/features/ide-redesign/contexts/rail-context.tsx @@ -1,7 +1,5 @@ -import { sendSearchEvent } from '@/features/event-tracking/search-events' import useCollapsiblePanel from '@/features/ide-react/hooks/use-collapsible-panel' import useEventListener from '@/shared/hooks/use-event-listener' -import { isMac } from '@/shared/utils/os' import { createContext, Dispatch, @@ -21,7 +19,6 @@ export type RailTabKey = | 'review-panel' | 'chat' | 'errors' - | 'full-project-search' export type RailModalKey = 'keyboard-shortcuts' | 'contact-us' | 'dictionary' @@ -92,27 +89,6 @@ export const RailProvider: FC = ({ children }) => { }, [handlePaneCollapse, selectedTab, isOpen, openTab]) ) - useEventListener( - 'keydown', - useCallback( - (event: KeyboardEvent) => { - if ( - (isMac ? 
event.metaKey : event.ctrlKey) && - event.shiftKey && - event.code === 'KeyF' - ) { - event.preventDefault() - sendSearchEvent('search-open', { - searchType: 'full-project', - method: 'keyboard', - }) - openTab('full-project-search') - } - }, - [openTab] - ) - ) - const value = useMemo( () => ({ selectedTab, diff --git a/services/web/frontend/js/features/ide-redesign/hooks/use-survey-url.tsx b/services/web/frontend/js/features/ide-redesign/hooks/use-survey-url.tsx deleted file mode 100644 index fadc077b70..0000000000 --- a/services/web/frontend/js/features/ide-redesign/hooks/use-survey-url.tsx +++ /dev/null @@ -1,9 +0,0 @@ -import { useFeatureFlag } from '@/shared/context/split-test-context' - -export const useSurveyUrl = () => { - const newErrorlogs = useFeatureFlag('new-editor-error-logs-redesign') - if (newErrorlogs) { - return 'https://forms.gle/83QJ9ALJkiugxTZf8' - } - return 'https://forms.gle/soyVStc5qDx9na1Z6' -} diff --git a/services/web/frontend/js/features/pdf-preview/components/compile-time-changing-soon.tsx b/services/web/frontend/js/features/pdf-preview/components/compile-time-changing-soon.tsx deleted file mode 100644 index 9d9bc53711..0000000000 --- a/services/web/frontend/js/features/pdf-preview/components/compile-time-changing-soon.tsx +++ /dev/null @@ -1,122 +0,0 @@ -import Notification from '@/shared/components/notification' -import StartFreeTrialButton from '@/shared/components/start-free-trial-button' -import { Trans, useTranslation } from 'react-i18next' -import * as eventTracking from '@/infrastructure/event-tracking' -import { FC } from 'react' - -export const CompileTimeoutChangingSoon: FC<{ - isProjectOwner?: boolean - handleDismissChangingSoon: () => void - segmentation?: eventTracking.Segmentation -}> = ({ isProjectOwner = false, handleDismissChangingSoon, segmentation }) => { - const { t } = useTranslation() - - const sendInfoClickEvent = () => { - eventTracking.sendMB('paywall-info-click', { - 'paywall-type': 'compile-time-warning', - 
...segmentation, - content: 'blog', - }) - } - - const compileTimeoutChangesBlogLink = ( - /* eslint-disable-next-line jsx-a11y/anchor-has-content */ -
    - ) - - const fixingCompileTimeoutsLearnLink = ( - /* eslint-disable-next-line jsx-a11y/anchor-has-content */ - - ) - - if (isProjectOwner) { - return ( - - {t('start_free_trial_without_exclamation')} - - } - ariaLive="polite" - content={ -
    -

    - -

    -

    - {' '} - }} - /> -

    -
    - } - title={t('your_project_compiled_but_soon_might_not')} - type="warning" - isActionBelowContent - isDismissible - onDismiss={handleDismissChangingSoon} - /> - ) - } - - return ( - -

    - {' '} - -

    -

    - , - ]} - /> -

    -
    - } - title={t('this_project_compiled_but_soon_might_not')} - type="warning" - isDismissible - onDismiss={handleDismissChangingSoon} - /> - ) -} diff --git a/services/web/frontend/js/features/pdf-preview/components/compile-time-warning-upgrade-prompt-inner.tsx b/services/web/frontend/js/features/pdf-preview/components/compile-time-warning-upgrade-prompt-inner.tsx index 463ba55be8..09cfb5ddee 100644 --- a/services/web/frontend/js/features/pdf-preview/components/compile-time-warning-upgrade-prompt-inner.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/compile-time-warning-upgrade-prompt-inner.tsx @@ -1,24 +1,21 @@ import Notification from '@/shared/components/notification' import StartFreeTrialButton from '@/shared/components/start-free-trial-button' -import * as eventTracking from '../../../infrastructure/event-tracking' import { useTranslation } from 'react-i18next' import { FC } from 'react' export const CompileTimeWarningUpgradePromptInner: FC<{ handleDismissWarning: () => void - segmentation: eventTracking.Segmentation -}> = ({ handleDismissWarning, segmentation }) => { +}> = ({ handleDismissWarning }) => { const { t } = useTranslation() return ( {t('start_free_trial_without_exclamation')} @@ -30,7 +27,7 @@ export const CompileTimeWarningUpgradePromptInner: FC<{
    {t('your_project_near_compile_timeout_limit')}
    - {t('upgrade_for_more_compile_time')} + {t('upgrade_for_12x_more_compile_time')} {'. '}
    } diff --git a/services/web/frontend/js/features/pdf-preview/components/compile-time-warning-upgrade-prompt.tsx b/services/web/frontend/js/features/pdf-preview/components/compile-time-warning-upgrade-prompt.tsx index 164c3c3275..6595df854c 100644 --- a/services/web/frontend/js/features/pdf-preview/components/compile-time-warning-upgrade-prompt.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/compile-time-warning-upgrade-prompt.tsx @@ -1,56 +1,23 @@ -import { memo, useCallback, useEffect, useMemo, useState } from 'react' +import { memo, useCallback, useEffect, useState } from 'react' import * as eventTracking from '@/infrastructure/event-tracking' import { useDetachCompileContext } from '@/shared/context/detach-compile-context' import usePersistedState from '@/shared/hooks/use-persisted-state' import { CompileTimeWarningUpgradePromptInner } from '@/features/pdf-preview/components/compile-time-warning-upgrade-prompt-inner' import getMeta from '@/utils/meta' -import { CompileTimeoutChangingSoon } from './compile-time-changing-soon' function CompileTimeWarningUpgradePrompt() { const { isProjectOwner, deliveryLatencies, compiling, showLogs, error } = useDetachCompileContext() const [showWarning, setShowWarning] = useState(false) - const [showChangingSoon, setShowChangingSoon] = useState(false) const [dismissedUntilWarning, setDismissedUntilWarning] = usePersistedState< Date | undefined >(`has-dismissed-10s-compile-time-warning-until`) - const { reducedTimeoutWarning } = getMeta('ol-compileSettings') - const warningThreshold = reducedTimeoutWarning === 'enabled' ? 
7 : 10 - - const sharedSegmentation = useMemo( - () => ({ - '10s-timeout-warning': reducedTimeoutWarning, - 'is-owner': isProjectOwner, - }), - [isProjectOwner, reducedTimeoutWarning] - ) - - const warningSegmentation = useMemo( - () => ({ - content: 'warning', - compileTime: warningThreshold, - ...sharedSegmentation, - }), - [sharedSegmentation, warningThreshold] - ) - - const changingSoonSegmentation = useMemo( - () => ({ - content: 'changes', - compileTime: 10, - ...sharedSegmentation, - }), - [sharedSegmentation] - ) const handleNewCompile = useCallback( (compileTime: number) => { setShowWarning(false) - setShowChangingSoon(false) - if (reducedTimeoutWarning === 'enabled' && compileTime > 10000) { - setShowChangingSoon(true) - } else if (compileTime > warningThreshold * 1000) { + if (compileTime > 10000) { if (isProjectOwner) { if ( !dismissedUntilWarning || @@ -58,52 +25,26 @@ function CompileTimeWarningUpgradePrompt() { ) { setShowWarning(true) eventTracking.sendMB('compile-time-warning-displayed', { - compileTime: warningThreshold, + time: 10, isProjectOwner, }) } } } }, - [ - isProjectOwner, - dismissedUntilWarning, - reducedTimeoutWarning, - warningThreshold, - ] + [isProjectOwner, dismissedUntilWarning] ) const handleDismissWarning = useCallback(() => { eventTracking.sendMB('compile-time-warning-dismissed', { - compileTime: warningThreshold, + time: 10, isProjectOwner, }) - eventTracking.sendMB('paywall-dismiss', { - 'paywall-type': 'compile-time-warning', - content: 'warning', - compileTime: warningThreshold, - ...sharedSegmentation, - }) setShowWarning(false) const until = new Date() until.setDate(until.getDate() + 1) // 1 day setDismissedUntilWarning(until) - }, [ - isProjectOwner, - setDismissedUntilWarning, - warningThreshold, - sharedSegmentation, - ]) - - const handleDismissChangingSoon = useCallback(() => { - eventTracking.sendMB('paywall-dismiss', { - 'paywall-type': 'compile-time-warning', - compileTime: 10, - content: 'changes', - 
...sharedSegmentation, - }) - setShowChangingSoon(false) - }, [sharedSegmentation]) + }, [isProjectOwner, setDismissedUntilWarning]) useEffect(() => { if (compiling || error || showLogs) return @@ -114,32 +55,21 @@ function CompileTimeWarningUpgradePrompt() { return null } - if ( - compiling || - error || - showLogs || - !deliveryLatencies.compileTimeServerE2E - ) { + if (compiling || error || showLogs) { return null } - if (!showWarning && !showChangingSoon) { + if (!showWarning) { return null } + // if showWarning is true then the 10s warning is shown + return (
    {showWarning && isProjectOwner && ( - )} - {showChangingSoon && ( - )}
    diff --git a/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-content.tsx b/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-content.tsx index 17378f6c74..8c9a9d7761 100644 --- a/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-content.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-content.tsx @@ -3,7 +3,6 @@ import PdfLogEntryRawContent from './pdf-log-entry-raw-content' import importOverleafModules from '../../../../macros/import-overleaf-module.macro' import { LogEntry } from '../util/types' import { ElementType } from 'react' -import classNames from 'classnames' const pdfLogEntryComponents = importOverleafModules( 'pdfLogEntryComponents' @@ -18,21 +17,17 @@ export default function PdfLogEntryContent({ extraInfoURL, index, logEntry, - alwaysExpandRawContent = false, - className, }: { rawContent?: string formattedContent?: React.ReactNode extraInfoURL?: string | null index?: number logEntry?: LogEntry - alwaysExpandRawContent?: boolean - className?: string }) { const { t } = useTranslation() return ( -
    +
    {formattedContent && (
    {formattedContent}
    )} @@ -53,11 +48,7 @@ export default function PdfLogEntryContent({ )} {rawContent && ( - + )}
    ) diff --git a/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-raw-content.tsx b/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-raw-content.tsx index 0e9cc5246d..39f46fbed3 100644 --- a/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-raw-content.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-raw-content.tsx @@ -8,24 +8,20 @@ import Icon from '../../../shared/components/icon' export default function PdfLogEntryRawContent({ rawContent, collapsedSize = 0, - alwaysExpanded = false, }: { rawContent: string collapsedSize?: number - alwaysExpanded?: boolean }) { - const [expanded, setExpanded] = useState(alwaysExpanded) - const [needsExpander, setNeedsExpander] = useState(!alwaysExpanded) + const [expanded, setExpanded] = useState(false) + const [needsExpander, setNeedsExpander] = useState(true) const { elementRef } = useResizeObserver( useCallback( (element: Element) => { if (element.scrollHeight === 0) return // skip update when logs-pane is closed - setNeedsExpander( - !alwaysExpanded && element.scrollHeight > collapsedSize - ) + setNeedsExpander(element.scrollHeight > collapsedSize) }, - [collapsedSize, alwaysExpanded] + [collapsedSize] ) ) diff --git a/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry.tsx b/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry.tsx index 23ae2dca5d..349ad79047 100644 --- a/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry.tsx @@ -1,3 +1,4 @@ +import classNames from 'classnames' import { memo, MouseEventHandler, useCallback } from 'react' import PreviewLogEntryHeader from '../../preview/components/preview-log-entry-header' import PdfLogEntryContent from './pdf-log-entry-content' @@ -5,9 +6,6 @@ import HumanReadableLogsHints from '../../../ide/human-readable-logs/HumanReadab import { sendMB } 
from '@/infrastructure/event-tracking' import getMeta from '@/utils/meta' import { ErrorLevel, LogEntry, SourceLocation } from '../util/types' -import { useIsNewEditorEnabled } from '@/features/ide-redesign/utils/new-editor-utils' -import NewLogEntry from '@/features/ide-redesign/components/error-logs/log-entry' -import { useFeatureFlag } from '@/shared/context/split-test-context' function PdfLogEntry({ ruleId, @@ -20,9 +18,12 @@ function PdfLogEntry({ level, sourceLocation, showSourceLocationLink = true, + showCloseButton = false, entryAriaLabel = undefined, + customClass, contentDetails, onSourceLocationClick, + onClose, index, logEntry, id, @@ -37,9 +38,12 @@ function PdfLogEntry({ extraInfoURL?: string | null sourceLocation?: SourceLocation showSourceLocationLink?: boolean + showCloseButton?: boolean entryAriaLabel?: string + customClass?: string contentDetails?: string[] onSourceLocationClick?: (sourceLocation: SourceLocation) => void + onClose?: () => void index?: number logEntry?: LogEntry id?: string @@ -69,34 +73,9 @@ function PdfLogEntry({ [level, onSourceLocationClick, ruleId, sourceLocation] ) - const newEditor = useIsNewEditorEnabled() - const newErrorlogs = useFeatureFlag('new-editor-error-logs-redesign') - - if (newEditor && newErrorlogs) { - return ( - - ) - } - return (
    {(rawContent || formattedContent || showAiErrorAssistant) && ( diff --git a/services/web/frontend/js/features/pdf-preview/components/pdf-logs-viewer.tsx b/services/web/frontend/js/features/pdf-preview/components/pdf-logs-viewer.tsx index f9fbcae42a..ec834432fe 100644 --- a/services/web/frontend/js/features/pdf-preview/components/pdf-logs-viewer.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/pdf-logs-viewer.tsx @@ -21,6 +21,7 @@ function PdfLogsViewer({ alwaysVisible = false }: { alwaysVisible?: boolean }) { const { codeCheckFailed, error, + hasShortCompileTimeout, logEntries, rawLog, validationIssues, @@ -31,8 +32,6 @@ function PdfLogsViewer({ alwaysVisible = false }: { alwaysVisible?: boolean }) { const { loadingError } = usePdfPreviewContext() - const { compileTimeout } = getMeta('ol-compileSettings') - const { t } = useTranslation() const [ @@ -59,7 +58,7 @@ function PdfLogsViewer({ alwaysVisible = false }: { alwaysVisible?: boolean }) { {loadingError && } - {compileTimeout < 60 && error === 'timedout' ? ( + {hasShortCompileTimeout && error === 'timedout' ? ( isCompileTimeoutPaywallDisplay ? ( - startCompile()} - />, - ]} - /> -
    -
    - , - ]} - /> - - } - level="warning" - /> - ) + + startCompile()} + />, + ]} + /> +
    +
    + , + ]} + /> + + } + level="warning" + /> ) case 'rendering-error': return ( - includeErrors && ( - - {t('something_went_wrong_rendering_pdf')} -   - , - ]} - /> - {getMeta('ol-compilesUserContentDomain') && ( - <> -
    -
    - , - /* eslint-disable-next-line jsx-a11y/anchor-has-content */ -
    , - ]} - /> - - )} - - ) + + {t('something_went_wrong_rendering_pdf')} +   + , + ]} + /> + {getMeta('ol-compilesUserContentDomain') && ( + <> +
    +
    + , + /* eslint-disable-next-line jsx-a11y/anchor-has-content */ +
    , + ]} + /> + + )} + ) case 'clsi-maintenance': return ( - includeErrors && ( - - {t('clsi_maintenance')} - - ) + + {t('clsi_maintenance')} + ) case 'clsi-unavailable': return ( - includeErrors && ( - - {t('clsi_unavailable')} - - ) + + {t('clsi_unavailable')} + ) case 'too-recently-compiled': return ( - includeErrors && ( - - {t('too_recently_compiled')} - - ) + + {t('too_recently_compiled')} + ) case 'terminated': return ( - includeErrors && ( - - {t('compile_terminated_by_user')} - - ) + + {t('compile_terminated_by_user')} + ) case 'rate-limited': return ( - includeErrors && ( - - {t('project_flagged_too_many_compiles')} - - ) + + {t('project_flagged_too_many_compiles')} + ) case 'compile-in-progress': return ( - includeErrors && ( - - {t('pdf_compile_try_again')} - - ) + + {t('pdf_compile_try_again')} + ) case 'autocompile-disabled': return ( - includeErrors && ( - - {t('autocompile_disabled_reason')} - - ) + + {t('autocompile_disabled_reason')} + ) case 'project-too-large': return ( - includeErrors && ( - - {t('project_too_much_editable_text')} - - ) + + {t('project_too_much_editable_text')} + ) case 'timedout': - return includeErrors && + return case 'failure': return ( - includeErrors && ( - - {t('no_pdf_error_explanation')} + + {t('no_pdf_error_explanation')} -
      -
    • {t('no_pdf_error_reason_unrecoverable_error')}
    • -
    • - }} - /> -
    • -
    • - }} - /> -
    • -
    -
    - ) +
      +
    • {t('no_pdf_error_reason_unrecoverable_error')}
    • +
    • + }} + /> +
    • +
    • + }} + /> +
    • +
    +
    ) case 'clear-cache': return ( - includeErrors && ( - - {t('somthing_went_wrong_compiling')} - - ) + + {t('somthing_went_wrong_compiling')} + ) case 'pdf-viewer-loading-error': return ( - includeErrors && ( - - , - // eslint-disable-next-line jsx-a11y/anchor-has-content -
    , - // eslint-disable-next-line jsx-a11y/anchor-has-content - , - ]} - /> - - ) + + , + // eslint-disable-next-line jsx-a11y/anchor-has-content + , + // eslint-disable-next-line jsx-a11y/anchor-has-content + , + ]} + /> + ) case 'validation-problems': @@ -241,11 +207,9 @@ function PdfPreviewError({ case 'error': default: return ( - includeErrors && ( - - {t('somthing_went_wrong_compiling')} - - ) + + {t('somthing_went_wrong_compiling')} + ) } } diff --git a/services/web/frontend/js/features/pdf-preview/components/pdf-preview-pane.tsx b/services/web/frontend/js/features/pdf-preview/components/pdf-preview-pane.tsx index e063c20c76..7bbecbc327 100644 --- a/services/web/frontend/js/features/pdf-preview/components/pdf-preview-pane.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/pdf-preview-pane.tsx @@ -12,12 +12,9 @@ import PdfPreviewHybridToolbarNew from '@/features/ide-redesign/components/pdf-p import PdfErrorState from '@/features/ide-redesign/components/pdf-preview/pdf-error-state' import { useIsNewEditorEnabled } from '@/features/ide-redesign/utils/new-editor-utils' import importOverleafModules from '../../../../macros/import-overleaf-module.macro' -import PdfCodeCheckFailedBanner from '@/features/ide-redesign/components/pdf-preview/pdf-code-check-failed-banner' -import getMeta from '@/utils/meta' function PdfPreviewPane() { - const { pdfUrl } = useCompileContext() - const { compileTimeout } = getMeta('ol-compileSettings') + const { pdfUrl, hasShortCompileTimeout } = useCompileContext() const classes = classNames('pdf', 'full-size', { 'pdf-empty': !pdfUrl, }) @@ -35,9 +32,8 @@ function PdfPreviewPane() { ) : ( )} - {newEditor && } - {compileTimeout < 60 && } + {hasShortCompileTimeout && } }>
    diff --git a/services/web/frontend/js/features/pdf-preview/components/timeout-message-after-paywall-dismissal.tsx b/services/web/frontend/js/features/pdf-preview/components/timeout-message-after-paywall-dismissal.tsx index 64ef0fbfc1..db6140085f 100644 --- a/services/web/frontend/js/features/pdf-preview/components/timeout-message-after-paywall-dismissal.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/timeout-message-after-paywall-dismissal.tsx @@ -1,30 +1,39 @@ import getMeta from '@/utils/meta' import { Trans, useTranslation } from 'react-i18next' -import { memo, useMemo } from 'react' +import { memo, useCallback, useEffect } from 'react' import { useDetachCompileContext } from '@/shared/context/detach-compile-context' import StartFreeTrialButton from '@/shared/components/start-free-trial-button' import MaterialIcon from '@/shared/components/material-icon' +import { useStopOnFirstError } from '@/shared/hooks/use-stop-on-first-error' import * as eventTracking from '@/infrastructure/event-tracking' import PdfLogEntry from './pdf-log-entry' -type TimeoutMessageProps = { - segmentation?: eventTracking.Segmentation -} +function TimeoutMessageAfterPaywallDismissal() { + const { + startCompile, + lastCompileOptions, + setAnimateCompileDropdownArrow, + isProjectOwner, + } = useDetachCompileContext() + + const { enableStopOnFirstError } = useStopOnFirstError({ + eventSource: 'timeout-new', + }) + + const handleEnableStopOnFirstErrorClick = useCallback(() => { + enableStopOnFirstError() + startCompile({ stopOnFirstError: true }) + setAnimateCompileDropdownArrow(true) + }, [enableStopOnFirstError, startCompile, setAnimateCompileDropdownArrow]) -function TimeoutMessageAfterPaywallDismissal({ - segmentation, -}: TimeoutMessageProps) { - const { lastCompileOptions, isProjectOwner } = useDetachCompileContext() return (
    - + {getMeta('ol-ExposedSettings').enableSubscriptions && ( )}
    @@ -33,22 +42,26 @@ function TimeoutMessageAfterPaywallDismissal({ type CompileTimeoutProps = { isProjectOwner: boolean - segmentation?: eventTracking.Segmentation } const CompileTimeout = memo(function CompileTimeout({ isProjectOwner, - segmentation, }: CompileTimeoutProps) { const { t } = useTranslation() - const eventSegmentation = useMemo( - () => ({ - ...segmentation, + useEffect(() => { + eventTracking.sendMB('paywall-prompt', { + 'paywall-type': 'compile-timeout', 'paywall-version': 'secondary', - }), - [segmentation] - ) + }) + }, []) + + function onPaywallClick() { + eventTracking.sendMB('paywall-click', { + 'paywall-type': 'compile-timeout', + 'paywall-version': 'secondary', + }) + } return ( {t('try_for_free')} @@ -111,50 +124,22 @@ const CompileTimeout = memo(function CompileTimeout({ type PreventTimeoutHelpMessageProps = { lastCompileOptions: any - segmentation?: eventTracking.Segmentation + handleEnableStopOnFirstErrorClick: () => void + isProjectOwner: boolean } const PreventTimeoutHelpMessage = memo(function PreventTimeoutHelpMessage({ lastCompileOptions, - segmentation, + handleEnableStopOnFirstErrorClick, + isProjectOwner, }: PreventTimeoutHelpMessageProps) { const { t } = useTranslation() - function sendInfoClickEvent() { - eventTracking.sendMB('paywall-info-click', { - 'paywall-type': 'compile-timeout', - content: 'blog', - ...segmentation, - }) - } - - const compileTimeoutChangesBlogLink = ( - /* eslint-disable-next-line jsx-a11y/anchor-has-content, react/jsx-key */ -
    - ) - return ( - {segmentation?.['10s-timeout-warning'] === 'enabled' && ( -

    - - - -

    - )} -

    {t('common_causes_of_compile_timeouts_include')}:

    -
    +
    Digital Science
    diff --git a/services/web/frontend/js/features/project-list/components/sidebar/tags-list.tsx b/services/web/frontend/js/features/project-list/components/sidebar/tags-list.tsx index 1c6298603c..452b003b2b 100644 --- a/services/web/frontend/js/features/project-list/components/sidebar/tags-list.tsx +++ b/services/web/frontend/js/features/project-list/components/sidebar/tags-list.tsx @@ -57,7 +57,6 @@ export default function TagsList() {
    ) diff --git a/services/web/frontend/js/features/source-editor/components/full-project-search-button.tsx b/services/web/frontend/js/features/source-editor/components/full-project-search-button.tsx index be02fdbe3c..698204d89c 100644 --- a/services/web/frontend/js/features/source-editor/components/full-project-search-button.tsx +++ b/services/web/frontend/js/features/source-editor/components/full-project-search-button.tsx @@ -12,8 +12,6 @@ import Close from '@/shared/components/close' import useTutorial from '@/shared/hooks/promotions/use-tutorial' import { useEditorContext } from '@/shared/context/editor-context' import getMeta from '@/utils/meta' -import { useIsNewEditorEnabled } from '@/features/ide-redesign/utils/new-editor-utils' -import { useRailContext } from '@/features/ide-redesign/contexts/rail-context' const PROMOTION_SIGNUP_CUT_OFF_DATE = new Date('2025-04-22T00:00:00Z') @@ -21,8 +19,6 @@ export const FullProjectSearchButton = ({ query }: { query: SearchQuery }) => { const view = useCodeMirrorViewContext() const { t } = useTranslation() const { setProjectSearchIsOpen } = useLayoutContext() - const newEditor = useIsNewEditorEnabled() - const { openTab } = useRailContext() const ref = useRef(null) const { inactiveTutorials } = useEditorContext() @@ -48,18 +44,14 @@ export const FullProjectSearchButton = ({ query }: { query: SearchQuery }) => { } const openFullProjectSearch = useCallback(() => { - if (newEditor) { - openTab('full-project-search') - } else { - setProjectSearchIsOpen(true) - } + setProjectSearchIsOpen(true) closeSearchPanel(view) window.setTimeout(() => { window.dispatchEvent( new CustomEvent('editor:full-project-search', { detail: query }) ) }, 200) - }, [setProjectSearchIsOpen, query, view, newEditor, openTab]) + }, [setProjectSearchIsOpen, query, view]) const onClick = useCallback(() => { sendSearchEvent('search-open', { diff --git a/services/web/frontend/js/features/source-editor/components/table-generator/toolbar/commands.ts 
b/services/web/frontend/js/features/source-editor/components/table-generator/toolbar/commands.ts index ab58179586..2645e853bd 100644 --- a/services/web/frontend/js/features/source-editor/components/table-generator/toolbar/commands.ts +++ b/services/web/frontend/js/features/source-editor/components/table-generator/toolbar/commands.ts @@ -45,16 +45,16 @@ const themeGenerators: Record = { left: true, right: number === numColumns - 1, }), - row: () => '\\hline', + row: (number: number, numRows: number) => '\\hline', multicolumn: () => ({ left: true, right: true }), lastRow: () => '\\hline', }, [BorderTheme.BOOKTABS]: { - column: () => ({ + column: (number: number, numColumns: number) => ({ left: false, right: false, }), - row: (number: number) => { + row: (number: number, numRows: number) => { if (number === 0) { return '\\toprule' } diff --git a/services/web/frontend/js/features/source-editor/components/table-generator/toolbar/toolbar-button-menu.tsx b/services/web/frontend/js/features/source-editor/components/table-generator/toolbar/toolbar-button-menu.tsx index d63ed7b706..51c68872f6 100644 --- a/services/web/frontend/js/features/source-editor/components/table-generator/toolbar/toolbar-button-menu.tsx +++ b/services/web/frontend/js/features/source-editor/components/table-generator/toolbar/toolbar-button-menu.tsx @@ -36,7 +36,7 @@ export const ToolbarButtonMenu: FC< event.preventDefault() event.stopPropagation() }} - onClick={() => { + onClick={event => { onToggle(!open) }} disabled={disabled} diff --git a/services/web/frontend/js/features/source-editor/components/toolbar/math-dropdown.tsx b/services/web/frontend/js/features/source-editor/components/toolbar/math-dropdown.tsx index 748a04d7cb..b34a61c69d 100644 --- a/services/web/frontend/js/features/source-editor/components/toolbar/math-dropdown.tsx +++ b/services/web/frontend/js/features/source-editor/components/toolbar/math-dropdown.tsx @@ -34,7 +34,7 @@ export const MathDropdown = memo(function MathDropdown() { { 
+ onClick={event => { writefullInstance?.openEquationGenerator() }} > diff --git a/services/web/frontend/js/features/source-editor/components/toolbar/table-dropdown.tsx b/services/web/frontend/js/features/source-editor/components/toolbar/table-dropdown.tsx index a191b63600..190d2e7c7d 100644 --- a/services/web/frontend/js/features/source-editor/components/toolbar/table-dropdown.tsx +++ b/services/web/frontend/js/features/source-editor/components/toolbar/table-dropdown.tsx @@ -46,7 +46,7 @@ export const TableDropdown = memo(function TableDropdown() { { + onClick={event => { writefullInstance?.openTableGenerator() }} > diff --git a/services/web/frontend/js/features/source-editor/extensions/cursor-highlights.ts b/services/web/frontend/js/features/source-editor/extensions/cursor-highlights.ts index ccdc8b90e7..78d2903825 100644 --- a/services/web/frontend/js/features/source-editor/extensions/cursor-highlights.ts +++ b/services/web/frontend/js/features/source-editor/extensions/cursor-highlights.ts @@ -187,7 +187,7 @@ class CursorMarker extends RectangleMarker { const cursorHighlightsLayer = layer({ above: true, class: 'ol-cm-cursorHighlightsLayer', - update: update => { + update: (update, layer) => { return ( update.docChanged || update.selectionSet || diff --git a/services/web/frontend/js/features/source-editor/extensions/cursor-position.ts b/services/web/frontend/js/features/source-editor/extensions/cursor-position.ts index 0cd69d8b1f..efde64f40e 100644 --- a/services/web/frontend/js/features/source-editor/extensions/cursor-position.ts +++ b/services/web/frontend/js/features/source-editor/extensions/cursor-position.ts @@ -42,7 +42,7 @@ export const cursorPosition = ({ // Asynchronously dispatch cursor position when the selection changes and // provide a little debouncing. Using requestAnimationFrame postpones it // until the next CM6 DOM update. 
- ViewPlugin.define(() => { + ViewPlugin.define(view => { let animationFrameRequest: number | null = null return { diff --git a/services/web/frontend/js/features/source-editor/extensions/draw-selection.ts b/services/web/frontend/js/features/source-editor/extensions/draw-selection.ts index 413317ec0a..af31353a23 100644 --- a/services/web/frontend/js/features/source-editor/extensions/draw-selection.ts +++ b/services/web/frontend/js/features/source-editor/extensions/draw-selection.ts @@ -71,7 +71,7 @@ const cursorLayer = layer({ updateHasMouseDownEffect(update) ) }, - mount(dom) { + mount(dom, view) { dom.style.animationDuration = '1200ms' }, class: 'cm-cursorLayer', @@ -90,7 +90,7 @@ const selectionLayer = layer({ } return markers }, - update(update) { + update(update, dom) { return ( update.docChanged || update.selectionSet || diff --git a/services/web/frontend/js/features/source-editor/extensions/empty-line-filler.ts b/services/web/frontend/js/features/source-editor/extensions/empty-line-filler.ts index 49d9b195b9..647463d608 100644 --- a/services/web/frontend/js/features/source-editor/extensions/empty-line-filler.ts +++ b/services/web/frontend/js/features/source-editor/extensions/empty-line-filler.ts @@ -9,13 +9,13 @@ import { import browser from './browser' class EmptyLineWidget extends WidgetType { - toDOM(): HTMLElement { + toDOM(view: EditorView): HTMLElement { const element = document.createElement('span') element.className = 'ol-cm-filler' return element } - eq() { + eq(widget: EmptyLineWidget) { return true } } diff --git a/services/web/frontend/js/features/source-editor/extensions/history-ot.ts b/services/web/frontend/js/features/source-editor/extensions/history-ot.ts deleted file mode 100644 index 91a58599fb..0000000000 --- a/services/web/frontend/js/features/source-editor/extensions/history-ot.ts +++ /dev/null @@ -1,449 +0,0 @@ -import { Decoration, EditorView, WidgetType } from '@codemirror/view' -import { - EditorState, - StateEffect, - StateField, - 
Transaction, -} from '@codemirror/state' -import { - CommentList, - EditOperation, - TextOperation, - TrackingProps, - TrackedChangeList, -} from 'overleaf-editor-core' -import { DocumentContainer } from '@/features/ide-react/editor/document-container' -import { HistoryOTShareDoc } from '../../../../../types/share-doc' - -export const historyOT = (currentDoc: DocumentContainer) => { - const trackedChanges = - currentDoc.doc?.getTrackedChanges() ?? new TrackedChangeList([]) - const positionMapper = new PositionMapper(trackedChanges) - return [ - updateSender, - trackChangesUserIdState, - shareDocState.init(() => currentDoc?.doc?._doc ?? null), - commentsState, - trackedChangesState.init(() => ({ - decorations: buildTrackedChangesDecorations( - trackedChanges, - positionMapper - ), - positionMapper, - })), - trackedChangesTheme, - ] -} - -export const shareDocState = StateField.define({ - create() { - return null - }, - - update(value) { - // this state is constant - return value - }, -}) - -const trackedChangesTheme = EditorView.baseTheme({ - '.ol-cm-change-i, .ol-cm-change-highlight-i, .ol-cm-change-focus-i': { - backgroundColor: 'rgba(44, 142, 48, 0.30)', - }, - '&light .ol-cm-change-c, &light .ol-cm-change-highlight-c, &light .ol-cm-change-focus-c': - { - backgroundColor: 'rgba(243, 177, 17, 0.30)', - }, - '&dark .ol-cm-change-c, &dark .ol-cm-change-highlight-c, &dark .ol-cm-change-focus-c': - { - backgroundColor: 'rgba(194, 93, 11, 0.15)', - }, - '.ol-cm-change': { - padding: 'var(--half-leading, 0) 0', - }, - '.ol-cm-change-highlight': { - padding: 'var(--half-leading, 0) 0', - }, - '.ol-cm-change-focus': { - padding: 'var(--half-leading, 0) 0', - }, - '&light .ol-cm-change-d': { - borderLeft: '2px dotted #c5060b', - marginLeft: '-1px', - }, - '&dark .ol-cm-change-d': { - borderLeft: '2px dotted #c5060b', - marginLeft: '-1px', - }, - '&light .ol-cm-change-d-highlight': { - borderLeft: '3px solid #c5060b', - marginLeft: '-2px', - }, - '&dark 
.ol-cm-change-d-highlight': { - borderLeft: '3px solid #c5060b', - marginLeft: '-2px', - }, - '&light .ol-cm-change-d-focus': { - borderLeft: '3px solid #B83A33', - marginLeft: '-2px', - }, - '&dark .ol-cm-change-d-focus': { - borderLeft: '3px solid #B83A33', - marginLeft: '-2px', - }, -}) - -export const updateTrackedChangesEffect = - StateEffect.define() - -const buildTrackedChangesDecorations = ( - trackedChanges: TrackedChangeList, - positionMapper: PositionMapper -) => { - const decorations = [] - for (const change of trackedChanges.asSorted()) { - if (change.tracking.type === 'insert') { - decorations.push( - Decoration.mark({ - class: 'ol-cm-change ol-cm-change-i', - tracking: change.tracking, - }).range( - positionMapper.toCM6(change.range.pos), - positionMapper.toCM6(change.range.end) - ) - ) - } else { - decorations.push( - Decoration.widget({ - widget: new ChangeDeletedWidget(), - side: 1, - }).range(positionMapper.toCM6(change.range.pos)) - ) - } - } - - return Decoration.set(decorations, true) -} - -class ChangeDeletedWidget extends WidgetType { - toDOM() { - const widget = document.createElement('span') - widget.classList.add('ol-cm-change') - widget.classList.add('ol-cm-change-d') - return widget - } - - eq() { - return true - } -} - -export const trackedChangesState = StateField.define({ - create() { - return { - decorations: Decoration.none, - positionMapper: new PositionMapper(new TrackedChangeList([])), - } - }, - - update(value, transaction) { - if ( - (transaction.docChanged && !transaction.annotation(Transaction.remote)) || - transaction.effects.some(effect => effect.is(updateTrackedChangesEffect)) - ) { - const shareDoc = transaction.startState.field(shareDocState) - if (shareDoc != null) { - const trackedChanges = shareDoc.snapshot.getTrackedChanges() - const positionMapper = new PositionMapper(trackedChanges) - value = { - decorations: buildTrackedChangesDecorations( - trackedChanges, - positionMapper - ), - positionMapper, - } - } - } - - 
return value - }, - - provide(field) { - return EditorView.decorations.from(field, value => value.decorations) - }, -}) - -const setTrackChangesUserIdEffect = StateEffect.define() - -export const setTrackChangesUserId = (userId: string | null) => { - return { - effects: setTrackChangesUserIdEffect.of(userId), - } -} - -const trackChangesUserIdState = StateField.define({ - create() { - return null - }, - - update(value, transaction) { - for (const effect of transaction.effects) { - if (effect.is(setTrackChangesUserIdEffect)) { - value = effect.value - } - } - return value - }, -}) - -const updateCommentsEffect = StateEffect.define() - -export const updateComments = (comments: CommentList) => { - return { - effects: updateCommentsEffect.of(comments), - } -} - -const buildCommentsDecorations = (comments: CommentList) => - Decoration.set( - comments.toArray().flatMap(comment => - comment.ranges.map(range => - Decoration.mark({ - class: 'tracked-change-comment', - id: comment.id, - resolved: comment.resolved, - }).range(range.pos, range.end) - ) - ), - true - ) - -const commentsState = StateField.define({ - create() { - return Decoration.none // TODO: init from snapshot - }, - - update(value, transaction) { - if (transaction.docChanged) { - value = value.map(transaction.changes) - } - - for (const effect of transaction.effects) { - if (effect.is(updateCommentsEffect)) { - value = buildCommentsDecorations(effect.value) - } - } - - return value - }, - - provide(field) { - return EditorView.decorations.from(field) - }, -}) - -export const historyOTOperationEffect = StateEffect.define() - -const updateSender = EditorState.transactionExtender.of(tr => { - if (!tr.docChanged || tr.annotation(Transaction.remote)) { - return {} - } - - const trackingUserId = tr.startState.field(trackChangesUserIdState) - const positionMapper = tr.startState.field(trackedChangesState).positionMapper - const startDoc = tr.startState.doc - const opBuilder = new OperationBuilder( - 
positionMapper.toSnapshot(startDoc.length) - ) - - if (trackingUserId == null) { - // Not tracking changes - tr.changes.iterChanges((fromA, toA, fromB, toB, inserted) => { - // insert - if (inserted.length > 0) { - const pos = positionMapper.toSnapshot(fromA) - opBuilder.insert(pos, inserted.toString()) - } - - // deletion - if (toA > fromA) { - const start = positionMapper.toSnapshot(fromA) - const end = positionMapper.toSnapshot(toA) - opBuilder.delete(start, end - start) - } - }) - } else { - // Tracking changes - const timestamp = new Date() - tr.changes.iterChanges((fromA, toA, fromB, toB, inserted) => { - // insertion - if (inserted.length > 0) { - const pos = positionMapper.toSnapshot(fromA) - opBuilder.trackedInsert( - pos, - inserted.toString(), - trackingUserId, - timestamp - ) - } - - // deletion - if (toA > fromA) { - const start = positionMapper.toSnapshot(fromA) - const end = positionMapper.toSnapshot(toA) - opBuilder.trackedDelete(start, end - start, trackingUserId, timestamp) - } - }) - } - - const op = opBuilder.finish() - const shareDoc = tr.startState.field(shareDocState) - if (shareDoc != null) { - shareDoc.submitOp([op]) - } - - return {} -}) - -/** - * Incrementally builds a TextOperation from a series of inserts and deletes. - * - * This relies on inserts and deletes being ordered by document position. 
This - * is not clear in the documentation, but has been confirmed by Marijn in - * https://discuss.codemirror.net/t/iterators-can-be-hard-to-work-with-for-beginners/3533/10 - */ -class OperationBuilder { - /** - * Source document length - */ - private docLength: number - - /** - * Position in the source document - */ - private pos: number - - /** - * Operation built - */ - private op: TextOperation - - constructor(docLength: number) { - this.docLength = docLength - this.op = new TextOperation() - this.pos = 0 - } - - insert(pos: number, text: string) { - this.retainUntil(pos) - this.op.insert(text) - } - - delete(pos: number, length: number) { - this.retainUntil(pos) - this.op.remove(length) - this.pos += length - } - - trackedInsert(pos: number, text: string, userId: string, timestamp: Date) { - this.retainUntil(pos) - this.op.insert(text, { - tracking: new TrackingProps('insert', userId, timestamp), - }) - } - - trackedDelete(pos: number, length: number, userId: string, timestamp: Date) { - this.retainUntil(pos) - this.op.retain(length, { - tracking: new TrackingProps('delete', userId, timestamp), - }) - this.pos += length - } - - retainUntil(pos: number) { - if (pos > this.pos) { - this.op.retain(pos - this.pos) - this.pos = pos - } else if (pos < this.pos) { - throw Error( - `Out of order: position ${pos} comes before current position: ${this.pos}` - ) - } - } - - finish() { - this.retainUntil(this.docLength) - return this.op - } -} - -type OffsetTable = { pos: number; map: (pos: number) => number }[] - -class PositionMapper { - private offsets: { - toCM6: OffsetTable - toSnapshot: OffsetTable - } - - constructor(trackedChanges: TrackedChangeList) { - this.offsets = { - toCM6: [{ pos: 0, map: pos => pos }], - toSnapshot: [{ pos: 0, map: pos => pos }], - } - - // Offset of the snapshot pos relative to the CM6 pos - let offset = 0 - for (const change of trackedChanges.asSorted()) { - if (change.tracking.type === 'delete') { - const deleteLength = 
change.range.length - const deletePos = change.range.pos - const oldOffset = offset - const newOffset = offset + deleteLength - this.offsets.toSnapshot.push({ - pos: change.range.pos - offset + 1, - map: pos => pos + newOffset, - }) - this.offsets.toCM6.push({ - pos: change.range.pos, - map: () => deletePos - oldOffset, - }) - this.offsets.toCM6.push({ - pos: change.range.pos + deleteLength, - map: pos => pos - newOffset, - }) - offset = newOffset - } - } - } - - toCM6(snapshotPos: number) { - return this.mapPos(snapshotPos, this.offsets.toCM6) - } - - toSnapshot(cm6Pos: number) { - return this.mapPos(cm6Pos, this.offsets.toSnapshot) - } - - mapPos(pos: number, offsets: OffsetTable) { - // Binary search for the offset at the last position before pos - let low = 0 - let high = offsets.length - 1 - while (low < high) { - const middle = Math.ceil((low + high) / 2) - const entry = offsets[middle] - if (entry.pos < pos) { - // This entry could be the right offset, but lower entries are too low - // Because we used Math.ceil(), middle is higher than low and the - // algorithm progresses. 
- low = middle - } else if (entry.pos > pos) { - // This entry is too high - high = middle - 1 - } else { - // This is the right entry - return entry.map(pos) - } - } - return offsets[low].map(pos) - } -} diff --git a/services/web/frontend/js/features/source-editor/extensions/index.ts b/services/web/frontend/js/features/source-editor/extensions/index.ts index 0e19d42fc1..0a65739c55 100644 --- a/services/web/frontend/js/features/source-editor/extensions/index.ts +++ b/services/web/frontend/js/features/source-editor/extensions/index.ts @@ -50,7 +50,6 @@ import { docName } from './doc-name' import { fileTreeItemDrop } from './file-tree-item-drop' import { mathPreview } from './math-preview' import { ranges } from './ranges' -import { historyOT } from './history-ot' import { trackDetachedComments } from './track-detached-comments' import { reviewTooltip } from './review-tooltip' @@ -143,9 +142,7 @@ export const createExtensions = (options: Record): Extension[] => [ // NOTE: `emptyLineFiller` needs to be before `trackChanges`, // so the decorations are added in the correct order. emptyLineFiller(), - options.currentDoc.currentDocument.getType() === 'history-ot' - ? 
historyOT(options.currentDoc.currentDocument) - : ranges(), + ranges(), trackDetachedComments(options.currentDoc), visual(options.visual), mathPreview(options.settings.mathPreview), diff --git a/services/web/frontend/js/features/source-editor/extensions/keybindings.ts b/services/web/frontend/js/features/source-editor/extensions/keybindings.ts index 01c39d67ba..3e67b4b753 100644 --- a/services/web/frontend/js/features/source-editor/extensions/keybindings.ts +++ b/services/web/frontend/js/features/source-editor/extensions/keybindings.ts @@ -34,14 +34,17 @@ const customiseVimOnce = (_Vim: typeof Vim, _CodeMirror: typeof CodeMirror) => { // Allow copy via Ctrl-C in insert mode _Vim.unmap('', 'insert') - _Vim.defineAction('insertModeCtrlC', (cm: CodeMirror) => { - if (hasNonEmptySelection(cm)) { - navigator.clipboard.writeText(cm.getSelection()) - cm.setSelection(cm.getCursor(), cm.getCursor()) - } else { - _Vim.exitInsertMode(cm) + _Vim.defineAction( + 'insertModeCtrlC', + (cm: CodeMirror, actionArgs: object, state: any) => { + if (hasNonEmptySelection(cm)) { + navigator.clipboard.writeText(cm.getSelection()) + cm.setSelection(cm.getCursor(), cm.getCursor()) + } else { + _Vim.exitInsertMode(cm) + } } - }) + ) // Overwrite the moveByCharacters command with a decoration-aware version _Vim.defineMotion( diff --git a/services/web/frontend/js/features/source-editor/extensions/ranges.ts b/services/web/frontend/js/features/source-editor/extensions/ranges.ts index 7bde7a4adb..8dc4489d57 100644 --- a/services/web/frontend/js/features/source-editor/extensions/ranges.ts +++ b/services/web/frontend/js/features/source-editor/extensions/ranges.ts @@ -68,7 +68,7 @@ export const rangesDataField = StateField.define({ export const ranges = () => [ rangesDataField, // handle viewportChanged updates - ViewPlugin.define(() => { + ViewPlugin.define(view => { let timer: number return { diff --git a/services/web/frontend/js/features/source-editor/extensions/realtime.ts 
b/services/web/frontend/js/features/source-editor/extensions/realtime.ts index 58cfa8712a..36d9956a76 100644 --- a/services/web/frontend/js/features/source-editor/extensions/realtime.ts +++ b/services/web/frontend/js/features/source-editor/extensions/realtime.ts @@ -1,34 +1,10 @@ -import { - Prec, - Transaction, - Annotation, - ChangeSpec, - Text, - StateEffect, -} from '@codemirror/state' +import { Prec, Transaction, Annotation, ChangeSpec } from '@codemirror/state' import { EditorView, ViewPlugin } from '@codemirror/view' import { EventEmitter } from 'events' import RangesTracker from '@overleaf/ranges-tracker' -import { - ShareDoc, - ShareLatexOTShareDoc, - HistoryOTShareDoc, -} from '../../../../../types/share-doc' +import { ShareDoc } from '../../../../../types/share-doc' import { debugConsole } from '@/utils/debugging' import { DocumentContainer } from '@/features/ide-react/editor/document-container' -import { - EditOperation, - TextOperation, - InsertOp, - RemoveOp, - RetainOp, -} from 'overleaf-editor-core' -import { - updateTrackedChangesEffect, - setTrackChangesUserId, - trackedChangesState, - shareDocState, -} from './history-ot' /* * Integrate CodeMirror 6 with the real-time system, via ShareJS. 
@@ -49,10 +25,8 @@ import { * - frontend/js/features/ide-react/connection/editor-watchdog-manager.js */ -type Origin = 'remote' | 'undo' | 'reject' | undefined - export type ChangeDescription = { - origin: Origin + origin: 'remote' | 'undo' | 'reject' | undefined inserted: boolean removed: boolean } @@ -102,22 +76,15 @@ export const realtime = ( return Prec.highest([realtimePlugin, ensureRealtimePlugin]) } -type OTAdapter = { - handleUpdateFromCM( - transactions: readonly Transaction[], - ranges?: RangesTracker - ): void - attachShareJs(): void -} - export class EditorFacade extends EventEmitter { - private otAdapter: OTAdapter | null + public shareDoc: ShareDoc | null public events: EventEmitter + private maxDocLength?: number constructor(public view: EditorView) { super() this.view = view - this.otAdapter = null + this.shareDoc = null this.events = new EventEmitter() } @@ -151,62 +118,23 @@ export class EditorFacade extends EventEmitter { this.cmChange({ from: position, to: position + text.length }, origin) } - attachShareJs(shareDoc: ShareDoc, maxDocLength?: number) { - this.otAdapter = - shareDoc.otType === 'history-ot' - ? 
new HistoryOTAdapter(this, shareDoc, maxDocLength) - : new ShareLatexOTAdapter(this, shareDoc, maxDocLength) - this.otAdapter.attachShareJs() - } - - detachShareJs() { - this.otAdapter = null - } - - handleUpdateFromCM( - transactions: readonly Transaction[], - ranges?: RangesTracker - ) { - if (this.otAdapter == null) { - throw new Error('Trying to process updates with no otAdapter') - } - - this.otAdapter.handleUpdateFromCM(transactions, ranges) - } - - setTrackChangesUserId(userId: string | null) { - if (this.otAdapter instanceof HistoryOTAdapter) { - this.view.dispatch(setTrackChangesUserId(userId)) - } - } -} - -class ShareLatexOTAdapter { - constructor( - public editor: EditorFacade, - private shareDoc: ShareLatexOTShareDoc, - private maxDocLength?: number - ) { - this.editor = editor - this.shareDoc = shareDoc - this.maxDocLength = maxDocLength - } - // Connect to ShareJS, passing changes to the CodeMirror view // as new transactions. // This is a broad immitation of helper functions supplied in // the sharejs library. 
(See vendor/libs/sharejs, in particular // the 'attach_ace' helper) - attachShareJs() { - const shareDoc = this.shareDoc + attachShareJs(shareDoc: ShareDoc, maxDocLength?: number) { + this.shareDoc = shareDoc + this.maxDocLength = maxDocLength + const check = () => { // run in a timeout so it checks the editor content once this update has been applied window.setTimeout(() => { - const editorText = this.editor.getValue() + const editorText = this.getValue() const otText = shareDoc.getText() if (editorText !== otText) { - this.shareDoc.emit('error', 'Text does not match in CodeMirror 6') + shareDoc.emit('error', 'Text does not match in CodeMirror 6') debugConsole.error('Text does not match!') debugConsole.error('editor: ' + editorText) debugConsole.error('ot: ' + otText) @@ -215,12 +143,12 @@ class ShareLatexOTAdapter { } const onInsert = (pos: number, text: string) => { - this.editor.cmInsert(pos, text, 'remote') + this.cmInsert(pos, text, 'remote') check() } const onDelete = (pos: number, text: string) => { - this.editor.cmDelete(pos, text, 'remote') + this.cmDelete(pos, text, 'remote') check() } @@ -233,7 +161,7 @@ class ShareLatexOTAdapter { shareDoc.removeListener('insert', onInsert) shareDoc.removeListener('delete', onDelete) delete shareDoc.detach_cm6 - this.editor.detachShareJs() + this.shareDoc = null } } @@ -247,6 +175,10 @@ class ShareLatexOTAdapter { const trackedDeletesLength = ranges != null ? 
ranges.getTrackedDeletesLength() : 0 + if (!shareDoc) { + throw new Error('Trying to process updates with no shareDoc') + } + for (const transaction of transactions) { if (transaction.docChanged) { const origin = chooseOrigin(transaction) @@ -302,7 +234,7 @@ class ShareLatexOTAdapter { removed, } - this.editor.emit('change', this.editor, changeDescription) + this.emit('change', this, changeDescription) } ) } @@ -310,154 +242,6 @@ class ShareLatexOTAdapter { } } -class HistoryOTAdapter { - constructor( - public editor: EditorFacade, - private shareDoc: HistoryOTShareDoc, - private maxDocLength?: number - ) { - this.editor = editor - this.shareDoc = shareDoc - this.maxDocLength = maxDocLength - } - - attachShareJs() { - this.checkContent() - - const onRemoteOp = this.onRemoteOp.bind(this) - this.shareDoc.on('remoteop', onRemoteOp) - - this.shareDoc.detach_cm6 = () => { - this.shareDoc.removeListener('remoteop', onRemoteOp) - delete this.shareDoc.detach_cm6 - this.editor.detachShareJs() - } - } - - handleUpdateFromCM(transactions: readonly Transaction[]) { - for (const transaction of transactions) { - if ( - this.maxDocLength && - transaction.changes.newLength >= this.maxDocLength - ) { - this.shareDoc.emit( - 'error', - new Error('document length is greater than maxDocLength') - ) - return - } - - const origin = chooseOrigin(transaction) - transaction.changes.iterChanges((fromA, toA, fromB, toB, inserted) => { - this.onCodeMirrorChange(fromA, toA, fromB, toB, inserted, origin) - }) - } - } - - onRemoteOp(operations: EditOperation[]) { - const positionMapper = - this.editor.view.state.field(trackedChangesState).positionMapper - const changes: ChangeSpec[] = [] - let trackedChangesUpdated = false - for (const operation of operations) { - if (operation instanceof TextOperation) { - let cursor = 0 - for (const op of operation.ops) { - if (op instanceof InsertOp) { - if (op.tracking?.type !== 'delete') { - changes.push({ - from: positionMapper.toCM6(cursor), - insert: 
op.insertion, - }) - } - trackedChangesUpdated = true - } else if (op instanceof RemoveOp) { - changes.push({ - from: positionMapper.toCM6(cursor), - to: positionMapper.toCM6(cursor + op.length), - }) - cursor += op.length - trackedChangesUpdated = true - } else if (op instanceof RetainOp) { - if (op.tracking != null) { - if (op.tracking.type === 'delete') { - changes.push({ - from: positionMapper.toCM6(cursor), - to: positionMapper.toCM6(cursor + op.length), - }) - } - trackedChangesUpdated = true - } - cursor += op.length - } - } - } - - const view = this.editor.view - const effects: StateEffect[] = [] - const scrollEffect = view - .scrollSnapshot() - .map(view.state.changes(changes)) - if (scrollEffect != null) { - effects.push(scrollEffect) - } - if (trackedChangesUpdated) { - const shareDoc = this.editor.view.state.field(shareDocState) - if (shareDoc != null) { - const trackedChanges = shareDoc.snapshot.getTrackedChanges() - effects.push(updateTrackedChangesEffect.of(trackedChanges)) - } - } - - view.dispatch({ - changes, - effects, - annotations: [ - Transaction.remote.of(true), - Transaction.addToHistory.of(false), - ], - }) - } - } - - onCodeMirrorChange( - fromA: number, - toA: number, - fromB: number, - toB: number, - insertedText: Text, - origin: Origin - ) { - const insertedLength = insertedText.length - const removedLength = toA - fromA - const inserted = insertedLength > 0 - const removed = removedLength > 0 - - const changeDescription: ChangeDescription = { - origin, - inserted, - removed, - } - - this.editor.emit('change', this.editor, changeDescription) - } - - checkContent() { - // run in a timeout so it checks the editor content once this update has been applied - window.setTimeout(() => { - const editorText = this.editor.getValue() - const otText = this.shareDoc.getText() - - if (editorText !== otText) { - this.shareDoc.emit('error', 'Text does not match in CodeMirror 6') - debugConsole.error('Text does not match!') - debugConsole.error('editor: 
' + editorText) - debugConsole.error('ot: ' + otText) - } - }, 0) - } -} - export const trackChangesAnnotation = Annotation.define() const chooseOrigin = (transaction: Transaction) => { diff --git a/services/web/frontend/js/features/source-editor/extensions/vertical-overflow.ts b/services/web/frontend/js/features/source-editor/extensions/vertical-overflow.ts index 873343c2bc..20505ed95d 100644 --- a/services/web/frontend/js/features/source-editor/extensions/vertical-overflow.ts +++ b/services/web/frontend/js/features/source-editor/extensions/vertical-overflow.ts @@ -188,7 +188,7 @@ class TopPaddingWidget extends WidgetType { this.height = height } - toDOM(): HTMLElement { + toDOM(view: EditorView): HTMLElement { const element = document.createElement('div') element.style.height = this.height + 'px' return element diff --git a/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/begin.ts b/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/begin.ts index 1826b48719..70e508d93e 100644 --- a/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/begin.ts +++ b/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/begin.ts @@ -45,7 +45,6 @@ export class BeginWidget extends WidgetType { return element.getBoundingClientRect() } - // eslint-disable-next-line @typescript-eslint/no-unused-vars buildName(name: HTMLSpanElement, view: EditorView) { name.textContent = this.environment } diff --git a/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/end.ts b/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/end.ts index 3ca2439ae1..232399de3b 100644 --- a/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/end.ts +++ b/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/end.ts @@ -7,7 +7,7 @@ export class EndWidget extends WidgetType { return element } 
- eq() { + eq(widget: EndWidget) { return true } diff --git a/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/environment-line.ts b/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/environment-line.ts index d506ac2c38..d6ab42503e 100644 --- a/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/environment-line.ts +++ b/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/environment-line.ts @@ -1,4 +1,4 @@ -import { WidgetType } from '@codemirror/view' +import { EditorView, WidgetType } from '@codemirror/view' export class EnvironmentLineWidget extends WidgetType { constructor( @@ -8,7 +8,7 @@ export class EnvironmentLineWidget extends WidgetType { super() } - toDOM() { + toDOM(view: EditorView) { const element = document.createElement('div') element.classList.add(`ol-cm-environment-${this.environment}`) element.classList.add('ol-cm-environment-edge') diff --git a/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/table-rendering-error.ts b/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/table-rendering-error.ts index 68da3ab058..63ad0a297a 100644 --- a/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/table-rendering-error.ts +++ b/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/table-rendering-error.ts @@ -17,7 +17,6 @@ export class TableRenderingErrorWidget extends WidgetType { const iconType = document.createElement('span') iconType.classList.add('material-symbols') iconType.setAttribute('aria-hidden', 'true') - iconType.setAttribute('translate', 'no') iconType.textContent = 'info' icon.appendChild(iconType) warning.appendChild(icon) diff --git a/services/web/frontend/js/features/source-editor/hooks/use-codemirror-scope.ts b/services/web/frontend/js/features/source-editor/hooks/use-codemirror-scope.ts index 
2504afdd0c..bc7a99050d 100644 --- a/services/web/frontend/js/features/source-editor/hooks/use-codemirror-scope.ts +++ b/services/web/frontend/js/features/source-editor/hooks/use-codemirror-scope.ts @@ -185,9 +185,9 @@ function useCodeMirrorScope(view: EditorView) { if (currentDocument) { if (trackChanges) { - currentDocument.setTrackChangesUserId(userId ?? 'anonymous') + currentDocument.track_changes_as = userId || 'anonymous-user' } else { - currentDocument.setTrackChangesUserId(null) + currentDocument.track_changes_as = null } } }, [userId, currentDocument, trackChanges]) diff --git a/services/web/frontend/js/features/source-editor/languages/latex/latex-indent-service.ts b/services/web/frontend/js/features/source-editor/languages/latex/latex-indent-service.ts index d1e8e84bc4..08c1798032 100644 --- a/services/web/frontend/js/features/source-editor/languages/latex/latex-indent-service.ts +++ b/services/web/frontend/js/features/source-editor/languages/latex/latex-indent-service.ts @@ -1,7 +1,7 @@ import { indentService } from '@codemirror/language' export const latexIndentService = () => - indentService.of(indentContext => { + indentService.of((indentContext, pos) => { // only use this for insertNewLineAndIndent if (indentContext.simulatedBreak) { // match the indentation of the previous line (if present) diff --git a/services/web/frontend/js/features/source-editor/languages/latex/linter/latex-linter.worker.js b/services/web/frontend/js/features/source-editor/languages/latex/linter/latex-linter.worker.js index c496ce767f..0bfaf94d62 100644 --- a/services/web/frontend/js/features/source-editor/languages/latex/linter/latex-linter.worker.js +++ b/services/web/frontend/js/features/source-editor/languages/latex/linter/latex-linter.worker.js @@ -2087,10 +2087,7 @@ if (typeof onmessage !== 'undefined') { } // export dummy class for testing export default class LintWorker { - // unused vars kept to document the interface - // eslint-disable-next-line 
@typescript-eslint/no-unused-vars postMessage(message) {} - // eslint-disable-next-line @typescript-eslint/no-unused-vars addEventListener(eventName, listener) {} Parse(text) { return Parse(text) diff --git a/services/web/frontend/js/features/subscription/components/dashboard/free-plan.tsx b/services/web/frontend/js/features/subscription/components/dashboard/free-plan.tsx index 1f9583dd8b..a8cf7dcf7b 100644 --- a/services/web/frontend/js/features/subscription/components/dashboard/free-plan.tsx +++ b/services/web/frontend/js/features/subscription/components/dashboard/free-plan.tsx @@ -1,5 +1,6 @@ import { useTranslation, Trans } from 'react-i18next' import WritefullManagedBundleAddOn from '@/features/subscription/components/dashboard/states/active/change-plan/modals/writefull-bundle-management-modal' +import RedirectAlerts from './redirect-alerts' import getMeta from '@/utils/meta' function FreePlan() { @@ -8,6 +9,7 @@ function FreePlan() { return ( <> + { e.preventDefault() runAsync(postJSON('/user/subscription/account/email')) } + if (!personalSubscription || !('payment' in personalSubscription)) return null + + const recurlyEmail = personalSubscription.payment.accountEmail + + if (!userEmail || recurlyEmail === userEmail) return null + return ( <>
    @@ -39,7 +39,7 @@ function PersonalSubscriptionSyncEmail() { , ]} // eslint-disable-line react/jsx-key - values={{ recurlyEmail: accountEmail, userEmail }} + values={{ recurlyEmail, userEmail }} shouldUnescape tOptions={{ interpolation: { escapeValue: true } }} /> @@ -64,4 +64,4 @@ function PersonalSubscriptionSyncEmail() { ) } -export default PersonalSubscriptionSyncEmail +export default PersonalSubscriptionRecurlySyncEmail diff --git a/services/web/frontend/js/features/subscription/components/dashboard/personal-subscription.tsx b/services/web/frontend/js/features/subscription/components/dashboard/personal-subscription.tsx index b174751528..2173ea45d3 100644 --- a/services/web/frontend/js/features/subscription/components/dashboard/personal-subscription.tsx +++ b/services/web/frontend/js/features/subscription/components/dashboard/personal-subscription.tsx @@ -5,8 +5,9 @@ import { ActiveSubscriptionNew } from '@/features/subscription/components/dashbo import { CanceledSubscription } from './states/canceled' import { ExpiredSubscription } from './states/expired' import { useSubscriptionDashboardContext } from '../../context/subscription-dashboard-context' -import PersonalSubscriptionSyncEmail from './personal-subscription-sync-email' +import PersonalSubscriptionRecurlySyncEmail from './personal-subscription-recurly-sync-email' import OLNotification from '@/features/ui/components/ol/ol-notification' +import RedirectAlerts from './redirect-alerts' function PastDueSubscriptionAlert({ subscription, @@ -75,6 +76,7 @@ function PersonalSubscription() { return ( <> + {personalSubscription.payment.hasPastDueInvoice && ( )} @@ -88,7 +90,7 @@ function PersonalSubscription() { /> )}
    - + ) } diff --git a/services/web/frontend/js/features/subscription/components/dashboard/redirect-alerts.tsx b/services/web/frontend/js/features/subscription/components/dashboard/redirect-alerts.tsx index 9dea4e1e46..be5bab484e 100644 --- a/services/web/frontend/js/features/subscription/components/dashboard/redirect-alerts.tsx +++ b/services/web/frontend/js/features/subscription/components/dashboard/redirect-alerts.tsx @@ -15,8 +15,6 @@ export function RedirectAlerts() { warning = t('good_news_you_are_already_receiving_this_add_on_via_writefull') } else if (redirectReason === 'double-buy') { warning = t('good_news_you_already_purchased_this_add_on') - } else if (redirectReason === 'ai-assist-unavailable') { - warning = t('ai_assist_unavailable_due_to_subscription_type') } else { return null } diff --git a/services/web/frontend/js/features/subscription/components/dashboard/states/active/change-plan/individual-plans-table.tsx b/services/web/frontend/js/features/subscription/components/dashboard/states/active/change-plan/individual-plans-table.tsx index d8c98fc56b..a6ede01715 100644 --- a/services/web/frontend/js/features/subscription/components/dashboard/states/active/change-plan/individual-plans-table.tsx +++ b/services/web/frontend/js/features/subscription/components/dashboard/states/active/change-plan/individual-plans-table.tsx @@ -20,7 +20,7 @@ function ChangeToPlanButton({ planCode }: { planCode: string }) { ) } -function KeepCurrentPlanButton() { +function KeepCurrentPlanButton({ plan }: { plan: Plan }) { const { t } = useTranslation() const { handleOpenModal } = useSubscriptionDashboardContext() @@ -43,7 +43,7 @@ function ChangePlanButton({ plan }: { plan: Plan }) { plan.planCode === personalSubscription.planCode.split('_')[0] if (isCurrentPlanForUser && personalSubscription.pendingPlan) { - return + return } else if (isCurrentPlanForUser && !personalSubscription.pendingPlan) { return ( diff --git 
a/services/web/frontend/js/features/subscription/components/dashboard/states/active/change-plan/modals/confirm-change-plan-modal.tsx b/services/web/frontend/js/features/subscription/components/dashboard/states/active/change-plan/modals/confirm-change-plan-modal.tsx index a964009dcc..08cbf1743f 100644 --- a/services/web/frontend/js/features/subscription/components/dashboard/states/active/change-plan/modals/confirm-change-plan-modal.tsx +++ b/services/web/frontend/js/features/subscription/components/dashboard/states/active/change-plan/modals/confirm-change-plan-modal.tsx @@ -1,10 +1,7 @@ import { useState } from 'react' import { useTranslation, Trans } from 'react-i18next' import { SubscriptionDashModalIds } from '../../../../../../../../../../types/subscription/dashboard/modal-ids' -import { - postJSON, - FetchError, -} from '../../../../../../../../infrastructure/fetch-json' +import { postJSON } from '../../../../../../../../infrastructure/fetch-json' import getMeta from '../../../../../../../../utils/meta' import { useSubscriptionDashboardContext } from '../../../../../../context/subscription-dashboard-context' import { subscriptionUpdateUrl } from '../../../../../../data/subscription-url' @@ -17,7 +14,6 @@ import OLModal, { } from '@/features/ui/components/ol/ol-modal' import OLButton from '@/features/ui/components/ol/ol-button' import OLNotification from '@/features/ui/components/ol/ol-notification' -import handleStripePaymentAction from '@/features/subscription/util/handle-stripe-payment-action' export function ConfirmChangePlanModal() { const modalId: SubscriptionDashModalIds = 'change-to-plan' @@ -41,13 +37,8 @@ export function ConfirmChangePlanModal() { }) location.reload() } catch (e) { - const { handled } = await handleStripePaymentAction(e as FetchError) - if (handled) { - location.reload() - } else { - setError(true) - setInflight(false) - } + setError(true) + setInflight(false) } } diff --git 
a/services/web/frontend/js/features/subscription/components/dashboard/subscription-dashboard.tsx b/services/web/frontend/js/features/subscription/components/dashboard/subscription-dashboard.tsx index b0f1b4122c..8cb07181cf 100644 --- a/services/web/frontend/js/features/subscription/components/dashboard/subscription-dashboard.tsx +++ b/services/web/frontend/js/features/subscription/components/dashboard/subscription-dashboard.tsx @@ -15,7 +15,6 @@ import OLRow from '@/features/ui/components/ol/ol-row' import OLCol from '@/features/ui/components/ol/ol-col' import OLNotification from '@/features/ui/components/ol/ol-notification' import WritefullManagedBundleAddOn from './states/active/change-plan/modals/writefull-bundle-management-modal' -import RedirectAlerts from './redirect-alerts' function SubscriptionDashboard() { const { t } = useTranslation() @@ -41,7 +40,6 @@ function SubscriptionDashboard() { type="warning" /> )} -

    {t('your_subscription')}

    diff --git a/services/web/frontend/js/features/subscription/components/group-invite/group-invite.tsx b/services/web/frontend/js/features/subscription/components/group-invite/group-invite.tsx index 66b6288388..a4e8fb2da8 100644 --- a/services/web/frontend/js/features/subscription/components/group-invite/group-invite.tsx +++ b/services/web/frontend/js/features/subscription/components/group-invite/group-invite.tsx @@ -19,20 +19,20 @@ export type InviteViewTypes = | undefined function GroupInviteViews() { - const hasIndividualPaidSubscription = getMeta( - 'ol-hasIndividualPaidSubscription' + const hasIndividualRecurlySubscription = getMeta( + 'ol-hasIndividualRecurlySubscription' ) const cannotJoinSubscription = getMeta('ol-cannot-join-subscription') useEffect(() => { if (cannotJoinSubscription) { setView('managed-user-cannot-join') - } else if (hasIndividualPaidSubscription) { + } else if (hasIndividualRecurlySubscription) { setView('cancel-personal-subscription') } else { setView('invite') } - }, [cannotJoinSubscription, hasIndividualPaidSubscription]) + }, [cannotJoinSubscription, hasIndividualRecurlySubscription]) const [view, setView] = useState(undefined) if (!view) { diff --git a/services/web/frontend/js/features/subscription/components/preview-subscription-change/root.tsx b/services/web/frontend/js/features/subscription/components/preview-subscription-change/root.tsx index 112d15d7e3..367a5e35a9 100644 --- a/services/web/frontend/js/features/subscription/components/preview-subscription-change/root.tsx +++ b/services/web/frontend/js/features/subscription/components/preview-subscription-change/root.tsx @@ -11,7 +11,7 @@ import { formatCurrency } from '@/shared/utils/currency' import useAsync from '@/shared/hooks/use-async' import { useLocation } from '@/shared/hooks/use-location' import { debugConsole } from '@/utils/debugging' -import { FetchError, postJSON } from '@/infrastructure/fetch-json' +import { postJSON } from '@/infrastructure/fetch-json' import 
Notification from '@/shared/components/notification' import OLCard from '@/features/ui/components/ol/ol-card' import OLRow from '@/features/ui/components/ol/ol-row' @@ -21,7 +21,6 @@ import { subscriptionUpdateUrl } from '@/features/subscription/data/subscription import * as eventTracking from '@/infrastructure/event-tracking' import sparkleText from '@/shared/svgs/ai-sparkle-text.svg' import { useFeatureFlag } from '@/shared/context/split-test-context' -import handleStripePaymentAction from '../../util/handle-stripe-payment-action' function PreviewSubscriptionChange() { const preview = getMeta( @@ -280,25 +279,16 @@ function PreviewSubscriptionChange() { } async function payNow(preview: SubscriptionChangePreview) { - try { - if (preview.change.type === 'add-on-purchase') { - await postJSON( - `/user/subscription/addon/${preview.change.addOn.code}/add` - ) - } else if (preview.change.type === 'premium-subscription') { - await postJSON(subscriptionUpdateUrl, { - body: { plan_code: preview.change.plan.code }, - }) - } else { - throw new Error( - `Unknown subscription change preview type: ${preview.change}` - ) - } - } catch (e) { - const { handled } = await handleStripePaymentAction(e as FetchError) - if (!handled) { - throw e - } + if (preview.change.type === 'add-on-purchase') { + await postJSON(`/user/subscription/addon/${preview.change.addOn.code}/add`) + } else if (preview.change.type === 'premium-subscription') { + await postJSON(subscriptionUpdateUrl, { + body: { plan_code: preview.change.plan.code }, + }) + } else { + throw new Error( + `Unknown subscription change preview type: ${preview.change}` + ) } } diff --git a/services/web/frontend/js/features/subscription/util/handle-stripe-payment-action.ts b/services/web/frontend/js/features/subscription/util/handle-stripe-payment-action.ts deleted file mode 100644 index f533cba730..0000000000 --- a/services/web/frontend/js/features/subscription/util/handle-stripe-payment-action.ts +++ /dev/null @@ -1,29 +0,0 @@ 
-import { FetchError, postJSON } from '@/infrastructure/fetch-json' -import getMeta from '../../../utils/meta' -import { loadStripe } from '@stripe/stripe-js/pure' - -export default async function handleStripePaymentAction( - error: FetchError -): Promise<{ handled: boolean }> { - const clientSecret = error?.data?.clientSecret - - if (clientSecret) { - // TODO: support both US and UK Stripe accounts - const stripeUKPublicKey = getMeta('ol-stripeUKApiKey') - const stripe = await loadStripe(stripeUKPublicKey) - if (stripe) { - const manualConfirmationFlow = - await stripe.confirmCardPayment(clientSecret) - if (!manualConfirmationFlow.error) { - try { - await postJSON(`/user/subscription/sync`) - } catch (error) { - // if the sync fails, there may be stale data until the webhook is - // processed but we can't do any special handling for that in here - } - return { handled: true } - } - } - } - return { handled: false } -} diff --git a/services/web/frontend/js/features/tooltip/index-bs5.ts b/services/web/frontend/js/features/tooltip/index-bs5.ts index 43d6bc015f..62c199e2e6 100644 --- a/services/web/frontend/js/features/tooltip/index-bs5.ts +++ b/services/web/frontend/js/features/tooltip/index-bs5.ts @@ -21,8 +21,8 @@ if (footerLanguageElement) { const allTooltips = document.querySelectorAll('[data-bs-toggle="tooltip"]') allTooltips.forEach(element => { - // eslint-disable-next-line no-new - new Tooltip(element) + // eslint-disable-next-line no-unused-vars + const tooltip = new Tooltip(element) }) const possibleBadgeTooltips = document.querySelectorAll('[data-badge-tooltip]') @@ -36,8 +36,8 @@ possibleBadgeTooltips.forEach(element => { if (element.parentElement) { const parentWidth = getElementWidth(element.parentElement) if (element.scrollWidth > parentWidth) { - // eslint-disable-next-line no-new - new Tooltip(element) + // eslint-disable-next-line no-unused-vars + const tooltip = new Tooltip(element) } else { element.parentElement.style.maxWidth = 'none' } diff 
--git a/services/web/frontend/js/features/ui/components/bootstrap-5/dropdown-toggle-with-tooltip.tsx b/services/web/frontend/js/features/ui/components/bootstrap-5/dropdown-toggle-with-tooltip.tsx new file mode 100644 index 0000000000..cdf20e3dd3 --- /dev/null +++ b/services/web/frontend/js/features/ui/components/bootstrap-5/dropdown-toggle-with-tooltip.tsx @@ -0,0 +1,52 @@ +import { ReactNode, forwardRef } from 'react' +import { BsPrefixRefForwardingComponent } from 'react-bootstrap/helpers' +import type { DropdownToggleProps } from '@/features/ui/components/types/dropdown-menu-props' +import { + DropdownToggle as BS5DropdownToggle, + OverlayTrigger, + OverlayTriggerProps, + Tooltip, +} from 'react-bootstrap' +import type { MergeAndOverride } from '../../../../../../types/utils' + +type DropdownToggleWithTooltipProps = MergeAndOverride< + DropdownToggleProps, + { + children: ReactNode + overlayTriggerProps?: Omit + toolTipDescription: string + tooltipProps?: Omit, 'children'> + 'aria-label'?: string + } +> +const DropdownToggleWithTooltip = forwardRef< + BsPrefixRefForwardingComponent<'button', DropdownToggleProps>, + DropdownToggleWithTooltipProps +>( + ( + { + children, + toolTipDescription, + overlayTriggerProps, + tooltipProps, + id, + ...toggleProps + }, + ref + ) => { + return ( + {toolTipDescription}} + {...overlayTriggerProps} + > + + {children} + + + ) + } +) + +DropdownToggleWithTooltip.displayName = 'DropdownToggleWithTooltip' + +export default DropdownToggleWithTooltip diff --git a/services/web/frontend/js/features/ui/components/bootstrap-5/footer/fat-footer-base.tsx b/services/web/frontend/js/features/ui/components/bootstrap-5/footer/fat-footer-base.tsx index f2fd7e3a82..dd85660ffd 100644 --- a/services/web/frontend/js/features/ui/components/bootstrap-5/footer/fat-footer-base.tsx +++ b/services/web/frontend/js/features/ui/components/bootstrap-5/footer/fat-footer-base.tsx @@ -26,7 +26,7 @@ function FatFooterBase() {