diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE/bug_report.md similarity index 83% rename from .github/ISSUE_TEMPLATE.md rename to .github/ISSUE_TEMPLATE/bug_report.md index 3a375bcbe9..9c0577106e 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,10 +1,19 @@ +--- +name: Bug report +about: Report a bug +title: '' +labels: type:bug +assignees: '' + +--- + diff --git a/develop/docker-compose.yml b/develop/docker-compose.yml index 750e11ac87..7161e0686a 100644 --- a/develop/docker-compose.yml +++ b/develop/docker-compose.yml @@ -25,10 +25,10 @@ services: env_file: - dev.env environment: - - DOCKER_RUNNER=true - TEXLIVE_IMAGE=texlive-full # docker build texlive -t texlive-full - - COMPILES_HOST_DIR=${PWD}/compiles - - OUTPUT_HOST_DIR=${PWD}/output + - SANDBOXED_COMPILES=true + - SANDBOXED_COMPILES_HOST_DIR_COMPILES=${PWD}/compiles + - SANDBOXED_COMPILES_HOST_DIR_OUTPUT=${PWD}/output user: root volumes: - ${PWD}/compiles:/overleaf/services/clsi/compiles diff --git a/libraries/access-token-encryptor/buildscript.txt b/libraries/access-token-encryptor/buildscript.txt index 74c3bbbd24..8ce12073ea 100644 --- a/libraries/access-token-encryptor/buildscript.txt +++ b/libraries/access-token-encryptor/buildscript.txt @@ -1,6 +1,6 @@ access-token-encryptor --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/fetch-utils/buildscript.txt b/libraries/fetch-utils/buildscript.txt index 91548ff7c6..35e8eed85b 100644 --- a/libraries/fetch-utils/buildscript.txt +++ b/libraries/fetch-utils/buildscript.txt @@ -1,6 +1,6 @@ fetch-utils --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/logger/buildscript.txt b/libraries/logger/buildscript.txt index 
9008707b0e..a3d1cc0646 100644 --- a/libraries/logger/buildscript.txt +++ b/libraries/logger/buildscript.txt @@ -1,6 +1,6 @@ logger --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/metrics/buildscript.txt b/libraries/metrics/buildscript.txt index 2c2e5d7531..58ff195d95 100644 --- a/libraries/metrics/buildscript.txt +++ b/libraries/metrics/buildscript.txt @@ -1,6 +1,6 @@ metrics --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/mongo-utils/buildscript.txt b/libraries/mongo-utils/buildscript.txt index bda8d4f734..35ca540bfb 100644 --- a/libraries/mongo-utils/buildscript.txt +++ b/libraries/mongo-utils/buildscript.txt @@ -1,6 +1,6 @@ mongo-utils --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/o-error/buildscript.txt b/libraries/o-error/buildscript.txt index a4134b4b60..c61679157e 100644 --- a/libraries/o-error/buildscript.txt +++ b/libraries/o-error/buildscript.txt @@ -1,6 +1,6 @@ o-error --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/object-persistor/buildscript.txt b/libraries/object-persistor/buildscript.txt index 75d2e09382..d5113ce910 100644 --- a/libraries/object-persistor/buildscript.txt +++ b/libraries/object-persistor/buildscript.txt @@ -1,6 +1,6 @@ object-persistor --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git 
a/libraries/overleaf-editor-core/buildscript.txt b/libraries/overleaf-editor-core/buildscript.txt index 9b6508663b..25a221232a 100644 --- a/libraries/overleaf-editor-core/buildscript.txt +++ b/libraries/overleaf-editor-core/buildscript.txt @@ -1,6 +1,6 @@ overleaf-editor-core --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/overleaf-editor-core/lib/file_data/clear_tracking_props.js b/libraries/overleaf-editor-core/lib/file_data/clear_tracking_props.js index ba7f0bf00b..b3ddbab7d8 100644 --- a/libraries/overleaf-editor-core/lib/file_data/clear_tracking_props.js +++ b/libraries/overleaf-editor-core/lib/file_data/clear_tracking_props.js @@ -1,7 +1,7 @@ // @ts-check /** - * @import { ClearTrackingPropsRawData } from '../types' + * @import { ClearTrackingPropsRawData, TrackingDirective } from '../types' */ class ClearTrackingProps { @@ -11,12 +11,27 @@ class ClearTrackingProps { /** * @param {any} other - * @returns {boolean} + * @returns {other is ClearTrackingProps} */ equals(other) { return other instanceof ClearTrackingProps } + /** + * @param {TrackingDirective} other + * @returns {other is ClearTrackingProps} + */ + canMergeWith(other) { + return other instanceof ClearTrackingProps + } + + /** + * @param {TrackingDirective} other + */ + mergeWith(other) { + return this + } + /** * @returns {ClearTrackingPropsRawData} */ diff --git a/libraries/overleaf-editor-core/lib/file_data/lazy_string_file_data.js b/libraries/overleaf-editor-core/lib/file_data/lazy_string_file_data.js index bc11b3e98d..abc720d10c 100644 --- a/libraries/overleaf-editor-core/lib/file_data/lazy_string_file_data.js +++ b/libraries/overleaf-editor-core/lib/file_data/lazy_string_file_data.js @@ -11,7 +11,7 @@ const EditOperation = require('../operation/edit_operation') const EditOperationBuilder = require('../operation/edit_operation_builder') /** - * 
@import { BlobStore, ReadonlyBlobStore, RangesBlob, RawFileData, RawLazyStringFileData } from '../types' + * @import { BlobStore, ReadonlyBlobStore, RangesBlob, RawHashFileData, RawLazyStringFileData } from '../types' */ class LazyStringFileData extends FileData { @@ -159,11 +159,11 @@ class LazyStringFileData extends FileData { /** @inheritdoc * @param {BlobStore} blobStore - * @return {Promise} + * @return {Promise} */ async store(blobStore) { if (this.operations.length === 0) { - /** @type RawFileData */ + /** @type RawHashFileData */ const raw = { hash: this.hash } if (this.rangesHash) { raw.rangesHash = this.rangesHash @@ -171,9 +171,11 @@ class LazyStringFileData extends FileData { return raw } const eager = await this.toEager(blobStore) + const raw = await eager.store(blobStore) + this.hash = raw.hash + this.rangesHash = raw.rangesHash this.operations.length = 0 - /** @type RawFileData */ - return await eager.store(blobStore) + return raw } } diff --git a/libraries/overleaf-editor-core/lib/file_data/string_file_data.js b/libraries/overleaf-editor-core/lib/file_data/string_file_data.js index 48df633461..c78c1e0414 100644 --- a/libraries/overleaf-editor-core/lib/file_data/string_file_data.js +++ b/libraries/overleaf-editor-core/lib/file_data/string_file_data.js @@ -8,7 +8,7 @@ const CommentList = require('./comment_list') const TrackedChangeList = require('./tracked_change_list') /** - * @import { StringFileRawData, RawFileData, BlobStore, CommentRawData } from "../types" + * @import { StringFileRawData, RawHashFileData, BlobStore, CommentRawData } from "../types" * @import { TrackedChangeRawData, RangesBlob } from "../types" * @import EditOperation from "../operation/edit_operation" */ @@ -139,7 +139,7 @@ class StringFileData extends FileData { /** * @inheritdoc * @param {BlobStore} blobStore - * @return {Promise} + * @return {Promise} */ async store(blobStore) { const blob = await blobStore.putString(this.content) diff --git 
a/libraries/overleaf-editor-core/lib/file_data/tracked_change.js b/libraries/overleaf-editor-core/lib/file_data/tracked_change.js index d0e6517d0f..e789a427b0 100644 --- a/libraries/overleaf-editor-core/lib/file_data/tracked_change.js +++ b/libraries/overleaf-editor-core/lib/file_data/tracked_change.js @@ -84,6 +84,21 @@ class TrackedChange { ) ) } + + /** + * Return an equivalent tracked change whose extent is limited to the given + * range + * + * @param {Range} range + * @returns {TrackedChange | null} - the result or null if the intersection is empty + */ + intersectRange(range) { + const intersection = this.range.intersect(range) + if (intersection == null) { + return null + } + return new TrackedChange(intersection, this.tracking) + } } module.exports = TrackedChange diff --git a/libraries/overleaf-editor-core/lib/file_data/tracked_change_list.js b/libraries/overleaf-editor-core/lib/file_data/tracked_change_list.js index 263b37ab50..b302865c70 100644 --- a/libraries/overleaf-editor-core/lib/file_data/tracked_change_list.js +++ b/libraries/overleaf-editor-core/lib/file_data/tracked_change_list.js @@ -2,9 +2,11 @@ const Range = require('../range') const TrackedChange = require('./tracked_change') const TrackingProps = require('../file_data/tracking_props') +const { InsertOp, RemoveOp, RetainOp } = require('../operation/scan_op') /** * @import { TrackingDirective, TrackedChangeRawData } from "../types" + * @import TextOperation from "../operation/text_operation" */ class TrackedChangeList { @@ -58,6 +60,22 @@ class TrackedChangeList { return this._trackedChanges.filter(change => range.contains(change.range)) } + /** + * Returns tracked changes that overlap with the given range + * @param {Range} range + * @returns {TrackedChange[]} + */ + intersectRange(range) { + const changes = [] + for (const change of this._trackedChanges) { + const intersection = change.intersectRange(range) + if (intersection != null) { + changes.push(intersection) + } + } + return changes 
+ } + /** * Returns the tracking props for a given range. * @param {Range} range @@ -89,6 +107,8 @@ class TrackedChangeList { /** * Collapses consecutive (and compatible) ranges + * + * @private * @returns {void} */ _mergeRanges() { @@ -117,12 +137,28 @@ class TrackedChangeList { } /** + * Apply an insert operation * * @param {number} cursor * @param {string} insertedText * @param {{tracking?: TrackingProps}} opts */ applyInsert(cursor, insertedText, opts = {}) { + this._applyInsert(cursor, insertedText, opts) + this._mergeRanges() + } + + /** + * Apply an insert operation + * + * This method will not merge ranges at the end + * + * @private + * @param {number} cursor + * @param {string} insertedText + * @param {{tracking?: TrackingProps}} [opts] + */ + _applyInsert(cursor, insertedText, opts = {}) { const newTrackedChanges = [] for (const trackedChange of this._trackedChanges) { if ( @@ -171,15 +207,29 @@ class TrackedChangeList { newTrackedChanges.push(newTrackedChange) } this._trackedChanges = newTrackedChanges - this._mergeRanges() } /** + * Apply a delete operation to the list of tracked changes * * @param {number} cursor * @param {number} length */ applyDelete(cursor, length) { + this._applyDelete(cursor, length) + this._mergeRanges() + } + + /** + * Apply a delete operation to the list of tracked changes + * + * This method will not merge ranges at the end + * + * @private + * @param {number} cursor + * @param {number} length + */ + _applyDelete(cursor, length) { const newTrackedChanges = [] for (const trackedChange of this._trackedChanges) { const deletedRange = new Range(cursor, length) @@ -205,15 +255,31 @@ class TrackedChangeList { } } this._trackedChanges = newTrackedChanges + } + + /** + * Apply a retain operation to the list of tracked changes + * + * @param {number} cursor + * @param {number} length + * @param {{tracking?: TrackingDirective}} [opts] + */ + applyRetain(cursor, length, opts = {}) { + this._applyRetain(cursor, length, opts) 
this._mergeRanges() } /** + * Apply a retain operation to the list of tracked changes + * + * This method will not merge ranges at the end + * + * @private * @param {number} cursor * @param {number} length * @param {{tracking?: TrackingDirective}} opts */ - applyRetain(cursor, length, opts = {}) { + _applyRetain(cursor, length, opts = {}) { // If there's no tracking info, leave everything as-is if (!opts.tracking) { return @@ -269,6 +335,31 @@ class TrackedChangeList { newTrackedChanges.push(newTrackedChange) } this._trackedChanges = newTrackedChanges + } + + /** + * Apply a text operation to the list of tracked changes + * + * Ranges are merged only once at the end, for performance and to avoid + * problematic edge cases where intermediate ranges get incorrectly merged. + * + * @param {TextOperation} operation + */ + applyTextOperation(operation) { + // this cursor tracks the destination document that gets modified as + // operations are applied to it. + let cursor = 0 + for (const op of operation.ops) { + if (op instanceof InsertOp) { + this._applyInsert(cursor, op.insertion, { tracking: op.tracking }) + cursor += op.insertion.length + } else if (op instanceof RemoveOp) { + this._applyDelete(cursor, op.length) + } else if (op instanceof RetainOp) { + this._applyRetain(cursor, op.length, { tracking: op.tracking }) + cursor += op.length + } + } this._mergeRanges() } } diff --git a/libraries/overleaf-editor-core/lib/file_data/tracking_props.js b/libraries/overleaf-editor-core/lib/file_data/tracking_props.js index 75ec95c566..82d731a232 100644 --- a/libraries/overleaf-editor-core/lib/file_data/tracking_props.js +++ b/libraries/overleaf-editor-core/lib/file_data/tracking_props.js @@ -62,6 +62,35 @@ class TrackingProps { this.ts.getTime() === other.ts.getTime() ) } + + /** + * Are these tracking props compatible with the other tracking props for merging + * ranges? 
+ * + * @param {TrackingDirective} other + * @returns {other is TrackingProps} + */ + canMergeWith(other) { + if (!(other instanceof TrackingProps)) { + return false + } + return this.type === other.type && this.userId === other.userId + } + + /** + * Merge two tracking props + * + * Assumes that `canMerge(other)` returns true + * + * @param {TrackingDirective} other + */ + mergeWith(other) { + if (!this.canMergeWith(other)) { + throw new Error('Cannot merge with incompatible tracking props') + } + const ts = this.ts <= other.ts ? this.ts : other.ts + return new TrackingProps(this.type, this.userId, ts) + } } module.exports = TrackingProps diff --git a/libraries/overleaf-editor-core/lib/operation/scan_op.js b/libraries/overleaf-editor-core/lib/operation/scan_op.js index 4f179f24b4..fd322459cc 100644 --- a/libraries/overleaf-editor-core/lib/operation/scan_op.js +++ b/libraries/overleaf-editor-core/lib/operation/scan_op.js @@ -175,7 +175,7 @@ class InsertOp extends ScanOp { return false } if (this.tracking) { - if (!this.tracking.equals(other.tracking)) { + if (!other.tracking || !this.tracking.canMergeWith(other.tracking)) { return false } } else if (other.tracking) { @@ -198,7 +198,10 @@ class InsertOp extends ScanOp { throw new Error('Cannot merge with incompatible operation') } this.insertion += other.insertion - // We already have the same tracking info and commentIds + if (this.tracking != null && other.tracking != null) { + this.tracking = this.tracking.mergeWith(other.tracking) + } + // We already have the same commentIds } /** @@ -306,9 +309,13 @@ class RetainOp extends ScanOp { return false } if (this.tracking) { - return this.tracking.equals(other.tracking) + if (!other.tracking || !this.tracking.canMergeWith(other.tracking)) { + return false + } + } else if (other.tracking) { + return false } - return !other.tracking + return true } /** @@ -319,6 +326,9 @@ class RetainOp extends ScanOp { throw new Error('Cannot merge with incompatible operation') } 
this.length += other.length + if (this.tracking != null && other.tracking != null) { + this.tracking = this.tracking.mergeWith(other.tracking) + } } /** diff --git a/libraries/overleaf-editor-core/lib/operation/text_operation.js b/libraries/overleaf-editor-core/lib/operation/text_operation.js index 148570fa42..61c7f124b4 100644 --- a/libraries/overleaf-editor-core/lib/operation/text_operation.js +++ b/libraries/overleaf-editor-core/lib/operation/text_operation.js @@ -314,25 +314,18 @@ class TextOperation extends EditOperation { str ) } - file.trackedChanges.applyRetain(result.length, op.length, { - tracking: op.tracking, - }) result += str.slice(inputCursor, inputCursor + op.length) inputCursor += op.length } else if (op instanceof InsertOp) { if (containsNonBmpChars(op.insertion)) { throw new InvalidInsertionError(str, op.toJSON()) } - file.trackedChanges.applyInsert(result.length, op.insertion, { - tracking: op.tracking, - }) file.comments.applyInsert( new Range(result.length, op.insertion.length), { commentIds: op.commentIds } ) result += op.insertion } else if (op instanceof RemoveOp) { - file.trackedChanges.applyDelete(result.length, op.length) file.comments.applyDelete(new Range(result.length, op.length)) inputCursor += op.length } else { @@ -352,6 +345,8 @@ class TextOperation extends EditOperation { throw new TextOperation.TooLongError(operation, result.length) } + file.trackedChanges.applyTextOperation(this) + file.content = result } @@ -400,44 +395,36 @@ class TextOperation extends EditOperation { for (let i = 0, l = ops.length; i < l; i++) { const op = ops[i] if (op instanceof RetainOp) { - // Where we need to end up after the retains - const target = strIndex + op.length - // A previous retain could have overriden some tracking info. Now we - // need to restore it. 
- const previousRanges = previousState.trackedChanges.inRange( - new Range(strIndex, op.length) - ) - - let removeTrackingInfoIfNeeded if (op.tracking) { - removeTrackingInfoIfNeeded = new ClearTrackingProps() - } + // Where we need to end up after the retains + const target = strIndex + op.length + // A previous retain could have overriden some tracking info. Now we + // need to restore it. + const previousChanges = previousState.trackedChanges.intersectRange( + new Range(strIndex, op.length) + ) - for (const trackedChange of previousRanges) { - if (strIndex < trackedChange.range.start) { - inverse.retain(trackedChange.range.start - strIndex, { - tracking: removeTrackingInfoIfNeeded, + for (const change of previousChanges) { + if (strIndex < change.range.start) { + inverse.retain(change.range.start - strIndex, { + tracking: new ClearTrackingProps(), + }) + strIndex = change.range.start + } + inverse.retain(change.range.length, { + tracking: change.tracking, }) - strIndex = trackedChange.range.start + strIndex += change.range.length } - if (trackedChange.range.end < strIndex + op.length) { - inverse.retain(trackedChange.range.length, { - tracking: trackedChange.tracking, + if (strIndex < target) { + inverse.retain(target - strIndex, { + tracking: new ClearTrackingProps(), }) - strIndex = trackedChange.range.end + strIndex = target } - if (trackedChange.range.end !== strIndex) { - // No need to split the range at the end - const [left] = trackedChange.range.splitAt(strIndex) - inverse.retain(left.length, { tracking: trackedChange.tracking }) - strIndex = left.end - } - } - if (strIndex < target) { - inverse.retain(target - strIndex, { - tracking: removeTrackingInfoIfNeeded, - }) - strIndex = target + } else { + inverse.retain(op.length) + strIndex += op.length } } else if (op instanceof InsertOp) { inverse.remove(op.insertion.length) diff --git a/libraries/overleaf-editor-core/lib/range.js b/libraries/overleaf-editor-core/lib/range.js index bc47632f92..b3fb2bd78b 
100644 --- a/libraries/overleaf-editor-core/lib/range.js +++ b/libraries/overleaf-editor-core/lib/range.js @@ -86,10 +86,32 @@ class Range { } /** - * @param {Range} range + * Does this range overlap another range? + * + * Overlapping means that the two ranges have at least one character in common + * + * @param {Range} other - the other range */ - overlaps(range) { - return this.start < range.end && this.end > range.start + overlaps(other) { + return this.start < other.end && this.end > other.start + } + + /** + * Does this range overlap the start of another range? + * + * @param {Range} other - the other range + */ + overlapsStart(other) { + return this.start <= other.start && this.end > other.start + } + + /** + * Does this range overlap the end of another range? + * + * @param {Range} other - the other range + */ + overlapsEnd(other) { + return this.start < other.end && this.end >= other.end } /** @@ -227,6 +249,26 @@ class Range { ) return [rangeUpToCursor, rangeAfterCursor] } + + /** + * Returns the intersection of this range with another range + * + * @param {Range} other - the other range + * @return {Range | null} the intersection or null if the intersection is empty + */ + intersect(other) { + if (this.contains(other)) { + return other + } else if (other.contains(this)) { + return this + } else if (other.overlapsStart(this)) { + return new Range(this.pos, other.end - this.start) + } else if (other.overlapsEnd(this)) { + return new Range(other.pos, this.end - other.start) + } else { + return null + } + } } module.exports = Range diff --git a/libraries/overleaf-editor-core/test/lazy_string_file_data.test.js b/libraries/overleaf-editor-core/test/lazy_string_file_data.test.js index 4c9f4aa497..946e6cd5d1 100644 --- a/libraries/overleaf-editor-core/test/lazy_string_file_data.test.js +++ b/libraries/overleaf-editor-core/test/lazy_string_file_data.test.js @@ -193,4 +193,13 @@ describe('LazyStringFileData', function () { 
expect(fileData.getStringLength()).to.equal(longString.length) expect(fileData.getOperations()).to.have.length(1) }) + + it('truncates its operations after being stored', async function () { + const testHash = File.EMPTY_FILE_HASH + const fileData = new LazyStringFileData(testHash, undefined, 0) + fileData.edit(new TextOperation().insert('abc')) + const stored = await fileData.store(this.blobStore) + expect(fileData.hash).to.equal(stored.hash) + expect(fileData.operations).to.deep.equal([]) + }) }) diff --git a/libraries/overleaf-editor-core/test/range.test.js b/libraries/overleaf-editor-core/test/range.test.js index daad8fd6ed..9a048d5c03 100644 --- a/libraries/overleaf-editor-core/test/range.test.js +++ b/libraries/overleaf-editor-core/test/range.test.js @@ -1,4 +1,3 @@ -// @ts-check 'use strict' const { expect } = require('chai') @@ -449,4 +448,44 @@ describe('Range', function () { expect(() => range.insertAt(16, 3)).to.throw() }) }) + + describe('intersect', function () { + it('should handle partially overlapping ranges', function () { + const range1 = new Range(5, 10) + const range2 = new Range(3, 6) + const intersection1 = range1.intersect(range2) + expect(intersection1.pos).to.equal(5) + expect(intersection1.length).to.equal(4) + const intersection2 = range2.intersect(range1) + expect(intersection2.pos).to.equal(5) + expect(intersection2.length).to.equal(4) + }) + + it('should intersect with itself', function () { + const range = new Range(5, 10) + const intersection = range.intersect(range) + expect(intersection.pos).to.equal(5) + expect(intersection.length).to.equal(10) + }) + + it('should handle nested ranges', function () { + const range1 = new Range(5, 10) + const range2 = new Range(7, 2) + const intersection1 = range1.intersect(range2) + expect(intersection1.pos).to.equal(7) + expect(intersection1.length).to.equal(2) + const intersection2 = range2.intersect(range1) + expect(intersection2.pos).to.equal(7) + expect(intersection2.length).to.equal(2) + }) 
+ + it('should handle disconnected ranges', function () { + const range1 = new Range(5, 10) + const range2 = new Range(20, 30) + const intersection1 = range1.intersect(range2) + expect(intersection1).to.be.null + const intersection2 = range2.intersect(range1) + expect(intersection2).to.be.null + }) + }) }) diff --git a/libraries/overleaf-editor-core/test/scan_op.test.js b/libraries/overleaf-editor-core/test/scan_op.test.js index 80ab69114e..98f4834d48 100644 --- a/libraries/overleaf-editor-core/test/scan_op.test.js +++ b/libraries/overleaf-editor-core/test/scan_op.test.js @@ -107,7 +107,7 @@ describe('RetainOp', function () { expect(op1.equals(new RetainOp(3))).to.be.true }) - it('cannot merge with another RetainOp if tracking info is different', function () { + it('cannot merge with another RetainOp if the tracking user is different', function () { const op1 = new RetainOp( 4, new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) @@ -120,14 +120,14 @@ describe('RetainOp', function () { expect(() => op1.mergeWith(op2)).to.throw(Error) }) - it('can merge with another RetainOp if tracking info is the same', function () { + it('can merge with another RetainOp if the tracking user is the same', function () { const op1 = new RetainOp( 4, new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) ) const op2 = new RetainOp( 4, - new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) + new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:01.000Z')) ) op1.mergeWith(op2) expect( @@ -310,7 +310,7 @@ describe('InsertOp', function () { expect(() => op1.mergeWith(op2)).to.throw(Error) }) - it('cannot merge with another InsertOp if tracking info is different', function () { + it('cannot merge with another InsertOp if tracking user is different', function () { const op1 = new InsertOp( 'a', new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) @@ -323,7 +323,7 @@ describe('InsertOp', function 
() { expect(() => op1.mergeWith(op2)).to.throw(Error) }) - it('can merge with another InsertOp if tracking and comment info is the same', function () { + it('can merge with another InsertOp if tracking user and comment info is the same', function () { const op1 = new InsertOp( 'a', new TrackingProps( @@ -338,7 +338,7 @@ describe('InsertOp', function () { new TrackingProps( 'insert', 'user1', - new Date('2024-01-01T00:00:00.000Z') + new Date('2024-01-01T00:00:01.000Z') ), ['1', '2'] ) diff --git a/libraries/overleaf-editor-core/test/text_operation.test.js b/libraries/overleaf-editor-core/test/text_operation.test.js index fa9bc62dc3..43b8c707a6 100644 --- a/libraries/overleaf-editor-core/test/text_operation.test.js +++ b/libraries/overleaf-editor-core/test/text_operation.test.js @@ -322,6 +322,47 @@ describe('TextOperation', function () { new TextOperation().retain(4).remove(4).retain(3) ) }) + + it('undoing a tracked delete restores the tracked changes', function () { + expectInverseToLeadToInitialState( + new StringFileData( + 'the quick brown fox jumps over the lazy dog', + undefined, + [ + { + range: { pos: 5, length: 5 }, + tracking: { + ts: '2023-01-01T00:00:00.000Z', + type: 'insert', + userId: 'user1', + }, + }, + { + range: { pos: 12, length: 3 }, + tracking: { + ts: '2023-01-01T00:00:00.000Z', + type: 'delete', + userId: 'user1', + }, + }, + { + range: { pos: 18, length: 5 }, + tracking: { + ts: '2023-01-01T00:00:00.000Z', + type: 'insert', + userId: 'user1', + }, + }, + ] + ), + new TextOperation() + .retain(7) + .retain(13, { + tracking: new TrackingProps('delete', 'user1', new Date()), + }) + .retain(23) + ) + }) }) describe('compose', function () { diff --git a/libraries/promise-utils/buildscript.txt b/libraries/promise-utils/buildscript.txt index 73dec381c1..32c9fc8793 100644 --- a/libraries/promise-utils/buildscript.txt +++ b/libraries/promise-utils/buildscript.txt @@ -1,6 +1,6 @@ promise-utils --dependencies=None ---docker-repos=gcr.io/overleaf-ops 
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/ranges-tracker/buildscript.txt b/libraries/ranges-tracker/buildscript.txt index 6276182679..be28fc1d80 100644 --- a/libraries/ranges-tracker/buildscript.txt +++ b/libraries/ranges-tracker/buildscript.txt @@ -1,6 +1,6 @@ ranges-tracker --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/redis-wrapper/buildscript.txt b/libraries/redis-wrapper/buildscript.txt index 1e4489a655..395bc706ac 100644 --- a/libraries/redis-wrapper/buildscript.txt +++ b/libraries/redis-wrapper/buildscript.txt @@ -1,6 +1,6 @@ redis-wrapper --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/settings/buildscript.txt b/libraries/settings/buildscript.txt index 925234f561..d4daff96d5 100644 --- a/libraries/settings/buildscript.txt +++ b/libraries/settings/buildscript.txt @@ -1,6 +1,6 @@ settings --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/stream-utils/buildscript.txt b/libraries/stream-utils/buildscript.txt index a04310e77f..1da6bdade9 100644 --- a/libraries/stream-utils/buildscript.txt +++ b/libraries/stream-utils/buildscript.txt @@ -1,6 +1,6 @@ stream-utils --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/package-lock.json b/package-lock.json index 73b722b1f5..2a3bb7696d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5943,15 +5943,16 @@ "integrity": 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/@grpc/grpc-js": { - "version": "1.8.22", - "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.8.22.tgz", - "integrity": "sha512-oAjDdN7fzbUi+4hZjKG96MR6KTEubAeMpQEb+77qy+3r0Ua5xTFuie6JOLr4ZZgl5g+W5/uRTS2M1V8mVAFPuA==", + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.13.4.tgz", + "integrity": "sha512-GsFaMXCkMqkKIvwCQjCrwH+GHbPKBjhwo/8ZuUkWHqbI73Kky9I+pQltrlT0+MWpedCoosda53lgjYfyEPgxBg==", + "license": "Apache-2.0", "dependencies": { - "@grpc/proto-loader": "^0.7.0", - "@types/node": ">=12.12.47" + "@grpc/proto-loader": "^0.7.13", + "@js-sdsl/ordered-map": "^4.4.2" }, "engines": { - "node": "^8.13.0 || >=10.10.0" + "node": ">=12.10.0" } }, "node_modules/@grpc/proto-loader": { @@ -6989,6 +6990,18 @@ "dev": true, "optional": true }, + "node_modules/@noble/hashes": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.8.0.tgz", + "integrity": "sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==", + "license": "MIT", + "engines": { + "node": "^14.21.3 || >=16" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, "node_modules/@node-oauth/formats": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/@node-oauth/formats/-/formats-1.0.0.tgz", @@ -8643,6 +8656,15 @@ "resolved": "services/web", "link": true }, + "node_modules/@paralleldrive/cuid2": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/@paralleldrive/cuid2/-/cuid2-2.2.2.tgz", + "integrity": "sha512-ZOBkgDwEdoYVlSeRbYYXs0S9MejQofiVYoTbKzy/6GQa39/q5tQU2IX46+shYnUkpEl3wc+J6wRlar7r2EK2xA==", + "license": "MIT", + "dependencies": { + "@noble/hashes": "^1.1.5" + } + }, "node_modules/@phosphor-icons/react": { "version": "2.1.7", "resolved": "https://registry.npmjs.org/@phosphor-icons/react/-/react-2.1.7.tgz", @@ -11575,29 
+11597,6 @@ "storybook": "^8.2.0 || ^8.3.0-0 || ^8.4.0-0 || ^8.5.0-0 || ^8.6.0-0" } }, - "node_modules/@stripe/react-stripe-js": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/@stripe/react-stripe-js/-/react-stripe-js-3.5.0.tgz", - "integrity": "sha512-oo5J2SNbuAUjE9XmQv/SOD7vgZCa1Y9OcZyRAfvQPkyrDrru35sg5c64ANdHEmOWUibism3+25rKdARSw3HOfA==", - "license": "MIT", - "dependencies": { - "prop-types": "^15.7.2" - }, - "peerDependencies": { - "@stripe/stripe-js": ">=1.44.1 <7.0.0", - "react": ">=16.8.0 <20.0.0", - "react-dom": ">=16.8.0 <20.0.0" - } - }, - "node_modules/@stripe/stripe-js": { - "version": "5.10.0", - "resolved": "https://registry.npmjs.org/@stripe/stripe-js/-/stripe-js-5.10.0.tgz", - "integrity": "sha512-PTigkxMdMUP6B5ISS7jMqJAKhgrhZwjprDqR1eATtFfh0OpKVNp110xiH+goeVdrJ29/4LeZJR4FaHHWstsu0A==", - "license": "MIT", - "engines": { - "node": ">=12.16" - } - }, "node_modules/@swc/helpers": { "version": "0.5.17", "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.17.tgz", @@ -15252,13 +15251,13 @@ } }, "node_modules/array-buffer-byte-length": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", - "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz", + "integrity": "sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.5", - "is-array-buffer": "^3.0.4" + "call-bound": "^1.0.3", + "is-array-buffer": "^3.0.5" }, "engines": { "node": ">= 0.4" @@ -15374,19 +15373,18 @@ } }, "node_modules/arraybuffer.prototype.slice": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", - "integrity": 
"sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz", + "integrity": "sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==", "license": "MIT", "dependencies": { "array-buffer-byte-length": "^1.0.1", - "call-bind": "^1.0.5", + "call-bind": "^1.0.8", "define-properties": "^1.2.1", - "es-abstract": "^1.22.3", - "es-errors": "^1.2.1", - "get-intrinsic": "^1.2.3", - "is-array-buffer": "^3.0.4", - "is-shared-array-buffer": "^1.0.2" + "es-abstract": "^1.23.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "is-array-buffer": "^3.0.4" }, "engines": { "node": ">= 0.4" @@ -15480,6 +15478,15 @@ "resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz", "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==" }, + "node_modules/async-function": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/async-function/-/async-function-1.0.0.tgz", + "integrity": "sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/async-lock": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/async-lock/-/async-lock-1.4.1.tgz", @@ -16049,24 +16056,32 @@ "optional": true }, "node_modules/bare-fs": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-4.0.1.tgz", - "integrity": "sha512-ilQs4fm/l9eMfWY2dY0WCIUplSUp7U0CT1vrqMg1MUdeZl4fypu5UP0XcDBK5WBQPJAKP1b7XEodISmekH/CEg==", + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-4.1.5.tgz", + "integrity": "sha512-1zccWBMypln0jEE05LzZt+V/8y8AQsQQqxtklqaIyg5nu6OAYFhZxPXinJTSG+kU5qyNmeLgcn9AW7eHiCHVLA==", "license": "Apache-2.0", "optional": true, "dependencies": { - 
"bare-events": "^2.0.0", + "bare-events": "^2.5.4", "bare-path": "^3.0.0", - "bare-stream": "^2.0.0" + "bare-stream": "^2.6.4" }, "engines": { - "bare": ">=1.7.0" + "bare": ">=1.16.0" + }, + "peerDependencies": { + "bare-buffer": "*" + }, + "peerDependenciesMeta": { + "bare-buffer": { + "optional": true + } } }, "node_modules/bare-os": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-3.6.0.tgz", - "integrity": "sha512-BUrFS5TqSBdA0LwHop4OjPJwisqxGy6JsWVqV6qaFoe965qqtaKfDzHY5T2YA1gUL0ZeeQeA+4BBc1FJTcHiPw==", + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-3.6.1.tgz", + "integrity": "sha512-uaIjxokhFidJP+bmmvKSgiMzj2sV5GPHaZVAIktcxcpCyBFFWO+YlikVAdhmUo2vYFvFhOXIAlldqV29L8126g==", "license": "Apache-2.0", "optional": true, "engines": { @@ -16948,15 +16963,44 @@ } }, "node_modules/call-bind": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", - "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", + "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", + "license": "MIT", "dependencies": { + "call-bind-apply-helpers": "^1.0.0", "es-define-property": "^1.0.0", - "es-errors": "^1.3.0", - "function-bind": "^1.1.2", "get-intrinsic": "^1.2.4", - "set-function-length": "^1.2.1" + "set-function-length": "^1.2.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + 
"es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" }, "engines": { "node": ">= 0.4" @@ -17445,7 +17489,8 @@ "node_modules/chownr": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", - "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", + "license": "ISC" }, "node_modules/chrome-trace-event": { "version": "1.0.3", @@ -17803,12 +17848,10 @@ "license": "MIT" }, "node_modules/commander": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", - "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", - "engines": { - "node": ">= 6" - } + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.11.0.tgz", + "integrity": "sha512-b0553uYA5YAEGgyYIGYROzKQ7X5RAqedkfjiZxwi0kL1g3bOaBNNZfYkzt/CL0umgD5wc9Jec2FbB98CjkMRvQ==", + "license": "MIT" }, "node_modules/common-path-prefix": { "version": "3.0.0", @@ -17923,46 +17966,20 @@ "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" }, "node_modules/concat-stream": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", - "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-2.0.0.tgz", + "integrity": 
"sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==", "engines": [ - "node >= 0.8" + "node >= 6.0" ], + "license": "MIT", "dependencies": { "buffer-from": "^1.0.0", "inherits": "^2.0.3", - "readable-stream": "^2.2.2", + "readable-stream": "^3.0.2", "typedarray": "^0.0.6" } }, - "node_modules/concat-stream/node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" - }, - "node_modules/concat-stream/node_modules/readable-stream": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", - "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/concat-stream/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, "node_modules/confbox": { "version": "0.1.8", "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.8.tgz", @@ -18408,6 +18425,20 @@ "node": ">=10" } }, + "node_modules/cpu-features": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", + "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", + "hasInstallScript": true, + "optional": true, + "dependencies": { + "buildcheck": "~0.0.6", + "nan": "^2.19.0" + }, + "engines": { + "node": 
">=10.0.0" + } + }, "node_modules/crc-32": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", @@ -19453,14 +19484,14 @@ } }, "node_modules/data-view-buffer": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz", - "integrity": "sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz", + "integrity": "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.6", + "call-bound": "^1.0.3", "es-errors": "^1.3.0", - "is-data-view": "^1.0.1" + "is-data-view": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -19470,29 +19501,29 @@ } }, "node_modules/data-view-byte-length": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz", - "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz", + "integrity": "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", + "call-bound": "^1.0.3", "es-errors": "^1.3.0", - "is-data-view": "^1.0.1" + "is-data-view": "^1.0.2" }, "engines": { "node": ">= 0.4" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "url": "https://github.com/sponsors/inspect-js" } }, "node_modules/data-view-byte-offset": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz", - "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==", + "version": 
"1.0.1", + "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz", + "integrity": "sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.6", + "call-bound": "^1.0.2", "es-errors": "^1.3.0", "is-data-view": "^1.0.1" }, @@ -19903,7 +19934,6 @@ "version": "1.0.4", "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", "integrity": "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==", - "dev": true, "dependencies": { "asap": "^2.0.0", "wrappy": "1" @@ -19975,6 +20005,88 @@ "node": ">=6" } }, + "node_modules/docker-modem": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.6.tgz", + "integrity": "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", + "license": "Apache-2.0", + "dependencies": { + "debug": "^4.1.1", + "readable-stream": "^3.5.0", + "split-ca": "^1.0.1", + "ssh2": "^1.15.0" + }, + "engines": { + "node": ">= 8.0" + } + }, + "node_modules/dockerode": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.7.tgz", + "integrity": "sha512-R+rgrSRTRdU5mH14PZTCPZtW/zw3HDWNTS/1ZAQpL/5Upe/ye5K9WQkIysu4wBoiMwKynsz0a8qWuGsHgEvSAA==", + "license": "Apache-2.0", + "dependencies": { + "@balena/dockerignore": "^1.0.2", + "@grpc/grpc-js": "^1.11.1", + "@grpc/proto-loader": "^0.7.13", + "docker-modem": "^5.0.6", + "protobufjs": "^7.3.2", + "tar-fs": "~2.1.2", + "uuid": "^10.0.0" + }, + "engines": { + "node": ">= 8.0" + } + }, + "node_modules/dockerode/node_modules/protobufjs": { + "version": "7.5.3", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.5.3.tgz", + "integrity": "sha512-sildjKwVqOI2kmFDiXQ6aEB0fjYTafpEvIBs8tOR8qI4spuL9OPROLVu2qZqi/xgCfsHIwVqlaF8JBjWFHnKbw==", + "hasInstallScript": true, + "license": "BSD-3-Clause", + 
"dependencies": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/node": ">=13.7.0", + "long": "^5.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/dockerode/node_modules/tar-fs": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.3.tgz", + "integrity": "sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg==", + "license": "MIT", + "dependencies": { + "chownr": "^1.1.1", + "mkdirp-classic": "^0.5.2", + "pump": "^3.0.0", + "tar-stream": "^2.1.4" + } + }, + "node_modules/dockerode/node_modules/uuid": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, "node_modules/doctrine": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", @@ -20202,6 +20314,20 @@ "node": ">=0.10" } }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/duplexify": { "version": "4.1.3", "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz", @@ 
-20533,57 +20659,65 @@ } }, "node_modules/es-abstract": { - "version": "1.23.3", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.3.tgz", - "integrity": "sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==", + "version": "1.24.0", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz", + "integrity": "sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg==", "license": "MIT", "dependencies": { - "array-buffer-byte-length": "^1.0.1", - "arraybuffer.prototype.slice": "^1.0.3", + "array-buffer-byte-length": "^1.0.2", + "arraybuffer.prototype.slice": "^1.0.4", "available-typed-arrays": "^1.0.7", - "call-bind": "^1.0.7", - "data-view-buffer": "^1.0.1", - "data-view-byte-length": "^1.0.1", - "data-view-byte-offset": "^1.0.0", - "es-define-property": "^1.0.0", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "data-view-buffer": "^1.0.2", + "data-view-byte-length": "^1.0.2", + "data-view-byte-offset": "^1.0.1", + "es-define-property": "^1.0.1", "es-errors": "^1.3.0", - "es-object-atoms": "^1.0.0", - "es-set-tostringtag": "^2.0.3", - "es-to-primitive": "^1.2.1", - "function.prototype.name": "^1.1.6", - "get-intrinsic": "^1.2.4", - "get-symbol-description": "^1.0.2", - "globalthis": "^1.0.3", - "gopd": "^1.0.1", + "es-object-atoms": "^1.1.1", + "es-set-tostringtag": "^2.1.0", + "es-to-primitive": "^1.3.0", + "function.prototype.name": "^1.1.8", + "get-intrinsic": "^1.3.0", + "get-proto": "^1.0.1", + "get-symbol-description": "^1.1.0", + "globalthis": "^1.0.4", + "gopd": "^1.2.0", "has-property-descriptors": "^1.0.2", - "has-proto": "^1.0.3", - "has-symbols": "^1.0.3", + "has-proto": "^1.2.0", + "has-symbols": "^1.1.0", "hasown": "^2.0.2", - "internal-slot": "^1.0.7", - "is-array-buffer": "^3.0.4", + "internal-slot": "^1.1.0", + "is-array-buffer": "^3.0.5", "is-callable": "^1.2.7", - "is-data-view": "^1.0.1", + "is-data-view": "^1.0.2", 
"is-negative-zero": "^2.0.3", - "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.3", - "is-string": "^1.0.7", - "is-typed-array": "^1.1.13", - "is-weakref": "^1.0.2", - "object-inspect": "^1.13.1", + "is-regex": "^1.2.1", + "is-set": "^2.0.3", + "is-shared-array-buffer": "^1.0.4", + "is-string": "^1.1.1", + "is-typed-array": "^1.1.15", + "is-weakref": "^1.1.1", + "math-intrinsics": "^1.1.0", + "object-inspect": "^1.13.4", "object-keys": "^1.1.1", - "object.assign": "^4.1.5", - "regexp.prototype.flags": "^1.5.2", - "safe-array-concat": "^1.1.2", - "safe-regex-test": "^1.0.3", - "string.prototype.trim": "^1.2.9", - "string.prototype.trimend": "^1.0.8", + "object.assign": "^4.1.7", + "own-keys": "^1.0.1", + "regexp.prototype.flags": "^1.5.4", + "safe-array-concat": "^1.1.3", + "safe-push-apply": "^1.0.0", + "safe-regex-test": "^1.1.0", + "set-proto": "^1.0.0", + "stop-iteration-iterator": "^1.1.0", + "string.prototype.trim": "^1.2.10", + "string.prototype.trimend": "^1.0.9", "string.prototype.trimstart": "^1.0.8", - "typed-array-buffer": "^1.0.2", - "typed-array-byte-length": "^1.0.1", - "typed-array-byte-offset": "^1.0.2", - "typed-array-length": "^1.0.6", - "unbox-primitive": "^1.0.2", - "which-typed-array": "^1.1.15" + "typed-array-buffer": "^1.0.3", + "typed-array-byte-length": "^1.0.3", + "typed-array-byte-offset": "^1.0.4", + "typed-array-length": "^1.0.7", + "unbox-primitive": "^1.1.0", + "which-typed-array": "^1.1.19" }, "engines": { "node": ">= 0.4" @@ -20593,12 +20727,10 @@ } }, "node_modules/es-define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", - "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", - "dependencies": { - "get-intrinsic": "^1.2.4" - }, + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": 
"sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", "engines": { "node": ">= 0.4" } @@ -20639,9 +20771,9 @@ "license": "MIT" }, "node_modules/es-object-atoms": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", - "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", "license": "MIT", "dependencies": { "es-errors": "^1.3.0" @@ -20651,14 +20783,15 @@ } }, "node_modules/es-set-tostringtag": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", - "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", "license": "MIT", "dependencies": { - "get-intrinsic": "^1.2.4", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", "has-tostringtag": "^1.0.2", - "hasown": "^2.0.1" + "hasown": "^2.0.2" }, "engines": { "node": ">= 0.4" @@ -20674,13 +20807,14 @@ } }, "node_modules/es-to-primitive": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", - "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.3.0.tgz", + "integrity": "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==", 
+ "license": "MIT", "dependencies": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" + "is-callable": "^1.2.7", + "is-date-object": "^1.0.5", + "is-symbol": "^1.0.4" }, "engines": { "node": ">= 0.4" @@ -22835,8 +22969,7 @@ "node_modules/fast-safe-stringify": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", - "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", - "dev": true + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" }, "node_modules/fast-text-encoding": { "version": "1.0.3", @@ -23331,11 +23464,18 @@ } }, "node_modules/for-each": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", - "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", + "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", + "license": "MIT", "dependencies": { - "is-callable": "^1.1.3" + "is-callable": "^1.2.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/for-in": { @@ -23497,6 +23637,7 @@ "resolved": "https://registry.npmjs.org/formidable/-/formidable-1.2.6.tgz", "integrity": "sha512-KcpbcpuLNOwrEjnbpMC0gS+X8ciDoZE1kkqzat4a8vrprf+s9pKNQ/QIwWfbfs4ltgmFl3MD177SNTkve3BwGQ==", "deprecated": "Please upgrade to latest, formidable@v2 or formidable@v3! 
Check these notes: https://bit.ly/2ZEqIau", + "license": "MIT", "funding": { "url": "https://ko-fi.com/tunnckoCore/commissions" } @@ -23672,14 +23813,17 @@ } }, "node_modules/function.prototype.name": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz", - "integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==", + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.8.tgz", + "integrity": "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==", + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "functions-have-names": "^1.2.3" + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "functions-have-names": "^1.2.3", + "hasown": "^2.0.2", + "is-callable": "^1.2.7" }, "engines": { "node": ">= 0.4" @@ -23791,15 +23935,21 @@ } }, "node_modules/get-intrinsic": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", - "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", - "has-proto": "^1.0.1", - "has-symbols": "^1.0.3", - "hasown": "^2.0.0" + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" }, "engines": { "node": ">= 0.4" @@ -23827,6 +23977,19 @@ "url": 
"https://github.com/sponsors/sindresorhus" } }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/get-stream": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", @@ -23843,14 +24006,14 @@ } }, "node_modules/get-symbol-description": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", - "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.1.0.tgz", + "integrity": "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.5", + "call-bound": "^1.0.3", "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.4" + "get-intrinsic": "^1.2.6" }, "engines": { "node": ">= 0.4" @@ -24070,11 +24233,13 @@ } }, "node_modules/globalthis": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", - "integrity": "sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", + "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", + "license": "MIT", "dependencies": { - "define-properties": "^1.1.3" + "define-properties": "^1.2.1", + "gopd": "^1.0.1" }, "engines": { "node": ">= 0.4" @@ -24621,11 +24786,12 @@ "integrity": 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/gopd": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", - "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", - "dependencies": { - "get-intrinsic": "^1.1.3" + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -24645,6 +24811,7 @@ "version": "2.1.8", "resolved": "https://registry.npmjs.org/graphlib/-/graphlib-2.1.8.tgz", "integrity": "sha512-jcLLfkpoVGmH7/InMC/1hIvOPSUh38oJtGhvrOFGzioE1DZ+0YW16RgmOJhHiuWTvGiJQ9Z1Ik43JvkRPRvE+A==", + "license": "MIT", "dependencies": { "lodash": "^4.17.15" } @@ -24865,10 +25032,13 @@ } }, "node_modules/has-proto": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", - "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.2.0.tgz", + "integrity": "sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==", "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.0" + }, "engines": { "node": ">= 0.4" }, @@ -24877,9 +25047,10 @@ } }, "node_modules/has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": 
"sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -25837,14 +26008,14 @@ } }, "node_modules/internal-slot": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", - "integrity": "sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", + "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==", "license": "MIT", "dependencies": { "es-errors": "^1.3.0", - "hasown": "^2.0.0", - "side-channel": "^1.0.4" + "hasown": "^2.0.2", + "side-channel": "^1.1.0" }, "engines": { "node": ">= 0.4" @@ -26022,13 +26193,14 @@ } }, "node_modules/is-array-buffer": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", - "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz", + "integrity": "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.1" + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "get-intrinsic": "^1.2.6" }, "engines": { "node": ">= 0.4" @@ -26043,12 +26215,35 @@ "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", "dev": true }, - "node_modules/is-bigint": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", - "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", + "node_modules/is-async-function": { + "version": "2.1.1", + "resolved": 
"https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.1.tgz", + "integrity": "sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==", + "license": "MIT", "dependencies": { - "has-bigints": "^1.0.1" + "async-function": "^1.0.0", + "call-bound": "^1.0.3", + "get-proto": "^1.0.1", + "has-tostringtag": "^1.0.2", + "safe-regex-test": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-bigint": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.1.0.tgz", + "integrity": "sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==", + "license": "MIT", + "dependencies": { + "has-bigints": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -26067,12 +26262,13 @@ } }, "node_modules/is-boolean-object": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", - "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.2.tgz", + "integrity": "sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==", + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -26137,11 +26333,13 @@ } }, "node_modules/is-data-view": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz", - "integrity": "sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==", + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.2.tgz", + "integrity": "sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==", "license": "MIT", "dependencies": { + "call-bound": "^1.0.2", + "get-intrinsic": "^1.2.6", "is-typed-array": "^1.1.13" }, "engines": { @@ -26152,11 +26350,13 @@ } }, "node_modules/is-date-object": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", - "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.1.0.tgz", + "integrity": "sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==", + "license": "MIT", "dependencies": { - "has-tostringtag": "^1.0.0" + "call-bound": "^1.0.2", + "has-tostringtag": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -26221,6 +26421,21 @@ "node": ">=0.10.0" } }, + "node_modules/is-finalizationregistry": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz", + "integrity": "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -26318,10 +26533,13 @@ } }, "node_modules/is-map": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.2.tgz", - "integrity": "sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg==", - "dev": true, + "version": "2.0.3", + "resolved": 
"https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", + "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -26383,11 +26601,13 @@ } }, "node_modules/is-number-object": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", - "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz", + "integrity": "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==", + "license": "MIT", "dependencies": { - "has-tostringtag": "^1.0.0" + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -26446,12 +26666,15 @@ "integrity": "sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ==" }, "node_modules/is-regex": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", - "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", + "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" + "call-bound": "^1.0.2", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" }, "engines": { "node": ">= 0.4" @@ -26461,10 +26684,13 @@ } }, "node_modules/is-set": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.2.tgz", - "integrity": 
"sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g==", - "dev": true, + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", + "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -26477,12 +26703,12 @@ "license": "MIT" }, "node_modules/is-shared-array-buffer": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", - "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz", + "integrity": "sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.7" + "call-bound": "^1.0.3" }, "engines": { "node": ">= 0.4" @@ -26503,11 +26729,13 @@ } }, "node_modules/is-string": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", - "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.1.1.tgz", + "integrity": "sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==", + "license": "MIT", "dependencies": { - "has-tostringtag": "^1.0.0" + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -26517,11 +26745,14 @@ } }, "node_modules/is-symbol": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", - "integrity": 
"sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.1.1.tgz", + "integrity": "sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==", + "license": "MIT", "dependencies": { - "has-symbols": "^1.0.2" + "call-bound": "^1.0.2", + "has-symbols": "^1.1.0", + "safe-regex-test": "^1.1.0" }, "engines": { "node": ">= 0.4" @@ -26531,12 +26762,12 @@ } }, "node_modules/is-typed-array": { - "version": "1.1.13", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", - "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", + "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", "license": "MIT", "dependencies": { - "which-typed-array": "^1.1.14" + "which-typed-array": "^1.1.16" }, "engines": { "node": ">= 0.4" @@ -26577,33 +26808,43 @@ "integrity": "sha512-X/kiF3Xndj6WI7l/yLyzR7V1IbQd6L4S4cewSL0fRciemPmHbaXIKR2qtf+zseH+lbMG0vFp4HvCUe7amGZVhw==" }, "node_modules/is-weakmap": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.1.tgz", - "integrity": "sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA==", - "dev": true, + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", + "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-weakref": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", 
- "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.1.1.tgz", + "integrity": "sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==", + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2" + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-weakset": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.2.tgz", - "integrity": "sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg==", - "dev": true, + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.4.tgz", + "integrity": "sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==", + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" + "call-bound": "^1.0.3", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -27324,6 +27565,7 @@ "version": "3.0.15", "resolved": "https://registry.npmjs.org/json-refs/-/json-refs-3.0.15.tgz", "integrity": "sha512-0vOQd9eLNBL18EGl5yYaO44GhixmImes2wiYn9Z3sag3QnehWrYWlB9AFtMxCL2Bj3fyxgDYkxGFEU/chlYssw==", + "license": "MIT", "dependencies": { "commander": "~4.1.1", "graphlib": "^2.1.8", @@ -27345,14 +27587,25 @@ "version": "1.0.10", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "license": "MIT", "dependencies": { "sprintf-js": "~1.0.2" } }, + "node_modules/json-refs/node_modules/commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", 
+ "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, "node_modules/json-refs/node_modules/js-yaml": { "version": "3.14.1", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "license": "MIT", "dependencies": { "argparse": "^1.0.7", "esprima": "^4.0.0" @@ -27365,6 +27618,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "license": "MIT", "engines": { "node": ">=8" } @@ -28130,12 +28384,14 @@ "node_modules/lodash._arraypool": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._arraypool/-/lodash._arraypool-2.4.1.tgz", - "integrity": "sha1-6I7suS4ruEyQZWEv2VigcZzUf5Q=" + "integrity": "sha512-tC2aLC7bbkDXKNrjDu9OLiVx9pFIvjinID2eD9PzNdAQGZScWUd/h8faqOw5d6oLsOvFRCRbz1ASoB+deyMVUw==", + "license": "MIT" }, "node_modules/lodash._basebind": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._basebind/-/lodash._basebind-2.4.1.tgz", - "integrity": "sha1-6UC5690nwyfgqNqxtVkWxTQelXU=", + "integrity": "sha512-VGHm6DH+1UiuafQdE/DNMqxOcSyhRu0xO9+jPDq7xITRn5YOorGrHVQmavMVXCYmTm80YRTZZCn/jTW7MokwLg==", + "license": "MIT", "dependencies": { "lodash._basecreate": "~2.4.1", "lodash._setbinddata": "~2.4.1", @@ -28146,7 +28402,8 @@ "node_modules/lodash._baseclone": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._baseclone/-/lodash._baseclone-2.4.1.tgz", - "integrity": "sha1-MPgj5X4X43NdODvWK2Czh1Q7QYY=", + "integrity": "sha512-+zJVXs0VxC/Au+/7foiKzw8UaWvfSfPh20XhqK/6HFQiUeclL5fz05zY7G9yDAFItAKKZwB4cgpzGvxiwuG1wQ==", + "license": "MIT", "dependencies": { "lodash._getarray": "~2.4.1", "lodash._releasearray": "~2.4.1", @@ -28161,7 
+28418,8 @@ "node_modules/lodash._basecreate": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._basecreate/-/lodash._basecreate-2.4.1.tgz", - "integrity": "sha1-+Ob1tXip405UEXm1a47uv0oofgg=", + "integrity": "sha512-8JJ3FnMPm54t3BwPLk8q8mPyQKQXm/rt9df+awr4NGtyJrtcCXM3Of1I86S6jVy1b4yAyFBb8wbKPEauuqzRmQ==", + "license": "MIT", "dependencies": { "lodash._isnative": "~2.4.1", "lodash.isobject": "~2.4.1", @@ -28171,7 +28429,8 @@ "node_modules/lodash._basecreatecallback": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._basecreatecallback/-/lodash._basecreatecallback-2.4.1.tgz", - "integrity": "sha1-fQsmdknLKeehOdAQO3wR+uhOSFE=", + "integrity": "sha512-SLczhg860fGW7AKlYcuOFstDtJuQhaANlJ4Y/jrOoRxhmVtK41vbJDH3OefVRSRkSCQo4HI82QVkAVsoGa5gSw==", + "license": "MIT", "dependencies": { "lodash._setbinddata": "~2.4.1", "lodash.bind": "~2.4.1", @@ -28182,7 +28441,8 @@ "node_modules/lodash._basecreatewrapper": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._basecreatewrapper/-/lodash._basecreatewrapper-2.4.1.tgz", - "integrity": "sha1-TTHy595+E0+/KAN2K4FQsyUZZm8=", + "integrity": "sha512-x2ja1fa/qmzbizuXgVM4QAP9svtMbdxjG8Anl9bCeDAwLOVQ1vLrA0hLb/NkpbGi9evjtkl0aWLTEoOlUdBPQA==", + "license": "MIT", "dependencies": { "lodash._basecreate": "~2.4.1", "lodash._setbinddata": "~2.4.1", @@ -28193,7 +28453,8 @@ "node_modules/lodash._createwrapper": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._createwrapper/-/lodash._createwrapper-2.4.1.tgz", - "integrity": "sha1-UdaVeXPaTtVW43KQ2MGhjFPeFgc=", + "integrity": "sha512-5TCfLt1haQpsa7bgLYRKNNE4yqhO4ZxIayN1btQmazMchO6Q8JYFRMqbJ3W+uNmMm4R0Jw7KGkZX5YfDDnywuw==", + "license": "MIT", "dependencies": { "lodash._basebind": "~2.4.1", "lodash._basecreatewrapper": "~2.4.1", @@ -28204,7 +28465,8 @@ "node_modules/lodash._getarray": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._getarray/-/lodash._getarray-2.4.1.tgz", - "integrity": 
"sha1-+vH3+BD6mFolHCGHQESBCUg55e4=", + "integrity": "sha512-iIrScwY3atGvLVbQL/+CNUznaPwBJg78S/JO4cTUFXRkRsZgEBhscB27cVoT4tsIOUyFu/5M/0umfHNGJ6wYwg==", + "license": "MIT", "dependencies": { "lodash._arraypool": "~2.4.1" } @@ -28212,22 +28474,26 @@ "node_modules/lodash._isnative": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._isnative/-/lodash._isnative-2.4.1.tgz", - "integrity": "sha1-PqZAS3hKe+g2x7V1gOHN95sUgyw=" + "integrity": "sha512-BOlKGKNHhCHswGOWtmVb5zBygyxN7EmTuzVOSQI6QSoGhG+kvv71gICFS1TBpnqvT1n53txK8CDK3u5D2/GZxQ==", + "license": "MIT" }, "node_modules/lodash._maxpoolsize": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._maxpoolsize/-/lodash._maxpoolsize-2.4.1.tgz", - "integrity": "sha1-nUgvRjuOZq++WcLBTtsRcGAXIzQ=" + "integrity": "sha512-xKDem1BxoIfcCtaJHotjtyfdIvZO9qrF+mv3G1+ngQmaI3MJt3Qm46i9HLk/CbzABbavUrr1/EomQT8KxtsrYA==", + "license": "MIT" }, "node_modules/lodash._objecttypes": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._objecttypes/-/lodash._objecttypes-2.4.1.tgz", - "integrity": "sha1-fAt/admKH3ZSn4kLDNsbTf7BHBE=" + "integrity": "sha512-XpqGh1e7hhkOzftBfWE7zt+Yn9mVHFkDhicVttvKLsoCMLVVL+xTQjfjB4X4vtznauxv0QZ5ZAeqjvat0dh62Q==", + "license": "MIT" }, "node_modules/lodash._releasearray": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._releasearray/-/lodash._releasearray-2.4.1.tgz", - "integrity": "sha1-phOWMNdtFTawfdyAliiJsIL2pkE=", + "integrity": "sha512-wwCwWX8PK/mYR5VZjcU5JFl6py/qrfLGMxzpKOfSqgA1PaZ6Z625CZLCxH1KsqyxSkOFmNm+mEYjeDpXlM4hrg==", + "license": "MIT", "dependencies": { "lodash._arraypool": "~2.4.1", "lodash._maxpoolsize": "~2.4.1" @@ -28236,7 +28502,8 @@ "node_modules/lodash._setbinddata": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._setbinddata/-/lodash._setbinddata-2.4.1.tgz", - "integrity": "sha1-98IAzRuS7yNrOZ7s9zxkjReqlNI=", + "integrity": 
"sha512-Vx0XKzpg2DFbQw4wrp1xSWd2sfl3W/BG6bucSRZmftS1AzbWRemCmBQDxyQTNhlLNec428PXkuuja+VNBZgu2A==", + "license": "MIT", "dependencies": { "lodash._isnative": "~2.4.1", "lodash.noop": "~2.4.1" @@ -28245,7 +28512,8 @@ "node_modules/lodash._shimkeys": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._shimkeys/-/lodash._shimkeys-2.4.1.tgz", - "integrity": "sha1-bpzJZm/wgfC1psl4uD4kLmlJ0gM=", + "integrity": "sha512-lBrglYxLD/6KAJ8IEa5Lg+YHgNAL7FyKqXg4XOUI+Du/vtniLs1ZqS+yHNKPkK54waAgkdUnDOYaWf+rv4B+AA==", + "license": "MIT", "dependencies": { "lodash._objecttypes": "~2.4.1" } @@ -28253,12 +28521,14 @@ "node_modules/lodash._slice": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._slice/-/lodash._slice-2.4.1.tgz", - "integrity": "sha1-dFz0GlNZexj2iImFREBe+isG2Q8=" + "integrity": "sha512-+odPJa4PE2UgYnQgJgkLs0UD03QU78R2ivhrFnG9GdtYOZdE6ObxOj7KiUEUlqOOgatFT+ZqSypFjDSduTigKg==", + "license": "MIT" }, "node_modules/lodash.assign": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.assign/-/lodash.assign-2.4.1.tgz", - "integrity": "sha1-hMOVlt1xGBqXsGUpE6fJZ15Jsao=", + "integrity": "sha512-AqQ4AJz5buSx9ELXWt5dONwJyVPd4NTADMKhoVYWCugjoVf172/LpvVhwmSJn4g8/Dc0S8hxTe8rt5Dob3X9KQ==", + "license": "MIT", "dependencies": { "lodash._basecreatecallback": "~2.4.1", "lodash._objecttypes": "~2.4.1", @@ -28268,7 +28538,8 @@ "node_modules/lodash.bind": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.bind/-/lodash.bind-2.4.1.tgz", - "integrity": "sha1-XRn6AFyMTSNvr0dCx7eh/Kvikmc=", + "integrity": "sha512-hn2VWYZ+N9aYncRad4jORvlGgpFrn+axnPIWRvFxjk6CWcZH5b5alI8EymYsHITI23Z9wrW/+ORq+azrVFpOfw==", + "license": "MIT", "dependencies": { "lodash._createwrapper": "~2.4.1", "lodash._slice": "~2.4.1" @@ -28282,7 +28553,8 @@ "node_modules/lodash.clonedeep": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-2.4.1.tgz", - "integrity": "sha1-8pIDtAsS/uCkXTYxZIJZvrq8eGg=", + 
"integrity": "sha512-zj5vReFLkR+lJOBKP1wyteZ13zut/KSmXtdCBgxcy/m4UTitcBxpeVZT7gwk8BQrztPI5dIgO4bhBppXV4rpTQ==", + "license": "MIT", "dependencies": { "lodash._baseclone": "~2.4.1", "lodash._basecreatecallback": "~2.4.1" @@ -28312,7 +28584,8 @@ "node_modules/lodash.foreach": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.foreach/-/lodash.foreach-2.4.1.tgz", - "integrity": "sha1-/j/Do0yGyUyrb5UiVgKCdB4BYwk=", + "integrity": "sha512-AvOobAkE7qBtIiHU5QHQIfveWH5Usr9pIcFIzBv7u4S6bvb3FWpFrh9ltqBY7UeL5lw6e8d+SggiUXQVyh+FpA==", + "license": "MIT", "dependencies": { "lodash._basecreatecallback": "~2.4.1", "lodash.forown": "~2.4.1" @@ -28321,7 +28594,8 @@ "node_modules/lodash.forown": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.forown/-/lodash.forown-2.4.1.tgz", - "integrity": "sha1-eLQer+FAX6lmRZ6kGT/VAtCEUks=", + "integrity": "sha512-VC+CKm/zSs5t3i/MHv71HZoQphuqOvez1xhjWBwHU5zAbsCYrqwHr+MyQyMk14HzA3hSRNA5lCqDMSw5G2Qscg==", + "license": "MIT", "dependencies": { "lodash._basecreatecallback": "~2.4.1", "lodash._objecttypes": "~2.4.1", @@ -28342,7 +28616,8 @@ "node_modules/lodash.identity": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.identity/-/lodash.identity-2.4.1.tgz", - "integrity": "sha1-ZpTP+mX++TH3wxzobHRZfPVg9PE=" + "integrity": "sha512-VRYX+8XipeLjorag5bz3YBBRJ+5kj8hVBzfnaHgXPZAVTYowBdY5l0M5ZnOmlAMCOXBFabQtm7f5VqjMKEji0w==", + "license": "MIT" }, "node_modules/lodash.includes": { "version": "4.3.0", @@ -28357,7 +28632,8 @@ "node_modules/lodash.isarray": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.isarray/-/lodash.isarray-2.4.1.tgz", - "integrity": "sha1-tSoybB9i9tfac6MdVAHfbvRPD6E=", + "integrity": "sha512-yRDd0z+APziDqbk0MqR6Qfwj/Qn3jLxFJbI9U8MuvdTnqIXdZ5YXyGLnwuzCpZmjr26F1GNOjKLMMZ10i/wy6A==", + "license": "MIT", "dependencies": { "lodash._isnative": "~2.4.1" } @@ -28370,12 +28646,15 @@ "node_modules/lodash.isequal": { "version": "4.5.0", "resolved": 
"https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", - "integrity": "sha1-QVxEePK8wwEgwizhDtMib30+GOA=" + "integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==", + "deprecated": "This package is deprecated. Use require('node:util').isDeepStrictEqual instead.", + "license": "MIT" }, "node_modules/lodash.isfunction": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.isfunction/-/lodash.isfunction-2.4.1.tgz", - "integrity": "sha1-LP1XXHPkmKtX4xm3f6Aq3vE6lNE=" + "integrity": "sha512-6XcAB3izeQxPOQQNAJbbdjXbvWEt2Pn9ezPrjr4CwoLwmqsLVbsiEXD19cmmt4mbzOCOCdHzOQiUivUOJLra7w==", + "license": "MIT" }, "node_modules/lodash.isinteger": { "version": "4.0.4", @@ -28390,7 +28669,8 @@ "node_modules/lodash.isobject": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.isobject/-/lodash.isobject-2.4.1.tgz", - "integrity": "sha1-Wi5H/mmVPx7mMafrof5k0tBlWPU=", + "integrity": "sha512-sTebg2a1PoicYEZXD5PBdQcTlIJ6hUslrlWr7iV0O7n+i4596s2NQ9I5CaZ5FbXSfya/9WQsrYLANUJv9paYVA==", + "license": "MIT", "dependencies": { "lodash._objecttypes": "~2.4.1" } @@ -28408,7 +28688,8 @@ "node_modules/lodash.keys": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.keys/-/lodash.keys-2.4.1.tgz", - "integrity": "sha1-SN6kbfj/djKxDXBrissmWR4rNyc=", + "integrity": "sha512-ZpJhwvUXHSNL5wYd1RM6CUa2ZuqorG9ngoJ9Ix5Cce+uX7I5O/E06FCJdhSZ33b5dVyeQDnIlWH7B2s5uByZ7g==", + "license": "MIT", "dependencies": { "lodash._isnative": "~2.4.1", "lodash._shimkeys": "~2.4.1", @@ -28429,7 +28710,8 @@ "node_modules/lodash.noop": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.noop/-/lodash.noop-2.4.1.tgz", - "integrity": "sha1-T7VPgWZS5a4Q6PcvcXo4jHMmU4o=" + "integrity": "sha512-uNcV98/blRhInPUGQEnj9ekXXfG+q+rfoNSFZgl/eBfog9yBDW9gfUv2AHX/rAF7zZRlzWhbslGhbGQFZlCkZA==", + "license": "MIT" }, "node_modules/lodash.once": { "version": "4.1.1", @@ -28445,7 +28727,8 @@ 
"node_modules/lodash.support": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.support/-/lodash.support-2.4.1.tgz", - "integrity": "sha1-Mg4LZwMWc8KNeiu12eAzGkUkBRU=", + "integrity": "sha512-6SwqWwGFHhTXEiqB/yQgu8FYd//tm786d49y7kizHVCJH7zdzs191UQn3ES3tkkDbUddNRfkCRYqJFHtbLnbCw==", + "license": "MIT", "dependencies": { "lodash._isnative": "~2.4.1" } @@ -28835,6 +29118,15 @@ "dev": true, "license": "ISC" }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/mathjax": { "version": "3.2.2", "resolved": "https://registry.npmjs.org/mathjax/-/mathjax-3.2.2.tgz", @@ -29448,7 +29740,6 @@ "version": "2.6.0", "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", - "dev": true, "bin": { "mime": "cli.js" }, @@ -29457,9 +29748,10 @@ } }, "node_modules/mime-db": { - "version": "1.51.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", - "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", "engines": { "node": ">= 0.6" } @@ -29475,11 +29767,12 @@ } }, "node_modules/mime-types": { - "version": "2.1.34", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", - "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", + "version": "2.1.35", + "resolved": 
"https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", "dependencies": { - "mime-db": "1.51.0" + "mime-db": "1.52.0" }, "engines": { "node": ">= 0.6" @@ -29716,7 +30009,8 @@ "node_modules/mkdirp-classic": { "version": "0.5.3", "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", - "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" + "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", + "license": "MIT" }, "node_modules/mlly": { "version": "1.7.4", @@ -30181,18 +30475,18 @@ } }, "node_modules/multer": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/multer/-/multer-2.0.0.tgz", - "integrity": "sha512-bS8rPZurbAuHGAnApbM9d4h1wSoYqrOqkE+6a64KLMK9yWU7gJXBDDVklKQ3TPi9DRb85cRs6yXaC0+cjxRtRg==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/multer/-/multer-2.0.1.tgz", + "integrity": "sha512-Ug8bXeTIUlxurg8xLTEskKShvcKDZALo1THEX5E41pYCD2sCVub5/kIRIGqWNoqV6szyLyQKV6mD4QUrWE5GCQ==", "license": "MIT", "dependencies": { "append-field": "^1.0.0", - "busboy": "^1.0.0", - "concat-stream": "^1.5.2", - "mkdirp": "^0.5.4", + "busboy": "^1.6.0", + "concat-stream": "^2.0.0", + "mkdirp": "^0.5.6", "object-assign": "^4.1.1", - "type-is": "^1.6.4", - "xtend": "^4.0.0" + "type-is": "^1.6.18", + "xtend": "^4.0.2" }, "engines": { "node": ">= 10.16.0" @@ -30322,7 +30616,8 @@ "node_modules/native-promise-only": { "version": "0.8.1", "resolved": "https://registry.npmjs.org/native-promise-only/-/native-promise-only-0.8.1.tgz", - "integrity": "sha1-IKMYwwy0X3H+et+/eyHJnBRy7xE=" + "integrity": "sha512-zkVhZUA3y8mbz652WrL5x0fB0ehrBkulWT3TomAQ9iDtyXZvzKeEA6GPxAItBYeNYl5yngKRX612qHOhvMkDeg==", + "license": "MIT" }, "node_modules/native-request": { "version": "1.1.0", @@ 
-30876,9 +31171,13 @@ } }, "node_modules/object-inspect": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", - "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -30920,14 +31219,16 @@ } }, "node_modules/object.assign": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", - "integrity": "sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==", + "version": "4.1.7", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz", + "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.5", + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", "define-properties": "^1.2.1", - "has-symbols": "^1.0.3", + "es-object-atoms": "^1.0.0", + "has-symbols": "^1.1.0", "object-keys": "^1.1.1" }, "engines": { @@ -31178,6 +31479,23 @@ "resolved": "libraries/overleaf-editor-core", "link": true }, + "node_modules/own-keys": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/own-keys/-/own-keys-1.0.1.tgz", + "integrity": "sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==", + "license": "MIT", + "dependencies": { + "get-intrinsic": "^1.2.6", + "object-keys": "^1.1.1", + "safe-push-apply": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/p-event": { "version": 
"4.2.0", "resolved": "https://registry.npmjs.org/p-event/-/p-event-4.2.0.tgz", @@ -31737,12 +32055,80 @@ } }, "node_modules/path-loader": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/path-loader/-/path-loader-1.0.10.tgz", - "integrity": "sha512-CMP0v6S6z8PHeJ6NFVyVJm6WyJjIwFvyz2b0n2/4bKdS/0uZa/9sKUlYZzubrn3zuDRU0zIuEDX9DZYQ2ZI8TA==", + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/path-loader/-/path-loader-1.0.12.tgz", + "integrity": "sha512-n7oDG8B+k/p818uweWrOixY9/Dsr89o2TkCm6tOTex3fpdo2+BFDgR+KpB37mGKBRsBAlR8CIJMFN0OEy/7hIQ==", + "license": "MIT", "dependencies": { "native-promise-only": "^0.8.1", - "superagent": "^3.8.3" + "superagent": "^7.1.6" + } + }, + "node_modules/path-loader/node_modules/debug": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/path-loader/node_modules/formidable": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.1.5.tgz", + "integrity": "sha512-Oz5Hwvwak/DCaXVVUtPn4oLMLLy1CdclLKO1LFgU7XzDpVMUU5UjlSLpGMocyQNNk8F6IJW9M/YdooSn2MRI+Q==", + "license": "MIT", + "dependencies": { + "@paralleldrive/cuid2": "^2.2.2", + "dezalgo": "^1.0.4", + "once": "^1.4.0", + "qs": "^6.11.0" + }, + "funding": { + "url": "https://ko-fi.com/tunnckoCore/commissions" + } + }, + "node_modules/path-loader/node_modules/semver": { + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + 
"node_modules/path-loader/node_modules/superagent": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-7.1.6.tgz", + "integrity": "sha512-gZkVCQR1gy/oUXr+kxJMLDjla434KmSOKbx5iGD30Ql+AkJQ/YlPKECJy2nhqOsHLjGHzoDTXNSjhnvWhzKk7g==", + "deprecated": "Please upgrade to v9.0.0+ as we have fixed a public vulnerability with formidable dependency. Note that v9.0.0+ requires Node.js v14.18.0+. See https://github.com/ladjs/superagent/pull/1800 for insight. This project is supported and maintained by the team at Forward Email @ https://forwardemail.net", + "license": "MIT", + "dependencies": { + "component-emitter": "^1.3.0", + "cookiejar": "^2.1.3", + "debug": "^4.3.4", + "fast-safe-stringify": "^2.1.1", + "form-data": "^4.0.0", + "formidable": "^2.0.1", + "methods": "^1.1.2", + "mime": "2.6.0", + "qs": "^6.10.3", + "readable-stream": "^3.6.0", + "semver": "^7.3.7" + }, + "engines": { + "node": ">=6.4.0 <13 || >=14" } }, "node_modules/path-parse": { @@ -35079,6 +35465,28 @@ "node": ">=4.0.0" } }, + "node_modules/reflect.getprototypeof": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", + "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==", + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.9", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.7", + "get-proto": "^1.0.1", + "which-builtin-type": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/regenerate": { "version": "1.4.2", "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz", @@ -35143,15 +35551,17 @@ } }, "node_modules/regexp.prototype.flags": { - "version": "1.5.2", - "resolved": 
"https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", - "integrity": "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz", + "integrity": "sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.6", + "call-bind": "^1.0.8", "define-properties": "^1.2.1", "es-errors": "^1.3.0", - "set-function-name": "^2.0.1" + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "set-function-name": "^2.0.2" }, "engines": { "node": ">= 0.4" @@ -35666,14 +36076,15 @@ } }, "node_modules/safe-array-concat": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz", - "integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.3.tgz", + "integrity": "sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", - "get-intrinsic": "^1.2.4", - "has-symbols": "^1.0.3", + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "get-intrinsic": "^1.2.6", + "has-symbols": "^1.1.0", "isarray": "^2.0.5" }, "engines": { @@ -35694,6 +36105,22 @@ "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==", "optional": true }, + "node_modules/safe-push-apply": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz", + "integrity": "sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "isarray": 
"^2.0.5" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/safe-regex": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", @@ -35704,14 +36131,14 @@ } }, "node_modules/safe-regex-test": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", - "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz", + "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.6", + "call-bound": "^1.0.2", "es-errors": "^1.3.0", - "is-regex": "^1.1.4" + "is-regex": "^1.2.1" }, "engines": { "node": ">= 0.4" @@ -36429,13 +36856,29 @@ } }, "node_modules/set-function-name": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", - "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", + "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", + "license": "MIT", "dependencies": { - "define-data-property": "^1.0.1", + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", "functions-have-names": "^1.2.3", - "has-property-descriptors": "^1.0.0" + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-proto": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/set-proto/-/set-proto-1.0.0.tgz", + "integrity": 
"sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -36546,14 +36989,69 @@ "integrity": "sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==" }, "node_modules/side-channel": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", - "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.4", - "object-inspect": "^1.13.1" + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + 
"license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" }, "engines": { "node": ">= 0.4" @@ -37132,7 +37630,8 @@ "node_modules/spark-md5": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/spark-md5/-/spark-md5-3.0.2.tgz", - "integrity": "sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw==" + "integrity": "sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw==", + "license": "(WTFPL OR MIT)" }, "node_modules/sparse-bitfield": { "version": "3.0.3", @@ -37221,7 +37720,8 @@ "node_modules/split-ca": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/split-ca/-/split-ca-1.0.1.tgz", - "integrity": "sha1-bIOv82kvphJW4M0ZfgXp3hV2kaY=" + "integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==", + "license": "ISC" }, "node_modules/split-string": { "version": "3.1.0", @@ -37256,6 +37756,23 @@ "es5-ext": "^0.10.53" } }, + "node_modules/ssh2": { + "version": "1.16.0", + "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz", + "integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==", + "hasInstallScript": true, + "dependencies": { + "asn1": "^0.2.6", + "bcrypt-pbkdf": "^1.0.2" + }, + "engines": { + "node": 
">=10.16.0" + }, + "optionalDependencies": { + "cpu-features": "~0.0.10", + "nan": "^2.20.0" + } + }, "node_modules/sshpk": { "version": "1.17.0", "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz", @@ -37370,12 +37887,13 @@ "license": "MIT" }, "node_modules/stop-iteration-iterator": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz", - "integrity": "sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==", - "dev": true, + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz", + "integrity": "sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==", + "license": "MIT", "dependencies": { - "internal-slot": "^1.0.4" + "es-errors": "^1.3.0", + "internal-slot": "^1.1.0" }, "engines": { "node": ">= 0.4" @@ -37548,15 +38066,18 @@ } }, "node_modules/string.prototype.trim": { - "version": "1.2.9", - "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", - "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==", + "version": "1.2.10", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.10.tgz", + "integrity": "sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "define-data-property": "^1.1.4", "define-properties": "^1.2.1", - "es-abstract": "^1.23.0", - "es-object-atoms": "^1.0.0" + "es-abstract": "^1.23.5", + "es-object-atoms": "^1.0.0", + "has-property-descriptors": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -37566,15 +38087,19 @@ } }, "node_modules/string.prototype.trimend": { - "version": "1.0.8", - "resolved": 
"https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz", - "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==", + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.9.tgz", + "integrity": "sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", "define-properties": "^1.2.1", "es-object-atoms": "^1.0.0" }, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -38063,7 +38588,8 @@ "version": "3.8.3", "resolved": "https://registry.npmjs.org/superagent/-/superagent-3.8.3.tgz", "integrity": "sha512-GLQtLMCoEIK4eDv6OGtkOoSMt3D+oq0y3dsxMuYuDvaNUvuT8eFBuLmfR0iYYzHC1e8hpzC6ZsxbuP6DIalMFA==", - "deprecated": "Please upgrade to v7.0.2+ of superagent. We have fixed numerous issues with streams, form-data, attach(), filesystem errors not bubbling up (ENOENT on attach()), and all tests are now passing. See the releases tab for more information at . Thanks to @shadowgate15, @spence-s, and @niftylettuce. Superagent is sponsored by Forward Email at .", + "deprecated": "Please upgrade to v9.0.0+ as we have fixed a public vulnerability with formidable dependency. Note that v9.0.0+ requires Node.js v14.18.0+. See https://github.com/ladjs/superagent/pull/1800 for insight. 
This project is supported and maintained by the team at Forward Email @ https://forwardemail.net", + "license": "MIT", "dependencies": { "component-emitter": "^1.2.0", "cookiejar": "^2.1.0", @@ -38084,32 +38610,58 @@ "version": "3.2.7", "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "license": "MIT", "dependencies": { "ms": "^2.1.1" } }, "node_modules/superagent/node_modules/form-data": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", - "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "version": "2.5.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.3.tgz", + "integrity": "sha512-XHIrMD0NpDrNM/Ckf7XJiBbLl57KEhT3+i3yY+eWm+cqYZJQTZrKo8Y8AWKnuV5GT4scfuUGt9LzNoIx3dU1nQ==", + "license": "MIT", "dependencies": { "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "mime-types": "^2.1.35", + "safe-buffer": "^5.2.1" }, "engines": { "node": ">= 0.12" } }, + "node_modules/superagent/node_modules/form-data/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, "node_modules/superagent/node_modules/isarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + "integrity": 
"sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "license": "MIT" }, "node_modules/superagent/node_modules/mime": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "license": "MIT", "bin": { "mime": "cli.js" }, @@ -38118,9 +38670,10 @@ } }, "node_modules/superagent/node_modules/readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "license": "MIT", "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", @@ -38135,6 +38688,7 @@ "version": "1.1.1", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "license": "MIT", "dependencies": { "safe-buffer": "~5.1.0" } @@ -38375,7 +38929,8 @@ "node_modules/swagger-converter": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/swagger-converter/-/swagger-converter-0.1.7.tgz", - "integrity": "sha1-oJdRnG8e5N1n4wjZtT3cnCslf5c=", + "integrity": "sha512-O2hZbWqq8x6j0uZ4qWj5dw45WPoAxKsJLJZqOgTqRtPNi8IqA+rDkDV/48S8qanS3KGv1QcVoPNLivMbyHHdAQ==", + "license": "MIT", "dependencies": { "lodash.clonedeep": "^2.4.1" } @@ -38426,12 +38981,6 @@ "lodash": "^4.17.14" } }, - "node_modules/swagger-tools/node_modules/commander": { - "version": "2.11.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.11.0.tgz", - "integrity": 
"sha512-b0553uYA5YAEGgyYIGYROzKQ7X5RAqedkfjiZxwi0kL1g3bOaBNNZfYkzt/CL0umgD5wc9Jec2FbB98CjkMRvQ==", - "license": "MIT" - }, "node_modules/swagger-tools/node_modules/debug": { "version": "3.2.7", "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", @@ -38530,9 +39079,9 @@ } }, "node_modules/tar-fs": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.8.tgz", - "integrity": "sha512-ZoROL70jptorGAlgAYiLoBLItEKw/fUxg9BSYK/dF/GAGYFJOJJJMvjPAKDJraCXFwadD456FCuvLWgfhMsPwg==", + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.9.tgz", + "integrity": "sha512-XF4w9Xp+ZQgifKakjZYmFdkLoSWd34VGKcsTCwlNWM7QG3ZbaxnTsaBwnjFZqHRf/rROxaR8rXnbtwdvaDI+lA==", "license": "MIT", "dependencies": { "pump": "^3.0.0", @@ -39356,14 +39905,14 @@ } }, "node_modules/traverse": { - "version": "0.6.9", - "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.9.tgz", - "integrity": "sha512-7bBrcF+/LQzSgFmT0X5YclVqQxtv7TDJ1f8Wj7ibBu/U6BMLeOpUxuZjV7rMc44UtKxlnMFigdhFAIszSX1DMg==", + "version": "0.6.11", + "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.11.tgz", + "integrity": "sha512-vxXDZg8/+p3gblxB6BhhG5yWVn1kGRlaL8O78UDXc3wRnPizB5g83dcvWV1jpDMIPnjZjOFuxlMmE82XJ4407w==", "license": "MIT", "dependencies": { - "gopd": "^1.0.1", - "typedarray.prototype.slice": "^1.0.3", - "which-typed-array": "^1.1.15" + "gopd": "^1.2.0", + "typedarray.prototype.slice": "^1.0.5", + "which-typed-array": "^1.1.18" }, "engines": { "node": ">= 0.4" @@ -39520,30 +40069,30 @@ } }, "node_modules/typed-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz", - "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", + "integrity": 
"sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", + "call-bound": "^1.0.3", "es-errors": "^1.3.0", - "is-typed-array": "^1.1.13" + "is-typed-array": "^1.1.14" }, "engines": { "node": ">= 0.4" } }, "node_modules/typed-array-byte-length": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", - "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.3.tgz", + "integrity": "sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", + "call-bind": "^1.0.8", "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-proto": "^1.0.3", - "is-typed-array": "^1.1.13" + "gopd": "^1.2.0", + "has-proto": "^1.2.0", + "is-typed-array": "^1.1.14" }, "engines": { "node": ">= 0.4" @@ -39553,17 +40102,18 @@ } }, "node_modules/typed-array-byte-offset": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz", - "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.4.tgz", + "integrity": "sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ==", "license": "MIT", "dependencies": { "available-typed-arrays": "^1.0.7", - "call-bind": "^1.0.7", + "call-bind": "^1.0.8", "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-proto": "^1.0.3", - "is-typed-array": "^1.1.13" + "gopd": "^1.2.0", + "has-proto": "^1.2.0", + "is-typed-array": "^1.1.15", + "reflect.getprototypeof": 
"^1.0.9" }, "engines": { "node": ">= 0.4" @@ -39573,17 +40123,17 @@ } }, "node_modules/typed-array-length": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz", - "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.7.tgz", + "integrity": "sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==", "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "for-each": "^0.3.3", "gopd": "^1.0.1", - "has-proto": "^1.0.3", "is-typed-array": "^1.1.13", - "possible-typed-array-names": "^1.0.0" + "possible-typed-array-names": "^1.0.0", + "reflect.getprototypeof": "^1.0.6" }, "engines": { "node": ">= 0.4" @@ -39598,17 +40148,19 @@ "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=" }, "node_modules/typedarray.prototype.slice": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/typedarray.prototype.slice/-/typedarray.prototype.slice-1.0.3.tgz", - "integrity": "sha512-8WbVAQAUlENo1q3c3zZYuy5k9VzBQvp8AX9WOtbvyWlLM1v5JaSRmjubLjzHF4JFtptjH/5c/i95yaElvcjC0A==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/typedarray.prototype.slice/-/typedarray.prototype.slice-1.0.5.tgz", + "integrity": "sha512-q7QNVDGTdl702bVFiI5eY4l/HkgCM6at9KhcFbgUAzezHFbOVy4+0O/lCjsABEQwbZPravVfBIiBVGo89yzHFg==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", + "call-bind": "^1.0.8", "define-properties": "^1.2.1", - "es-abstract": "^1.23.0", + "es-abstract": "^1.23.9", "es-errors": "^1.3.0", - "typed-array-buffer": "^1.0.2", - "typed-array-byte-offset": "^1.0.2" + "get-proto": "^1.0.1", + "math-intrinsics": "^1.1.0", + "typed-array-buffer": "^1.0.3", + "typed-array-byte-offset": "^1.0.4" }, "engines": { "node": ">= 0.4" @@ -39696,14 +40248,18 @@ } }, "node_modules/unbox-primitive": { - "version": 
"1.0.2", - "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", - "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz", + "integrity": "sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==", + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", + "call-bound": "^1.0.3", "has-bigints": "^1.0.2", - "has-symbols": "^1.0.3", - "which-boxed-primitive": "^1.0.2" + "has-symbols": "^1.1.0", + "which-boxed-primitive": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -41388,30 +41944,64 @@ } }, "node_modules/which-boxed-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", - "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.1.1.tgz", + "integrity": "sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==", + "license": "MIT", "dependencies": { - "is-bigint": "^1.0.1", - "is-boolean-object": "^1.1.0", - "is-number-object": "^1.0.4", - "is-string": "^1.0.5", - "is-symbol": "^1.0.3" + "is-bigint": "^1.1.0", + "is-boolean-object": "^1.2.1", + "is-number-object": "^1.1.1", + "is-string": "^1.1.1", + "is-symbol": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-builtin-type": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.2.1.tgz", + "integrity": 
"sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "function.prototype.name": "^1.1.6", + "has-tostringtag": "^1.0.2", + "is-async-function": "^2.0.0", + "is-date-object": "^1.1.0", + "is-finalizationregistry": "^1.1.0", + "is-generator-function": "^1.0.10", + "is-regex": "^1.2.1", + "is-weakref": "^1.0.2", + "isarray": "^2.0.5", + "which-boxed-primitive": "^1.1.0", + "which-collection": "^1.0.2", + "which-typed-array": "^1.1.16" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/which-collection": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.1.tgz", - "integrity": "sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A==", - "dev": true, + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz", + "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==", + "license": "MIT", "dependencies": { - "is-map": "^2.0.1", - "is-set": "^2.0.1", - "is-weakmap": "^2.0.1", - "is-weakset": "^2.0.1" + "is-map": "^2.0.3", + "is-set": "^2.0.3", + "is-weakmap": "^2.0.2", + "is-weakset": "^2.0.3" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -41424,15 +42014,17 @@ "dev": true }, "node_modules/which-typed-array": { - "version": "1.1.15", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", - "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", + "version": "1.1.19", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", + "integrity": 
"sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", "license": "MIT", "dependencies": { "available-typed-arrays": "^1.0.7", - "call-bind": "^1.0.7", - "for-each": "^0.3.3", - "gopd": "^1.0.1", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "for-each": "^0.3.5", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", "has-tostringtag": "^1.0.2" }, "engines": { @@ -41949,6 +42541,7 @@ "version": "3.25.1", "resolved": "https://registry.npmjs.org/z-schema/-/z-schema-3.25.1.tgz", "integrity": "sha512-7tDlwhrBG+oYFdXNOjILSurpfQyuVgkRe3hB2q8TEssamDHB7BbLWYkYO98nTn0FibfdFroFKDjndbgufAgS/Q==", + "license": "MIT", "dependencies": { "core-js": "^2.5.7", "lodash.get": "^4.0.0", @@ -41962,23 +42555,19 @@ "commander": "^2.7.1" } }, - "node_modules/z-schema/node_modules/commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", - "optional": true - }, "node_modules/z-schema/node_modules/core-js": { "version": "2.6.12", "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.12.tgz", "integrity": "sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ==", - "deprecated": "core-js@<3.4 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Please, upgrade your dependencies to the actual version of core-js.", - "hasInstallScript": true + "deprecated": "core-js@<3.23.3 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Some versions have web compatibility issues. 
Please, upgrade your dependencies to the actual version of core-js.", + "hasInstallScript": true, + "license": "MIT" }, "node_modules/z-schema/node_modules/validator": { "version": "10.11.0", "resolved": "https://registry.npmjs.org/validator/-/validator-10.11.0.tgz", "integrity": "sha512-X/p3UZerAIsbBfN/IwahhYaBbY68EN/UQBWHtsbXGT5bfrH/p4NQzUCG1kF/rtKaNpnJ7jAu6NGTdSNtyNIXMw==", + "license": "MIT", "engines": { "node": ">= 0.10" } @@ -42102,13 +42691,13 @@ "async": "^3.2.5", "body-parser": "^1.20.3", "bunyan": "^1.8.15", - "dockerode": "^4.0.5", + "dockerode": "^4.0.7", "express": "^4.21.2", "lodash": "^4.17.21", "p-limit": "^3.1.0", "request": "^2.88.2", "send": "^0.19.0", - "tar-fs": "^3.0.4", + "tar-fs": "^3.0.9", "workerpool": "^6.1.5" }, "devDependencies": { @@ -42175,33 +42764,6 @@ "node": ">= 0.6" } }, - "services/clsi/node_modules/@grpc/grpc-js": { - "version": "1.13.2", - "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.13.2.tgz", - "integrity": "sha512-nnR5nmL6lxF8YBqb6gWvEgLdLh/Fn+kvAdX5hUOnt48sNSb0riz/93ASd2E5gvanPA41X6Yp25bIfGRp1SMb2g==", - "license": "Apache-2.0", - "dependencies": { - "@grpc/proto-loader": "^0.7.13", - "@js-sdsl/ordered-map": "^4.4.2" - }, - "engines": { - "node": ">=12.10.0" - } - }, - "services/clsi/node_modules/cpu-features": { - "version": "0.0.10", - "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", - "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", - "hasInstallScript": true, - "optional": true, - "dependencies": { - "buildcheck": "~0.0.6", - "nan": "^2.19.0" - }, - "engines": { - "node": ">=10.0.0" - } - }, "services/clsi/node_modules/diff": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", @@ -42211,75 +42773,6 @@ "node": ">=0.3.1" } }, - "services/clsi/node_modules/docker-modem": { - "version": "5.0.6", - "resolved": 
"https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.6.tgz", - "integrity": "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", - "license": "Apache-2.0", - "dependencies": { - "debug": "^4.1.1", - "readable-stream": "^3.5.0", - "split-ca": "^1.0.1", - "ssh2": "^1.15.0" - }, - "engines": { - "node": ">= 8.0" - } - }, - "services/clsi/node_modules/dockerode": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.5.tgz", - "integrity": "sha512-ZPmKSr1k1571Mrh7oIBS/j0AqAccoecY2yH420ni5j1KyNMgnoTh4Nu4FWunh0HZIJmRSmSysJjBIpa/zyWUEA==", - "license": "Apache-2.0", - "dependencies": { - "@balena/dockerignore": "^1.0.2", - "@grpc/grpc-js": "^1.11.1", - "@grpc/proto-loader": "^0.7.13", - "docker-modem": "^5.0.6", - "protobufjs": "^7.3.2", - "tar-fs": "~2.1.2", - "uuid": "^10.0.0" - }, - "engines": { - "node": ">= 8.0" - } - }, - "services/clsi/node_modules/dockerode/node_modules/tar-fs": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.2.tgz", - "integrity": "sha512-EsaAXwxmx8UB7FRKqeozqEPop69DXcmYwTQwXvyAPF352HJsPdkVhvTaDPYqfNgruveJIJy3TA2l+2zj8LJIJA==", - "license": "MIT", - "dependencies": { - "chownr": "^1.1.1", - "mkdirp-classic": "^0.5.2", - "pump": "^3.0.0", - "tar-stream": "^2.1.4" - } - }, - "services/clsi/node_modules/protobufjs": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", - "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==", - "hasInstallScript": true, - "license": "BSD-3-Clause", - "dependencies": { - "@protobufjs/aspromise": "^1.1.2", - "@protobufjs/base64": "^1.1.2", - "@protobufjs/codegen": "^2.0.4", - "@protobufjs/eventemitter": "^1.1.0", - "@protobufjs/fetch": "^1.1.0", - "@protobufjs/float": "^1.0.2", - "@protobufjs/inquire": "^1.1.0", - "@protobufjs/path": "^1.1.2", - "@protobufjs/pool": "^1.1.0", - 
"@protobufjs/utf8": "^1.1.0", - "@types/node": ">=13.7.0", - "long": "^5.0.0" - }, - "engines": { - "node": ">=12.0.0" - } - }, "services/clsi/node_modules/sinon": { "version": "9.0.3", "resolved": "https://registry.npmjs.org/sinon/-/sinon-9.0.3.tgz", @@ -42299,23 +42792,6 @@ "url": "https://opencollective.com/sinon" } }, - "services/clsi/node_modules/ssh2": { - "version": "1.16.0", - "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz", - "integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==", - "hasInstallScript": true, - "dependencies": { - "asn1": "^0.2.6", - "bcrypt-pbkdf": "^1.0.2" - }, - "engines": { - "node": ">=10.16.0" - }, - "optionalDependencies": { - "cpu-features": "~0.0.10", - "nan": "^2.20.0" - } - }, "services/clsi/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -42328,19 +42804,6 @@ "node": ">=8" } }, - "services/clsi/node_modules/uuid": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", - "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" - ], - "license": "MIT", - "bin": { - "uuid": "dist/bin/uuid" - } - }, "services/contacts": { "name": "@overleaf/contacts", "dependencies": { @@ -42408,6 +42871,7 @@ "services/docstore": { "name": "@overleaf/docstore", "dependencies": { + "@overleaf/fetch-utils": "*", "@overleaf/logger": "*", "@overleaf/metrics": "*", "@overleaf/o-error": "*", @@ -42718,6 +43182,7 @@ "license": "Proprietary", "dependencies": { "@google-cloud/secret-manager": "^5.6.0", + "@overleaf/fetch-utils": "*", "@overleaf/logger": "*", "@overleaf/metrics": "*", "@overleaf/mongo-utils": "*", @@ -42747,6 +43212,7 @@ "mongodb": "6.12.0", "overleaf-editor-core": "*", "p-limit": "^6.2.0", + 
"p-queue": "^8.1.0", "pg": "^8.7.1", "pg-query-stream": "^4.2.4", "swagger-tools": "^0.10.4", @@ -44228,6 +44694,7 @@ "@overleaf/promise-utils": "*", "@overleaf/redis-wrapper": "*", "@overleaf/settings": "*", + "@overleaf/stream-utils": "*", "async": "^3.2.5", "aws-sdk": "^2.650.0", "body-parser": "^1.20.3", @@ -44687,8 +45154,7 @@ "@overleaf/settings": "*", "@phosphor-icons/react": "^2.1.7", "@slack/webhook": "^7.0.2", - "@stripe/react-stripe-js": "^3.1.1", - "@stripe/stripe-js": "^5.6.0", + "@stripe/stripe-js": "^7.3.0", "@xmldom/xmldom": "^0.7.13", "accepts": "^1.3.7", "ajv": "^8.12.0", @@ -44740,7 +45206,7 @@ "moment": "^2.29.4", "mongodb-legacy": "6.1.3", "mongoose": "8.9.5", - "multer": "overleaf/multer#199c5ff05bd375c508f4074498237baead7f5148", + "multer": "overleaf/multer#4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23", "nocache": "^2.1.0", "node-fetch": "^2.7.0", "nodemailer": "^6.7.0", @@ -45175,6 +45641,15 @@ "lodash": "^4.17.15" } }, + "services/web/node_modules/@stripe/stripe-js": { + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/@stripe/stripe-js/-/stripe-js-7.3.0.tgz", + "integrity": "sha512-xnCyFIEI5SQnQrKkCxVj7nS5fWTZap+zuIGzmmxLMdlmgahFJaihK4zogqE8YyKKTLtrp/EldkEijSgtXsRVDg==", + "license": "MIT", + "engines": { + "node": ">=12.16" + } + }, "services/web/node_modules/@transloadit/prettier-bytes": { "version": "0.0.9", "resolved": "https://registry.npmjs.org/@transloadit/prettier-bytes/-/prettier-bytes-0.0.9.tgz", @@ -46022,18 +46497,18 @@ } }, "services/web/node_modules/multer": { - "version": "2.0.0", - "resolved": "git+ssh://git@github.com/overleaf/multer.git#199c5ff05bd375c508f4074498237baead7f5148", - "integrity": "sha512-S5MlIoOgrDr+a2jLS8z7jQlbzvZ0m30U2tRwdyLrxhnnMUQZYEzkVysEv10Dw41RTpM5bQQDs563Vzl1LLhxhQ==", + "version": "2.0.1", + "resolved": "git+ssh://git@github.com/overleaf/multer.git#4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23", + "integrity": 
"sha512-kkvPK48OQibR5vIoTQBbZp1uWVCvT9MrW3Y0mqdhFYJP/HVJujb4eSCEU0yj+hyf0Y+H/BKCmPdM4fJnzqAO4w==", "license": "MIT", "dependencies": { "append-field": "^1.0.0", - "busboy": "^1.0.0", - "concat-stream": "^1.5.2", - "mkdirp": "^0.5.4", + "busboy": "^1.6.0", + "concat-stream": "^2.0.0", + "mkdirp": "^0.5.6", "object-assign": "^4.1.1", - "type-is": "^1.6.4", - "xtend": "^4.0.0" + "type-is": "^1.6.18", + "xtend": "^4.0.2" }, "engines": { "node": ">= 10.16.0" diff --git a/package.json b/package.json index 64fbd258ed..a51bbcd743 100644 --- a/package.json +++ b/package.json @@ -37,7 +37,7 @@ }, "swagger-tools": { "body-parser": "1.20.3", - "multer": "2.0.0", + "multer": "2.0.1", "path-to-regexp": "3.3.0", "qs": "6.13.0" } diff --git a/patches/@node-saml+node-saml+4.0.5.patch b/patches/@node-saml+node-saml+4.0.5.patch new file mode 100644 index 0000000000..81fd700b31 --- /dev/null +++ b/patches/@node-saml+node-saml+4.0.5.patch @@ -0,0 +1,23 @@ +diff --git a/node_modules/@node-saml/node-saml/lib/saml.js b/node_modules/@node-saml/node-saml/lib/saml.js +index fba15b9..a5778cb 100644 +--- a/node_modules/@node-saml/node-saml/lib/saml.js ++++ b/node_modules/@node-saml/node-saml/lib/saml.js +@@ -336,7 +336,8 @@ class SAML { + const requestOrResponse = request || response; + (0, utility_1.assertRequired)(requestOrResponse, "either request or response is required"); + let buffer; +- if (this.options.skipRequestCompression) { ++ // logout requestOrResponse must be compressed anyway ++ if (this.options.skipRequestCompression && operation !== "logout") { + buffer = Buffer.from(requestOrResponse, "utf8"); + } + else { +@@ -495,7 +496,7 @@ class SAML { + try { + xml = Buffer.from(container.SAMLResponse, "base64").toString("utf8"); + doc = await (0, xml_1.parseDomFromString)(xml); +- const inResponseToNodes = xml_1.xpath.selectAttributes(doc, "/*[local-name()='Response']/@InResponseTo"); ++ const inResponseToNodes = xml_1.xpath.selectAttributes(doc, "/*[local-name()='Response' or 
local-name()='LogoutResponse']/@InResponseTo"); + if (inResponseToNodes) { + inResponseTo = inResponseToNodes.length ? inResponseToNodes[0].nodeValue : null; + await this.validateInResponseTo(inResponseTo); diff --git a/patches/ldapauth-fork+4.3.3.patch b/patches/ldapauth-fork+4.3.3.patch new file mode 100644 index 0000000000..4d31210c9d --- /dev/null +++ b/patches/ldapauth-fork+4.3.3.patch @@ -0,0 +1,64 @@ +diff --git a/node_modules/ldapauth-fork/lib/ldapauth.js b/node_modules/ldapauth-fork/lib/ldapauth.js +index 85ecf36a8b..a7d07e0f78 100644 +--- a/node_modules/ldapauth-fork/lib/ldapauth.js ++++ b/node_modules/ldapauth-fork/lib/ldapauth.js +@@ -69,6 +69,7 @@ function LdapAuth(opts) { + this.opts.bindProperty || (this.opts.bindProperty = 'dn'); + this.opts.groupSearchScope || (this.opts.groupSearchScope = 'sub'); + this.opts.groupDnProperty || (this.opts.groupDnProperty = 'dn'); ++ this.opts.tlsStarted = false; + + EventEmitter.call(this); + +@@ -108,21 +109,7 @@ function LdapAuth(opts) { + this._userClient.on('error', this._handleError.bind(this)); + + var self = this; +- if (this.opts.starttls) { +- // When starttls is enabled, this callback supplants the 'connect' callback +- this._adminClient.starttls(this.opts.tlsOptions, this._adminClient.controls, function(err) { +- if (err) { +- self._handleError(err); +- } else { +- self._onConnectAdmin(); +- } +- }); +- this._userClient.starttls(this.opts.tlsOptions, this._userClient.controls, function(err) { +- if (err) { +- self._handleError(err); +- } +- }); +- } else if (opts.reconnect) { ++ if (opts.reconnect && !this.opts.starttls) { + this.once('_installReconnectListener', function() { + self.log && self.log.trace('install reconnect listener'); + self._adminClient.on('connect', function() { +@@ -384,6 +371,28 @@ LdapAuth.prototype._findGroups = function(user, callback) { + */ + LdapAuth.prototype.authenticate = function(username, password, callback) { + var self = this; ++ if (this.opts.starttls && 
!this.opts.tlsStarted) { ++ // When starttls is enabled, this callback supplants the 'connect' callback ++ this._adminClient.starttls(this.opts.tlsOptions, this._adminClient.controls, function (err) { ++ if (err) { ++ self._handleError(err); ++ } else { ++ self._onConnectAdmin(function(){self._handleAuthenticate(username, password, callback);}); ++ } ++ }); ++ this._userClient.starttls(this.opts.tlsOptions, this._userClient.controls, function (err) { ++ if (err) { ++ self._handleError(err); ++ } ++ }); ++ } else { ++ self._handleAuthenticate(username, password, callback); ++ } ++}; ++ ++LdapAuth.prototype._handleAuthenticate = function (username, password, callback) { ++ this.opts.tlsStarted = true; ++ var self = this; + + if (typeof password === 'undefined' || password === null || password === '') { + return callback(new Error('no password given')); diff --git a/server-ce/hotfix/5.5.1/Dockerfile b/server-ce/hotfix/5.5.1/Dockerfile new file mode 100644 index 0000000000..9572d29740 --- /dev/null +++ b/server-ce/hotfix/5.5.1/Dockerfile @@ -0,0 +1,28 @@ +FROM sharelatex/sharelatex:5.5.0 + + +# fix tls configuration in redis for history-v1 +COPY pr_25168.patch . +RUN patch -p1 < pr_25168.patch && rm pr_25168.patch + +# improve logging in history system +COPY pr_26086.patch . +RUN patch -p1 < pr_26086.patch && rm pr_26086.patch + +# fix create-user.mjs script +COPY pr_26152.patch . +RUN patch -p1 < pr_26152.patch && rm pr_26152.patch + +# check mongo featureCompatibilityVersion +COPY pr_26091.patch . +RUN patch -p1 < pr_26091.patch && rm pr_26091.patch + +# update multer and tar-fs +RUN sed -i 's/"multer": "2.0.0"/"multer": "2.0.1"/g' package.json +RUN sed -i 's/"dockerode": "^4.0.5"/"dockerode": "^4.0.7"/g' services/clsi/package.json +RUN sed -i 's/"tar-fs": "^3.0.4"/"tar-fs": "^3.0.9"/g' services/clsi/package.json +RUN sed -i 's/199c5ff05bd375c508f4074498237baead7f5148/4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23/g' services/web/package.json +COPY package-lock.json.diff . 
+RUN patch package-lock.json < package-lock.json.diff +RUN npm install --omit=dev +RUN npm install @paralleldrive/cuid2@2.2.2 -w services/history-v1 diff --git a/server-ce/hotfix/5.5.1/package-lock.json.diff b/server-ce/hotfix/5.5.1/package-lock.json.diff new file mode 100644 index 0000000000..ecbf851bc8 --- /dev/null +++ b/server-ce/hotfix/5.5.1/package-lock.json.diff @@ -0,0 +1,2202 @@ +4954,4956c4954,4957 +< "version": "1.8.22", +< "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.8.22.tgz", +< "integrity": "sha512-oAjDdN7fzbUi+4hZjKG96MR6KTEubAeMpQEb+77qy+3r0Ua5xTFuie6JOLr4ZZgl5g+W5/uRTS2M1V8mVAFPuA==", +--- +> "version": "1.13.4", +> "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.13.4.tgz", +> "integrity": "sha512-GsFaMXCkMqkKIvwCQjCrwH+GHbPKBjhwo/8ZuUkWHqbI73Kky9I+pQltrlT0+MWpedCoosda53lgjYfyEPgxBg==", +> "license": "Apache-2.0", +4958,4959c4959,4960 +< "@grpc/proto-loader": "^0.7.0", +< "@types/node": ">=12.12.47" +--- +> "@grpc/proto-loader": "^0.7.13", +> "@js-sdsl/ordered-map": "^4.4.2" +4962c4963 +< "node": "^8.13.0 || >=10.10.0" +--- +> "node": ">=12.10.0" +5915a5917,5928 +> "node_modules/@noble/hashes": { +> "version": "1.8.0", +> "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.8.0.tgz", +> "integrity": "sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==", +> "license": "MIT", +> "engines": { +> "node": "^14.21.3 || >=16" +> }, +> "funding": { +> "url": "https://paulmillr.com/funding/" +> } +> }, +7528a7542,7550 +> "node_modules/@paralleldrive/cuid2": { +> "version": "2.2.2", +> "resolved": "https://registry.npmjs.org/@paralleldrive/cuid2/-/cuid2-2.2.2.tgz", +> "integrity": "sha512-ZOBkgDwEdoYVlSeRbYYXs0S9MejQofiVYoTbKzy/6GQa39/q5tQU2IX46+shYnUkpEl3wc+J6wRlar7r2EK2xA==", +> "license": "MIT", +> "dependencies": { +> "@noble/hashes": "^1.1.5" +> } +> }, +13479,13481c13501,13503 +< "version": "1.0.1", +< "resolved": 
"https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", +< "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", +--- +> "version": "1.0.2", +> "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz", +> "integrity": "sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==", +13484,13485c13506,13507 +< "call-bind": "^1.0.5", +< "is-array-buffer": "^3.0.4" +--- +> "call-bound": "^1.0.3", +> "is-array-buffer": "^3.0.5" +13601,13603c13623,13625 +< "version": "1.0.3", +< "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", +< "integrity": "sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", +--- +> "version": "1.0.4", +> "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz", +> "integrity": "sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==", +13607c13629 +< "call-bind": "^1.0.5", +--- +> "call-bind": "^1.0.8", +13609,13613c13631,13634 +< "es-abstract": "^1.22.3", +< "es-errors": "^1.2.1", +< "get-intrinsic": "^1.2.3", +< "is-array-buffer": "^3.0.4", +< "is-shared-array-buffer": "^1.0.2" +--- +> "es-abstract": "^1.23.5", +> "es-errors": "^1.3.0", +> "get-intrinsic": "^1.2.6", +> "is-array-buffer": "^3.0.4" +13706a13728,13736 +> "node_modules/async-function": { +> "version": "1.0.0", +> "resolved": "https://registry.npmjs.org/async-function/-/async-function-1.0.0.tgz", +> "integrity": "sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==", +> "license": "MIT", +> "engines": { +> "node": ">= 0.4" +> } +> }, +14255,14257c14285,14287 +< "version": "4.0.1", +< "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-4.0.1.tgz", +< "integrity": 
"sha512-ilQs4fm/l9eMfWY2dY0WCIUplSUp7U0CT1vrqMg1MUdeZl4fypu5UP0XcDBK5WBQPJAKP1b7XEodISmekH/CEg==", +--- +> "version": "4.1.5", +> "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-4.1.5.tgz", +> "integrity": "sha512-1zccWBMypln0jEE05LzZt+V/8y8AQsQQqxtklqaIyg5nu6OAYFhZxPXinJTSG+kU5qyNmeLgcn9AW7eHiCHVLA==", +14261c14291 +< "bare-events": "^2.0.0", +--- +> "bare-events": "^2.5.4", +14263c14293 +< "bare-stream": "^2.0.0" +--- +> "bare-stream": "^2.6.4" +14266c14296,14304 +< "bare": ">=1.7.0" +--- +> "bare": ">=1.16.0" +> }, +> "peerDependencies": { +> "bare-buffer": "*" +> }, +> "peerDependenciesMeta": { +> "bare-buffer": { +> "optional": true +> } +14270,14272c14308,14310 +< "version": "3.6.0", +< "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-3.6.0.tgz", +< "integrity": "sha512-BUrFS5TqSBdA0LwHop4OjPJwisqxGy6JsWVqV6qaFoe965qqtaKfDzHY5T2YA1gUL0ZeeQeA+4BBc1FJTcHiPw==", +--- +> "version": "3.6.1", +> "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-3.6.1.tgz", +> "integrity": "sha512-uaIjxokhFidJP+bmmvKSgiMzj2sV5GPHaZVAIktcxcpCyBFFWO+YlikVAdhmUo2vYFvFhOXIAlldqV29L8126g==", +15110,15112c15148,15151 +< "version": "1.0.7", +< "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", +< "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", +--- +> "version": "1.0.8", +> "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", +> "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", +> "license": "MIT", +15113a15153 +> "call-bind-apply-helpers": "^1.0.0", +15115,15116d15154 +< "es-errors": "^1.3.0", +< "function-bind": "^1.1.2", +15118c15156,15185 +< "set-function-length": "^1.2.1" +--- +> "set-function-length": "^1.2.2" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +> } +> }, +> "node_modules/call-bind-apply-helpers": { +> "version": 
"1.0.2", +> "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", +> "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", +> "license": "MIT", +> "dependencies": { +> "es-errors": "^1.3.0", +> "function-bind": "^1.1.2" +> }, +> "engines": { +> "node": ">= 0.4" +> } +> }, +> "node_modules/call-bound": { +> "version": "1.0.4", +> "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", +> "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", +> "license": "MIT", +> "dependencies": { +> "call-bind-apply-helpers": "^1.0.2", +> "get-intrinsic": "^1.3.0" +15423c15490,15491 +< "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" +--- +> "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", +> "license": "ISC" +15751,15756c15819,15822 +< "version": "4.1.1", +< "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", +< "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", +< "engines": { +< "node": ">= 6" +< } +--- +> "version": "2.11.0", +> "resolved": "https://registry.npmjs.org/commander/-/commander-2.11.0.tgz", +> "integrity": "sha512-b0553uYA5YAEGgyYIGYROzKQ7X5RAqedkfjiZxwi0kL1g3bOaBNNZfYkzt/CL0umgD5wc9Jec2FbB98CjkMRvQ==", +> "license": "MIT" +15871,15873c15937,15939 +< "version": "1.6.2", +< "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", +< "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", +--- +> "version": "2.0.0", +> "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-2.0.0.tgz", +> "integrity": 
"sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==", +15875c15941 +< "node >= 0.8" +--- +> "node >= 6.0" +15876a15943 +> "license": "MIT", +15880c15947 +< "readable-stream": "^2.2.2", +--- +> "readable-stream": "^3.0.2", +15884,15910d15950 +< "node_modules/concat-stream/node_modules/isarray": { +< "version": "1.0.0", +< "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", +< "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" +< }, +< "node_modules/concat-stream/node_modules/readable-stream": { +< "version": "2.3.8", +< "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", +< "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", +< "dependencies": { +< "core-util-is": "~1.0.0", +< "inherits": "~2.0.3", +< "isarray": "~1.0.0", +< "process-nextick-args": "~2.0.0", +< "safe-buffer": "~5.1.1", +< "string_decoder": "~1.1.1", +< "util-deprecate": "~1.0.1" +< } +< }, +< "node_modules/concat-stream/node_modules/string_decoder": { +< "version": "1.1.1", +< "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", +< "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", +< "dependencies": { +< "safe-buffer": "~5.1.0" +< } +< }, +16125c16165,16166 +< "integrity": "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==" +--- +> "integrity": "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==", +> "license": "MIT" +16337a16379,16392 +> "node_modules/cpu-features": { +> "version": "0.0.10", +> "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", +> "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", +> 
"hasInstallScript": true, +> "optional": true, +> "dependencies": { +> "buildcheck": "~0.0.6", +> "nan": "^2.19.0" +> }, +> "engines": { +> "node": ">=10.0.0" +> } +> }, +17268,17270c17323,17325 +< "version": "1.0.1", +< "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz", +< "integrity": "sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==", +--- +> "version": "1.0.2", +> "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz", +> "integrity": "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==", +17273c17328 +< "call-bind": "^1.0.6", +--- +> "call-bound": "^1.0.3", +17275c17330 +< "is-data-view": "^1.0.1" +--- +> "is-data-view": "^1.0.2" +17285,17287c17340,17342 +< "version": "1.0.1", +< "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz", +< "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==", +--- +> "version": "1.0.2", +> "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz", +> "integrity": "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==", +17290c17345 +< "call-bind": "^1.0.7", +--- +> "call-bound": "^1.0.3", +17292c17347 +< "is-data-view": "^1.0.1" +--- +> "is-data-view": "^1.0.2" +17298c17353 +< "url": "https://github.com/sponsors/ljharb" +--- +> "url": "https://github.com/sponsors/inspect-js" +17302,17304c17357,17359 +< "version": "1.0.0", +< "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz", +< "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==", +--- +> "version": "1.0.1", +> "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz", +> "integrity": 
"sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==", +17307c17362 +< "call-bind": "^1.0.6", +--- +> "call-bound": "^1.0.2", +17666a17722,17731 +> "node_modules/dezalgo": { +> "version": "1.0.4", +> "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", +> "integrity": "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==", +> "license": "ISC", +> "dependencies": { +> "asap": "^2.0.0", +> "wrappy": "1" +> } +> }, +17725a17791,17872 +> "node_modules/docker-modem": { +> "version": "5.0.6", +> "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.6.tgz", +> "integrity": "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", +> "license": "Apache-2.0", +> "dependencies": { +> "debug": "^4.1.1", +> "readable-stream": "^3.5.0", +> "split-ca": "^1.0.1", +> "ssh2": "^1.15.0" +> }, +> "engines": { +> "node": ">= 8.0" +> } +> }, +> "node_modules/dockerode": { +> "version": "4.0.7", +> "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.7.tgz", +> "integrity": "sha512-R+rgrSRTRdU5mH14PZTCPZtW/zw3HDWNTS/1ZAQpL/5Upe/ye5K9WQkIysu4wBoiMwKynsz0a8qWuGsHgEvSAA==", +> "license": "Apache-2.0", +> "dependencies": { +> "@balena/dockerignore": "^1.0.2", +> "@grpc/grpc-js": "^1.11.1", +> "@grpc/proto-loader": "^0.7.13", +> "docker-modem": "^5.0.6", +> "protobufjs": "^7.3.2", +> "tar-fs": "~2.1.2", +> "uuid": "^10.0.0" +> }, +> "engines": { +> "node": ">= 8.0" +> } +> }, +> "node_modules/dockerode/node_modules/protobufjs": { +> "version": "7.5.3", +> "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.5.3.tgz", +> "integrity": "sha512-sildjKwVqOI2kmFDiXQ6aEB0fjYTafpEvIBs8tOR8qI4spuL9OPROLVu2qZqi/xgCfsHIwVqlaF8JBjWFHnKbw==", +> "hasInstallScript": true, +> "license": "BSD-3-Clause", +> "dependencies": { +> "@protobufjs/aspromise": "^1.1.2", +> "@protobufjs/base64": "^1.1.2", +> "@protobufjs/codegen": 
"^2.0.4", +> "@protobufjs/eventemitter": "^1.1.0", +> "@protobufjs/fetch": "^1.1.0", +> "@protobufjs/float": "^1.0.2", +> "@protobufjs/inquire": "^1.1.0", +> "@protobufjs/path": "^1.1.2", +> "@protobufjs/pool": "^1.1.0", +> "@protobufjs/utf8": "^1.1.0", +> "@types/node": ">=13.7.0", +> "long": "^5.0.0" +> }, +> "engines": { +> "node": ">=12.0.0" +> } +> }, +> "node_modules/dockerode/node_modules/tar-fs": { +> "version": "2.1.3", +> "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.3.tgz", +> "integrity": "sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg==", +> "license": "MIT", +> "dependencies": { +> "chownr": "^1.1.1", +> "mkdirp-classic": "^0.5.2", +> "pump": "^3.0.0", +> "tar-stream": "^2.1.4" +> } +> }, +> "node_modules/dockerode/node_modules/uuid": { +> "version": "10.0.0", +> "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", +> "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", +> "funding": [ +> "https://github.com/sponsors/broofa", +> "https://github.com/sponsors/ctavan" +> ], +> "license": "MIT", +> "bin": { +> "uuid": "dist/bin/uuid" +> } +> }, +17926a18074,18087 +> "node_modules/dunder-proto": { +> "version": "1.0.1", +> "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", +> "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", +> "license": "MIT", +> "dependencies": { +> "call-bind-apply-helpers": "^1.0.1", +> "es-errors": "^1.3.0", +> "gopd": "^1.2.0" +> }, +> "engines": { +> "node": ">= 0.4" +> } +> }, +18212,18214c18373,18375 +< "version": "1.23.3", +< "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.3.tgz", +< "integrity": "sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==", +--- +> "version": "1.24.0", +> "resolved": 
"https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz", +> "integrity": "sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg==", +18217,18218c18378,18379 +< "array-buffer-byte-length": "^1.0.1", +< "arraybuffer.prototype.slice": "^1.0.3", +--- +> "array-buffer-byte-length": "^1.0.2", +> "arraybuffer.prototype.slice": "^1.0.4", +18220,18224c18381,18386 +< "call-bind": "^1.0.7", +< "data-view-buffer": "^1.0.1", +< "data-view-byte-length": "^1.0.1", +< "data-view-byte-offset": "^1.0.0", +< "es-define-property": "^1.0.0", +--- +> "call-bind": "^1.0.8", +> "call-bound": "^1.0.4", +> "data-view-buffer": "^1.0.2", +> "data-view-byte-length": "^1.0.2", +> "data-view-byte-offset": "^1.0.1", +> "es-define-property": "^1.0.1", +18226,18233c18388,18396 +< "es-object-atoms": "^1.0.0", +< "es-set-tostringtag": "^2.0.3", +< "es-to-primitive": "^1.2.1", +< "function.prototype.name": "^1.1.6", +< "get-intrinsic": "^1.2.4", +< "get-symbol-description": "^1.0.2", +< "globalthis": "^1.0.3", +< "gopd": "^1.0.1", +--- +> "es-object-atoms": "^1.1.1", +> "es-set-tostringtag": "^2.1.0", +> "es-to-primitive": "^1.3.0", +> "function.prototype.name": "^1.1.8", +> "get-intrinsic": "^1.3.0", +> "get-proto": "^1.0.1", +> "get-symbol-description": "^1.1.0", +> "globalthis": "^1.0.4", +> "gopd": "^1.2.0", +18235,18236c18398,18399 +< "has-proto": "^1.0.3", +< "has-symbols": "^1.0.3", +--- +> "has-proto": "^1.2.0", +> "has-symbols": "^1.1.0", +18238,18239c18401,18402 +< "internal-slot": "^1.0.7", +< "is-array-buffer": "^3.0.4", +--- +> "internal-slot": "^1.1.0", +> "is-array-buffer": "^3.0.5", +18241c18404 +< "is-data-view": "^1.0.1", +--- +> "is-data-view": "^1.0.2", +18243,18248c18406,18413 +< "is-regex": "^1.1.4", +< "is-shared-array-buffer": "^1.0.3", +< "is-string": "^1.0.7", +< "is-typed-array": "^1.1.13", +< "is-weakref": "^1.0.2", +< "object-inspect": "^1.13.1", +--- +> "is-regex": "^1.2.1", +> "is-set": "^2.0.3", +> 
"is-shared-array-buffer": "^1.0.4", +> "is-string": "^1.1.1", +> "is-typed-array": "^1.1.15", +> "is-weakref": "^1.1.1", +> "math-intrinsics": "^1.1.0", +> "object-inspect": "^1.13.4", +18250,18255c18415,18424 +< "object.assign": "^4.1.5", +< "regexp.prototype.flags": "^1.5.2", +< "safe-array-concat": "^1.1.2", +< "safe-regex-test": "^1.0.3", +< "string.prototype.trim": "^1.2.9", +< "string.prototype.trimend": "^1.0.8", +--- +> "object.assign": "^4.1.7", +> "own-keys": "^1.0.1", +> "regexp.prototype.flags": "^1.5.4", +> "safe-array-concat": "^1.1.3", +> "safe-push-apply": "^1.0.0", +> "safe-regex-test": "^1.1.0", +> "set-proto": "^1.0.0", +> "stop-iteration-iterator": "^1.1.0", +> "string.prototype.trim": "^1.2.10", +> "string.prototype.trimend": "^1.0.9", +18257,18262c18426,18431 +< "typed-array-buffer": "^1.0.2", +< "typed-array-byte-length": "^1.0.1", +< "typed-array-byte-offset": "^1.0.2", +< "typed-array-length": "^1.0.6", +< "unbox-primitive": "^1.0.2", +< "which-typed-array": "^1.1.15" +--- +> "typed-array-buffer": "^1.0.3", +> "typed-array-byte-length": "^1.0.3", +> "typed-array-byte-offset": "^1.0.4", +> "typed-array-length": "^1.0.7", +> "unbox-primitive": "^1.1.0", +> "which-typed-array": "^1.1.19" +18272,18277c18441,18444 +< "version": "1.0.0", +< "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", +< "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", +< "dependencies": { +< "get-intrinsic": "^1.2.4" +< }, +--- +> "version": "1.0.1", +> "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", +> "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", +> "license": "MIT", +18318,18320c18485,18487 +< "version": "1.0.0", +< "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", +< "integrity": 
"sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", +--- +> "version": "1.1.1", +> "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", +> "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", +18330,18332c18497,18499 +< "version": "2.0.3", +< "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", +< "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", +--- +> "version": "2.1.0", +> "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", +> "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", +18335c18502,18503 +< "get-intrinsic": "^1.2.4", +--- +> "es-errors": "^1.3.0", +> "get-intrinsic": "^1.2.6", +18337c18505 +< "hasown": "^2.0.1" +--- +> "hasown": "^2.0.2" +18353,18355c18521,18524 +< "version": "1.2.1", +< "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", +< "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", +--- +> "version": "1.3.0", +> "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.3.0.tgz", +> "integrity": "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==", +> "license": "MIT", +18357,18359c18526,18528 +< "is-callable": "^1.1.4", +< "is-date-object": "^1.0.1", +< "is-symbol": "^1.0.2" +--- +> "is-callable": "^1.2.7", +> "is-date-object": "^1.0.5", +> "is-symbol": "^1.0.4" +20463a20633,20638 +> "node_modules/fast-safe-stringify": { +> "version": "2.1.1", +> "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", +> "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", +> 
"license": "MIT" +> }, +20933,20935c21108,21111 +< "version": "0.3.3", +< "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", +< "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", +--- +> "version": "0.3.5", +> "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", +> "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", +> "license": "MIT", +20937c21113,21119 +< "is-callable": "^1.1.3" +--- +> "is-callable": "^1.2.7" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +21100a21283 +> "license": "MIT", +21272,21274c21455,21458 +< "version": "1.1.6", +< "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz", +< "integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==", +--- +> "version": "1.1.8", +> "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.8.tgz", +> "integrity": "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==", +> "license": "MIT", +21276,21279c21460,21465 +< "call-bind": "^1.0.2", +< "define-properties": "^1.2.0", +< "es-abstract": "^1.22.1", +< "functions-have-names": "^1.2.3" +--- +> "call-bind": "^1.0.8", +> "call-bound": "^1.0.3", +> "define-properties": "^1.2.1", +> "functions-have-names": "^1.2.3", +> "hasown": "^2.0.2", +> "is-callable": "^1.2.7" +21385,21387c21571,21574 +< "version": "1.2.4", +< "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", +< "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", +--- +> "version": "1.3.0", +> "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", +> "integrity": 
"sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", +> "license": "MIT", +21388a21576,21577 +> "call-bind-apply-helpers": "^1.0.2", +> "es-define-property": "^1.0.1", +21389a21579 +> "es-object-atoms": "^1.1.1", +21391,21393c21581,21585 +< "has-proto": "^1.0.1", +< "has-symbols": "^1.0.3", +< "hasown": "^2.0.0" +--- +> "get-proto": "^1.0.1", +> "gopd": "^1.2.0", +> "has-symbols": "^1.1.0", +> "hasown": "^2.0.2", +> "math-intrinsics": "^1.1.0" +21420a21613,21625 +> "node_modules/get-proto": { +> "version": "1.0.1", +> "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", +> "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", +> "license": "MIT", +> "dependencies": { +> "dunder-proto": "^1.0.1", +> "es-object-atoms": "^1.0.0" +> }, +> "engines": { +> "node": ">= 0.4" +> } +> }, +21437,21439c21642,21644 +< "version": "1.0.2", +< "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", +< "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", +--- +> "version": "1.1.0", +> "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.1.0.tgz", +> "integrity": "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==", +21442c21647 +< "call-bind": "^1.0.5", +--- +> "call-bound": "^1.0.3", +21444c21649 +< "get-intrinsic": "^1.2.4" +--- +> "get-intrinsic": "^1.2.6" +21664,21666c21869,21872 +< "version": "1.0.3", +< "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", +< "integrity": "sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", +--- +> "version": "1.0.4", +> "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", +> "integrity": 
"sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", +> "license": "MIT", +21668c21874,21875 +< "define-properties": "^1.1.3" +--- +> "define-properties": "^1.2.1", +> "gopd": "^1.0.1" +22055,22059c22262,22267 +< "version": "1.0.1", +< "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", +< "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", +< "dependencies": { +< "get-intrinsic": "^1.1.3" +--- +> "version": "1.2.0", +> "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", +> "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", +> "license": "MIT", +> "engines": { +> "node": ">= 0.4" +22079a22288 +> "license": "MIT", +22300,22302c22509,22511 +< "version": "1.0.3", +< "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", +< "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", +--- +> "version": "1.2.0", +> "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.2.0.tgz", +> "integrity": "sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==", +22303a22513,22515 +> "dependencies": { +> "dunder-proto": "^1.0.0" +> }, +22312,22314c22524,22527 +< "version": "1.0.3", +< "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", +< "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", +--- +> "version": "1.1.0", +> "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", +> "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", +> "license": "MIT", +23257,23259c23470,23472 +< "version": "1.0.7", +< "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", +< "integrity": 
"sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", +--- +> "version": "1.1.0", +> "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", +> "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==", +23263,23264c23476,23477 +< "hasown": "^2.0.0", +< "side-channel": "^1.0.4" +--- +> "hasown": "^2.0.2", +> "side-channel": "^1.1.0" +23402,23404c23615,23617 +< "version": "3.0.4", +< "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", +< "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", +--- +> "version": "3.0.5", +> "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz", +> "integrity": "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==", +23407,23408c23620,23622 +< "call-bind": "^1.0.2", +< "get-intrinsic": "^1.2.1" +--- +> "call-bind": "^1.0.8", +> "call-bound": "^1.0.3", +> "get-intrinsic": "^1.2.6" +23422a23637,23655 +> "node_modules/is-async-function": { +> "version": "2.1.1", +> "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.1.tgz", +> "integrity": "sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==", +> "license": "MIT", +> "dependencies": { +> "async-function": "^1.0.0", +> "call-bound": "^1.0.3", +> "get-proto": "^1.0.1", +> "has-tostringtag": "^1.0.2", +> "safe-regex-test": "^1.1.0" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +> } +> }, +23424,23426c23657,23660 +< "version": "1.0.4", +< "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", +< "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", +--- +> "version": "1.1.0", +> "resolved": 
"https://registry.npmjs.org/is-bigint/-/is-bigint-1.1.0.tgz", +> "integrity": "sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==", +> "license": "MIT", +23428c23662,23665 +< "has-bigints": "^1.0.1" +--- +> "has-bigints": "^1.0.2" +> }, +> "engines": { +> "node": ">= 0.4" +23447,23449c23684,23687 +< "version": "1.1.2", +< "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", +< "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", +--- +> "version": "1.2.2", +> "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.2.tgz", +> "integrity": "sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==", +> "license": "MIT", +23451,23452c23689,23690 +< "call-bind": "^1.0.2", +< "has-tostringtag": "^1.0.0" +--- +> "call-bound": "^1.0.3", +> "has-tostringtag": "^1.0.2" +23517,23519c23755,23757 +< "version": "1.0.1", +< "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz", +< "integrity": "sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==", +--- +> "version": "1.0.2", +> "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.2.tgz", +> "integrity": "sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==", +23521a23760,23761 +> "call-bound": "^1.0.2", +> "get-intrinsic": "^1.2.6", +23532,23534c23772,23775 +< "version": "1.0.5", +< "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", +< "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", +--- +> "version": "1.1.0", +> "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.1.0.tgz", +> "integrity": 
"sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==", +> "license": "MIT", +23536c23777,23778 +< "has-tostringtag": "^1.0.0" +--- +> "call-bound": "^1.0.2", +> "has-tostringtag": "^1.0.2" +23601a23844,23858 +> "node_modules/is-finalizationregistry": { +> "version": "1.1.1", +> "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz", +> "integrity": "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==", +> "license": "MIT", +> "dependencies": { +> "call-bound": "^1.0.3" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +> } +> }, +23688,23691c23945,23951 +< "version": "2.0.2", +< "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.2.tgz", +< "integrity": "sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg==", +< "dev": true, +--- +> "version": "2.0.3", +> "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", +> "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", +> "license": "MIT", +> "engines": { +> "node": ">= 0.4" +> }, +23753,23755c24013,24016 +< "version": "1.0.7", +< "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", +< "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", +--- +> "version": "1.1.1", +> "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz", +> "integrity": "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==", +> "license": "MIT", +23757c24018,24019 +< "has-tostringtag": "^1.0.0" +--- +> "call-bound": "^1.0.3", +> "has-tostringtag": "^1.0.2" +23817,23819c24079,24082 +< "version": "1.1.4", +< "resolved": 
"https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", +< "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", +--- +> "version": "1.2.1", +> "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", +> "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", +> "license": "MIT", +23821,23822c24084,24087 +< "call-bind": "^1.0.2", +< "has-tostringtag": "^1.0.0" +--- +> "call-bound": "^1.0.2", +> "gopd": "^1.2.0", +> "has-tostringtag": "^1.0.2", +> "hasown": "^2.0.2" +23832,23835c24097,24103 +< "version": "2.0.2", +< "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.2.tgz", +< "integrity": "sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g==", +< "dev": true, +--- +> "version": "2.0.3", +> "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", +> "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", +> "license": "MIT", +> "engines": { +> "node": ">= 0.4" +> }, +23848,23850c24116,24118 +< "version": "1.0.3", +< "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", +< "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", +--- +> "version": "1.0.4", +> "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz", +> "integrity": "sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==", +23853c24121 +< "call-bind": "^1.0.7" +--- +> "call-bound": "^1.0.3" +23874,23876c24142,24145 +< "version": "1.0.7", +< "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", +< "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", +--- +> "version": "1.1.1", +> "resolved": 
"https://registry.npmjs.org/is-string/-/is-string-1.1.1.tgz", +> "integrity": "sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==", +> "license": "MIT", +23878c24147,24148 +< "has-tostringtag": "^1.0.0" +--- +> "call-bound": "^1.0.3", +> "has-tostringtag": "^1.0.2" +23888,23890c24158,24161 +< "version": "1.0.4", +< "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", +< "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", +--- +> "version": "1.1.1", +> "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.1.1.tgz", +> "integrity": "sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==", +> "license": "MIT", +23892c24163,24165 +< "has-symbols": "^1.0.2" +--- +> "call-bound": "^1.0.2", +> "has-symbols": "^1.1.0", +> "safe-regex-test": "^1.1.0" +23902,23904c24175,24177 +< "version": "1.1.13", +< "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", +< "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", +--- +> "version": "1.1.15", +> "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", +> "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", +23907c24180 +< "which-typed-array": "^1.1.14" +--- +> "which-typed-array": "^1.1.16" +23943,23946c24216,24222 +< "version": "2.0.1", +< "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.1.tgz", +< "integrity": "sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA==", +< "dev": true, +--- +> "version": "2.0.2", +> "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", +> "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", +> "license": "MIT", 
+> "engines": { +> "node": ">= 0.4" +> }, +23952,23954c24228,24231 +< "version": "1.0.2", +< "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", +< "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", +--- +> "version": "1.1.1", +> "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.1.1.tgz", +> "integrity": "sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==", +> "license": "MIT", +23956c24233,24236 +< "call-bind": "^1.0.2" +--- +> "call-bound": "^1.0.3" +> }, +> "engines": { +> "node": ">= 0.4" +23963,23966c24243,24246 +< "version": "2.0.2", +< "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.2.tgz", +< "integrity": "sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg==", +< "dev": true, +--- +> "version": "2.0.4", +> "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.4.tgz", +> "integrity": "sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==", +> "license": "MIT", +23968,23969c24248,24252 +< "call-bind": "^1.0.2", +< "get-intrinsic": "^1.1.1" +--- +> "call-bound": "^1.0.3", +> "get-intrinsic": "^1.2.6" +> }, +> "engines": { +> "node": ">= 0.4" +24543a24827 +> "license": "MIT", +24564a24849 +> "license": "MIT", +24568a24854,24862 +> "node_modules/json-refs/node_modules/commander": { +> "version": "4.1.1", +> "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", +> "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", +> "license": "MIT", +> "engines": { +> "node": ">= 6" +> } +> }, +24572a24867 +> "license": "MIT", +24584a24880 +> "license": "MIT", +25175,25187d25470 +< "node_modules/less/node_modules/mime": { +< "version": "1.6.0", +< "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", +< "integrity": 
"sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", +< "dev": true, +< "optional": true, +< "bin": { +< "mime": "cli.js" +< }, +< "engines": { +< "node": ">=4" +< } +< }, +25335c25618,25619 +< "integrity": "sha1-6I7suS4ruEyQZWEv2VigcZzUf5Q=" +--- +> "integrity": "sha512-tC2aLC7bbkDXKNrjDu9OLiVx9pFIvjinID2eD9PzNdAQGZScWUd/h8faqOw5d6oLsOvFRCRbz1ASoB+deyMVUw==", +> "license": "MIT" +25340c25624,25625 +< "integrity": "sha1-6UC5690nwyfgqNqxtVkWxTQelXU=", +--- +> "integrity": "sha512-VGHm6DH+1UiuafQdE/DNMqxOcSyhRu0xO9+jPDq7xITRn5YOorGrHVQmavMVXCYmTm80YRTZZCn/jTW7MokwLg==", +> "license": "MIT", +25351c25636,25637 +< "integrity": "sha1-MPgj5X4X43NdODvWK2Czh1Q7QYY=", +--- +> "integrity": "sha512-+zJVXs0VxC/Au+/7foiKzw8UaWvfSfPh20XhqK/6HFQiUeclL5fz05zY7G9yDAFItAKKZwB4cgpzGvxiwuG1wQ==", +> "license": "MIT", +25366c25652,25653 +< "integrity": "sha1-+Ob1tXip405UEXm1a47uv0oofgg=", +--- +> "integrity": "sha512-8JJ3FnMPm54t3BwPLk8q8mPyQKQXm/rt9df+awr4NGtyJrtcCXM3Of1I86S6jVy1b4yAyFBb8wbKPEauuqzRmQ==", +> "license": "MIT", +25376c25663,25664 +< "integrity": "sha1-fQsmdknLKeehOdAQO3wR+uhOSFE=", +--- +> "integrity": "sha512-SLczhg860fGW7AKlYcuOFstDtJuQhaANlJ4Y/jrOoRxhmVtK41vbJDH3OefVRSRkSCQo4HI82QVkAVsoGa5gSw==", +> "license": "MIT", +25387c25675,25676 +< "integrity": "sha1-TTHy595+E0+/KAN2K4FQsyUZZm8=", +--- +> "integrity": "sha512-x2ja1fa/qmzbizuXgVM4QAP9svtMbdxjG8Anl9bCeDAwLOVQ1vLrA0hLb/NkpbGi9evjtkl0aWLTEoOlUdBPQA==", +> "license": "MIT", +25398c25687,25688 +< "integrity": "sha1-UdaVeXPaTtVW43KQ2MGhjFPeFgc=", +--- +> "integrity": "sha512-5TCfLt1haQpsa7bgLYRKNNE4yqhO4ZxIayN1btQmazMchO6Q8JYFRMqbJ3W+uNmMm4R0Jw7KGkZX5YfDDnywuw==", +> "license": "MIT", +25409c25699,25700 +< "integrity": "sha1-+vH3+BD6mFolHCGHQESBCUg55e4=", +--- +> "integrity": "sha512-iIrScwY3atGvLVbQL/+CNUznaPwBJg78S/JO4cTUFXRkRsZgEBhscB27cVoT4tsIOUyFu/5M/0umfHNGJ6wYwg==", +> "license": "MIT", +25417c25708,25709 +< "integrity": "sha1-PqZAS3hKe+g2x7V1gOHN95sUgyw=" +--- +> 
"integrity": "sha512-BOlKGKNHhCHswGOWtmVb5zBygyxN7EmTuzVOSQI6QSoGhG+kvv71gICFS1TBpnqvT1n53txK8CDK3u5D2/GZxQ==", +> "license": "MIT" +25422c25714,25715 +< "integrity": "sha1-nUgvRjuOZq++WcLBTtsRcGAXIzQ=" +--- +> "integrity": "sha512-xKDem1BxoIfcCtaJHotjtyfdIvZO9qrF+mv3G1+ngQmaI3MJt3Qm46i9HLk/CbzABbavUrr1/EomQT8KxtsrYA==", +> "license": "MIT" +25427c25720,25721 +< "integrity": "sha1-fAt/admKH3ZSn4kLDNsbTf7BHBE=" +--- +> "integrity": "sha512-XpqGh1e7hhkOzftBfWE7zt+Yn9mVHFkDhicVttvKLsoCMLVVL+xTQjfjB4X4vtznauxv0QZ5ZAeqjvat0dh62Q==", +> "license": "MIT" +25432c25726,25727 +< "integrity": "sha1-phOWMNdtFTawfdyAliiJsIL2pkE=", +--- +> "integrity": "sha512-wwCwWX8PK/mYR5VZjcU5JFl6py/qrfLGMxzpKOfSqgA1PaZ6Z625CZLCxH1KsqyxSkOFmNm+mEYjeDpXlM4hrg==", +> "license": "MIT", +25441c25736,25737 +< "integrity": "sha1-98IAzRuS7yNrOZ7s9zxkjReqlNI=", +--- +> "integrity": "sha512-Vx0XKzpg2DFbQw4wrp1xSWd2sfl3W/BG6bucSRZmftS1AzbWRemCmBQDxyQTNhlLNec428PXkuuja+VNBZgu2A==", +> "license": "MIT", +25450c25746,25747 +< "integrity": "sha1-bpzJZm/wgfC1psl4uD4kLmlJ0gM=", +--- +> "integrity": "sha512-lBrglYxLD/6KAJ8IEa5Lg+YHgNAL7FyKqXg4XOUI+Du/vtniLs1ZqS+yHNKPkK54waAgkdUnDOYaWf+rv4B+AA==", +> "license": "MIT", +25458c25755,25756 +< "integrity": "sha1-dFz0GlNZexj2iImFREBe+isG2Q8=" +--- +> "integrity": "sha512-+odPJa4PE2UgYnQgJgkLs0UD03QU78R2ivhrFnG9GdtYOZdE6ObxOj7KiUEUlqOOgatFT+ZqSypFjDSduTigKg==", +> "license": "MIT" +25463c25761,25762 +< "integrity": "sha1-hMOVlt1xGBqXsGUpE6fJZ15Jsao=", +--- +> "integrity": "sha512-AqQ4AJz5buSx9ELXWt5dONwJyVPd4NTADMKhoVYWCugjoVf172/LpvVhwmSJn4g8/Dc0S8hxTe8rt5Dob3X9KQ==", +> "license": "MIT", +25473c25772,25773 +< "integrity": "sha1-XRn6AFyMTSNvr0dCx7eh/Kvikmc=", +--- +> "integrity": "sha512-hn2VWYZ+N9aYncRad4jORvlGgpFrn+axnPIWRvFxjk6CWcZH5b5alI8EymYsHITI23Z9wrW/+ORq+azrVFpOfw==", +> "license": "MIT", +25487c25787,25788 +< "integrity": "sha1-8pIDtAsS/uCkXTYxZIJZvrq8eGg=", +--- +> "integrity": 
"sha512-zj5vReFLkR+lJOBKP1wyteZ13zut/KSmXtdCBgxcy/m4UTitcBxpeVZT7gwk8BQrztPI5dIgO4bhBppXV4rpTQ==", +> "license": "MIT", +25517c25818,25819 +< "integrity": "sha1-/j/Do0yGyUyrb5UiVgKCdB4BYwk=", +--- +> "integrity": "sha512-AvOobAkE7qBtIiHU5QHQIfveWH5Usr9pIcFIzBv7u4S6bvb3FWpFrh9ltqBY7UeL5lw6e8d+SggiUXQVyh+FpA==", +> "license": "MIT", +25526c25828,25829 +< "integrity": "sha1-eLQer+FAX6lmRZ6kGT/VAtCEUks=", +--- +> "integrity": "sha512-VC+CKm/zSs5t3i/MHv71HZoQphuqOvez1xhjWBwHU5zAbsCYrqwHr+MyQyMk14HzA3hSRNA5lCqDMSw5G2Qscg==", +> "license": "MIT", +25547c25850,25851 +< "integrity": "sha1-ZpTP+mX++TH3wxzobHRZfPVg9PE=" +--- +> "integrity": "sha512-VRYX+8XipeLjorag5bz3YBBRJ+5kj8hVBzfnaHgXPZAVTYowBdY5l0M5ZnOmlAMCOXBFabQtm7f5VqjMKEji0w==", +> "license": "MIT" +25562c25866,25867 +< "integrity": "sha1-tSoybB9i9tfac6MdVAHfbvRPD6E=", +--- +> "integrity": "sha512-yRDd0z+APziDqbk0MqR6Qfwj/Qn3jLxFJbI9U8MuvdTnqIXdZ5YXyGLnwuzCpZmjr26F1GNOjKLMMZ10i/wy6A==", +> "license": "MIT", +25575c25880,25882 +< "integrity": "sha1-QVxEePK8wwEgwizhDtMib30+GOA=" +--- +> "integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==", +> "deprecated": "This package is deprecated. 
Use require('node:util').isDeepStrictEqual instead.", +> "license": "MIT" +25580c25887,25888 +< "integrity": "sha1-LP1XXHPkmKtX4xm3f6Aq3vE6lNE=" +--- +> "integrity": "sha512-6XcAB3izeQxPOQQNAJbbdjXbvWEt2Pn9ezPrjr4CwoLwmqsLVbsiEXD19cmmt4mbzOCOCdHzOQiUivUOJLra7w==", +> "license": "MIT" +25595c25903,25904 +< "integrity": "sha1-Wi5H/mmVPx7mMafrof5k0tBlWPU=", +--- +> "integrity": "sha512-sTebg2a1PoicYEZXD5PBdQcTlIJ6hUslrlWr7iV0O7n+i4596s2NQ9I5CaZ5FbXSfya/9WQsrYLANUJv9paYVA==", +> "license": "MIT", +25613c25922,25923 +< "integrity": "sha1-SN6kbfj/djKxDXBrissmWR4rNyc=", +--- +> "integrity": "sha512-ZpJhwvUXHSNL5wYd1RM6CUa2ZuqorG9ngoJ9Ix5Cce+uX7I5O/E06FCJdhSZ33b5dVyeQDnIlWH7B2s5uByZ7g==", +> "license": "MIT", +25634c25944,25945 +< "integrity": "sha1-T7VPgWZS5a4Q6PcvcXo4jHMmU4o=" +--- +> "integrity": "sha512-uNcV98/blRhInPUGQEnj9ekXXfG+q+rfoNSFZgl/eBfog9yBDW9gfUv2AHX/rAF7zZRlzWhbslGhbGQFZlCkZA==", +> "license": "MIT" +25650c25961,25962 +< "integrity": "sha1-Mg4LZwMWc8KNeiu12eAzGkUkBRU=", +--- +> "integrity": "sha512-6SwqWwGFHhTXEiqB/yQgu8FYd//tm786d49y7kizHVCJH7zdzs191UQn3ES3tkkDbUddNRfkCRYqJFHtbLnbCw==", +> "license": "MIT", +25997a26310,26318 +> "node_modules/math-intrinsics": { +> "version": "1.1.0", +> "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", +> "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", +> "license": "MIT", +> "engines": { +> "node": ">= 0.4" +> } +> }, +26577a26899,26910 +> "node_modules/mime": { +> "version": "1.6.0", +> "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", +> "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", +> "license": "MIT", +> "bin": { +> "mime": "cli.js" +> }, +> "engines": { +> "node": ">=4" +> } +> }, +26579,26581c26912,26915 +< "version": "1.51.0", +< "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", +< "integrity": 
"sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", +--- +> "version": "1.52.0", +> "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", +> "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", +> "license": "MIT", +26597,26599c26931,26934 +< "version": "2.1.34", +< "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", +< "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", +--- +> "version": "2.1.35", +> "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", +> "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", +> "license": "MIT", +26601c26936 +< "mime-db": "1.51.0" +--- +> "mime-db": "1.52.0" +26792c27127,27128 +< "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" +--- +> "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", +> "license": "MIT" +27248,27250c27584,27586 +< "version": "2.0.0", +< "resolved": "https://registry.npmjs.org/multer/-/multer-2.0.0.tgz", +< "integrity": "sha512-bS8rPZurbAuHGAnApbM9d4h1wSoYqrOqkE+6a64KLMK9yWU7gJXBDDVklKQ3TPi9DRb85cRs6yXaC0+cjxRtRg==", +--- +> "version": "2.0.1", +> "resolved": "https://registry.npmjs.org/multer/-/multer-2.0.1.tgz", +> "integrity": "sha512-Ug8bXeTIUlxurg8xLTEskKShvcKDZALo1THEX5E41pYCD2sCVub5/kIRIGqWNoqV6szyLyQKV6mD4QUrWE5GCQ==", +27254,27256c27590,27592 +< "busboy": "^1.0.0", +< "concat-stream": "^1.5.2", +< "mkdirp": "^0.5.4", +--- +> "busboy": "^1.6.0", +> "concat-stream": "^2.0.0", +> "mkdirp": "^0.5.6", +27258,27259c27594,27595 +< "type-is": "^1.6.4", +< "xtend": "^4.0.0" +--- +> "type-is": "^1.6.18", +> "xtend": "^4.0.2" +27384c27720,27721 +< "integrity": "sha1-IKMYwwy0X3H+et+/eyHJnBRy7xE=" +--- +> 
"integrity": "sha512-zkVhZUA3y8mbz652WrL5x0fB0ehrBkulWT3TomAQ9iDtyXZvzKeEA6GPxAItBYeNYl5yngKRX612qHOhvMkDeg==", +> "license": "MIT" +27842,27844c28179,28185 +< "version": "1.13.1", +< "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", +< "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", +--- +> "version": "1.13.4", +> "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", +> "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", +> "license": "MIT", +> "engines": { +> "node": ">= 0.4" +> }, +27886,27888c28227,28229 +< "version": "4.1.5", +< "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", +< "integrity": "sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==", +--- +> "version": "4.1.7", +> "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz", +> "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==", +27891c28232,28233 +< "call-bind": "^1.0.5", +--- +> "call-bind": "^1.0.8", +> "call-bound": "^1.0.3", +27893c28235,28236 +< "has-symbols": "^1.0.3", +--- +> "es-object-atoms": "^1.0.0", +> "has-symbols": "^1.1.0", +28129a28473,28489 +> "node_modules/own-keys": { +> "version": "1.0.1", +> "resolved": "https://registry.npmjs.org/own-keys/-/own-keys-1.0.1.tgz", +> "integrity": "sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==", +> "license": "MIT", +> "dependencies": { +> "get-intrinsic": "^1.2.6", +> "object-keys": "^1.1.1", +> "safe-push-apply": "^1.0.0" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +> } +> }, +28684,28686c29044,29047 +< "version": "1.0.10", +< "resolved": 
"https://registry.npmjs.org/path-loader/-/path-loader-1.0.10.tgz", +< "integrity": "sha512-CMP0v6S6z8PHeJ6NFVyVJm6WyJjIwFvyz2b0n2/4bKdS/0uZa/9sKUlYZzubrn3zuDRU0zIuEDX9DZYQ2ZI8TA==", +--- +> "version": "1.0.12", +> "resolved": "https://registry.npmjs.org/path-loader/-/path-loader-1.0.12.tgz", +> "integrity": "sha512-n7oDG8B+k/p818uweWrOixY9/Dsr89o2TkCm6tOTex3fpdo2+BFDgR+KpB37mGKBRsBAlR8CIJMFN0OEy/7hIQ==", +> "license": "MIT", +28689c29050,29129 +< "superagent": "^3.8.3" +--- +> "superagent": "^7.1.6" +> } +> }, +> "node_modules/path-loader/node_modules/debug": { +> "version": "4.4.1", +> "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", +> "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", +> "license": "MIT", +> "dependencies": { +> "ms": "^2.1.3" +> }, +> "engines": { +> "node": ">=6.0" +> }, +> "peerDependenciesMeta": { +> "supports-color": { +> "optional": true +> } +> } +> }, +> "node_modules/path-loader/node_modules/formidable": { +> "version": "2.1.5", +> "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.1.5.tgz", +> "integrity": "sha512-Oz5Hwvwak/DCaXVVUtPn4oLMLLy1CdclLKO1LFgU7XzDpVMUU5UjlSLpGMocyQNNk8F6IJW9M/YdooSn2MRI+Q==", +> "license": "MIT", +> "dependencies": { +> "@paralleldrive/cuid2": "^2.2.2", +> "dezalgo": "^1.0.4", +> "once": "^1.4.0", +> "qs": "^6.11.0" +> }, +> "funding": { +> "url": "https://ko-fi.com/tunnckoCore/commissions" +> } +> }, +> "node_modules/path-loader/node_modules/mime": { +> "version": "2.6.0", +> "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", +> "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", +> "license": "MIT", +> "bin": { +> "mime": "cli.js" +> }, +> "engines": { +> "node": ">=4.0.0" +> } +> }, +> "node_modules/path-loader/node_modules/semver": { +> "version": "7.7.2", +> "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", +> "integrity": 
"sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", +> "license": "ISC", +> "bin": { +> "semver": "bin/semver.js" +> }, +> "engines": { +> "node": ">=10" +> } +> }, +> "node_modules/path-loader/node_modules/superagent": { +> "version": "7.1.6", +> "resolved": "https://registry.npmjs.org/superagent/-/superagent-7.1.6.tgz", +> "integrity": "sha512-gZkVCQR1gy/oUXr+kxJMLDjla434KmSOKbx5iGD30Ql+AkJQ/YlPKECJy2nhqOsHLjGHzoDTXNSjhnvWhzKk7g==", +> "deprecated": "Please upgrade to v9.0.0+ as we have fixed a public vulnerability with formidable dependency. Note that v9.0.0+ requires Node.js v14.18.0+. See https://github.com/ladjs/superagent/pull/1800 for insight. This project is supported and maintained by the team at Forward Email @ https://forwardemail.net", +> "license": "MIT", +> "dependencies": { +> "component-emitter": "^1.3.0", +> "cookiejar": "^2.1.3", +> "debug": "^4.3.4", +> "fast-safe-stringify": "^2.1.1", +> "form-data": "^4.0.0", +> "formidable": "^2.0.1", +> "methods": "^1.1.2", +> "mime": "2.6.0", +> "qs": "^6.10.3", +> "readable-stream": "^3.6.0", +> "semver": "^7.3.7" +> }, +> "engines": { +> "node": ">=6.4.0 <13 || >=14" +31146a31587,31608 +> "node_modules/reflect.getprototypeof": { +> "version": "1.0.10", +> "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", +> "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==", +> "license": "MIT", +> "dependencies": { +> "call-bind": "^1.0.8", +> "define-properties": "^1.2.1", +> "es-abstract": "^1.23.9", +> "es-errors": "^1.3.0", +> "es-object-atoms": "^1.0.0", +> "get-intrinsic": "^1.2.7", +> "get-proto": "^1.0.1", +> "which-builtin-type": "^1.2.1" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +> } +> }, +31211,31213c31673,31675 +< "version": "1.5.2", +< "resolved": 
"https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", +< "integrity": "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", +--- +> "version": "1.5.4", +> "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz", +> "integrity": "sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==", +31216c31678 +< "call-bind": "^1.0.6", +--- +> "call-bind": "^1.0.8", +31219c31681,31683 +< "set-function-name": "^2.0.1" +--- +> "get-proto": "^1.0.1", +> "gopd": "^1.2.0", +> "set-function-name": "^2.0.2" +31679,31681c32143,32145 +< "version": "1.1.2", +< "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz", +< "integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==", +--- +> "version": "1.1.3", +> "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.3.tgz", +> "integrity": "sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==", +31684,31686c32148,32151 +< "call-bind": "^1.0.7", +< "get-intrinsic": "^1.2.4", +< "has-symbols": "^1.0.3", +--- +> "call-bind": "^1.0.8", +> "call-bound": "^1.0.2", +> "get-intrinsic": "^1.2.6", +> "has-symbols": "^1.1.0", +31706a32172,32187 +> "node_modules/safe-push-apply": { +> "version": "1.0.0", +> "resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz", +> "integrity": "sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==", +> "license": "MIT", +> "dependencies": { +> "es-errors": "^1.3.0", +> "isarray": "^2.0.5" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +> } +> }, +31717,31719c32198,32200 +< "version": "1.0.3", +< "resolved": 
"https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", +< "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", +--- +> "version": "1.1.0", +> "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz", +> "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==", +31722c32203 +< "call-bind": "^1.0.6", +--- +> "call-bound": "^1.0.2", +31724c32205 +< "is-regex": "^1.1.4" +--- +> "is-regex": "^1.2.1" +32123,32133d32603 +< "node_modules/send/node_modules/mime": { +< "version": "1.6.0", +< "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", +< "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", +< "bin": { +< "mime": "cli.js" +< }, +< "engines": { +< "node": ">=4" +< } +< }, +32288,32290c32758,32761 +< "version": "2.0.1", +< "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", +< "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==", +--- +> "version": "2.0.2", +> "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", +> "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", +> "license": "MIT", +32292c32763,32764 +< "define-data-property": "^1.0.1", +--- +> "define-data-property": "^1.1.4", +> "es-errors": "^1.3.0", +32294c32766,32780 +< "has-property-descriptors": "^1.0.0" +--- +> "has-property-descriptors": "^1.0.2" +> }, +> "engines": { +> "node": ">= 0.4" +> } +> }, +> "node_modules/set-proto": { +> "version": "1.0.0", +> "resolved": "https://registry.npmjs.org/set-proto/-/set-proto-1.0.0.tgz", +> "integrity": "sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==", +> "license": "MIT", +> "dependencies": { +> 
"dunder-proto": "^1.0.1", +> "es-errors": "^1.3.0", +> "es-object-atoms": "^1.0.0" +32391,32393c32877,32880 +< "version": "1.0.6", +< "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", +< "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", +--- +> "version": "1.1.0", +> "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", +> "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", +> "license": "MIT", +32395d32881 +< "call-bind": "^1.0.7", +32397,32398c32883,32939 +< "get-intrinsic": "^1.2.4", +< "object-inspect": "^1.13.1" +--- +> "object-inspect": "^1.13.3", +> "side-channel-list": "^1.0.0", +> "side-channel-map": "^1.0.1", +> "side-channel-weakmap": "^1.0.2" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +> } +> }, +> "node_modules/side-channel-list": { +> "version": "1.0.0", +> "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", +> "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", +> "license": "MIT", +> "dependencies": { +> "es-errors": "^1.3.0", +> "object-inspect": "^1.13.3" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +> } +> }, +> "node_modules/side-channel-map": { +> "version": "1.0.1", +> "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", +> "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", +> "license": "MIT", +> "dependencies": { +> "call-bound": "^1.0.2", +> "es-errors": "^1.3.0", +> "get-intrinsic": "^1.2.5", +> "object-inspect": "^1.13.3" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +> } +> }, 
+> "node_modules/side-channel-weakmap": { +> "version": "1.0.2", +> "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", +> "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", +> "license": "MIT", +> "dependencies": { +> "call-bound": "^1.0.2", +> "es-errors": "^1.3.0", +> "get-intrinsic": "^1.2.5", +> "object-inspect": "^1.13.3", +> "side-channel-map": "^1.0.1" +32871c33412,33413 +< "integrity": "sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw==" +--- +> "integrity": "sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw==", +> "license": "(WTFPL OR MIT)" +32960c33502,33503 +< "integrity": "sha1-bIOv82kvphJW4M0ZfgXp3hV2kaY=" +--- +> "integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==", +> "license": "ISC" +32994a33538,33554 +> "node_modules/ssh2": { +> "version": "1.16.0", +> "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz", +> "integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==", +> "hasInstallScript": true, +> "dependencies": { +> "asn1": "^0.2.6", +> "bcrypt-pbkdf": "^1.0.2" +> }, +> "engines": { +> "node": ">=10.16.0" +> }, +> "optionalDependencies": { +> "cpu-features": "~0.0.10", +> "nan": "^2.20.0" +> } +> }, +33095,33098c33655,33658 +< "version": "1.0.0", +< "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz", +< "integrity": "sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==", +< "dev": true, +--- +> "version": "1.1.0", +> "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz", +> "integrity": "sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==", +> 
"license": "MIT", +33100c33660,33661 +< "internal-slot": "^1.0.4" +--- +> "es-errors": "^1.3.0", +> "internal-slot": "^1.1.0" +33265,33267c33826,33828 +< "version": "1.2.9", +< "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", +< "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==", +--- +> "version": "1.2.10", +> "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.10.tgz", +> "integrity": "sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==", +33270c33831,33833 +< "call-bind": "^1.0.7", +--- +> "call-bind": "^1.0.8", +> "call-bound": "^1.0.2", +> "define-data-property": "^1.1.4", +33272,33273c33835,33837 +< "es-abstract": "^1.23.0", +< "es-object-atoms": "^1.0.0" +--- +> "es-abstract": "^1.23.5", +> "es-object-atoms": "^1.0.0", +> "has-property-descriptors": "^1.0.2" +33283,33285c33847,33849 +< "version": "1.0.8", +< "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz", +< "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==", +--- +> "version": "1.0.9", +> "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.9.tgz", +> "integrity": "sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ==", +33288c33852,33853 +< "call-bind": "^1.0.7", +--- +> "call-bind": "^1.0.8", +> "call-bound": "^1.0.2", +33291a33857,33859 +> "engines": { +> "node": ">= 0.4" +> }, +33781c34349,34350 +< "deprecated": "Please upgrade to v7.0.2+ of superagent. We have fixed numerous issues with streams, form-data, attach(), filesystem errors not bubbling up (ENOENT on attach()), and all tests are now passing. See the releases tab for more information at . Thanks to @shadowgate15, @spence-s, and @niftylettuce. 
Superagent is sponsored by Forward Email at .", +--- +> "deprecated": "Please upgrade to v9.0.0+ as we have fixed a public vulnerability with formidable dependency. Note that v9.0.0+ requires Node.js v14.18.0+. See https://github.com/ladjs/superagent/pull/1800 for insight. This project is supported and maintained by the team at Forward Email @ https://forwardemail.net", +> "license": "MIT", +33801a34371 +> "license": "MIT", +33807,33809c34377,34380 +< "version": "2.5.1", +< "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", +< "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", +--- +> "version": "2.5.3", +> "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.3.tgz", +> "integrity": "sha512-XHIrMD0NpDrNM/Ckf7XJiBbLl57KEhT3+i3yY+eWm+cqYZJQTZrKo8Y8AWKnuV5GT4scfuUGt9LzNoIx3dU1nQ==", +> "license": "MIT", +33812,33813c34383,34386 +< "combined-stream": "^1.0.6", +< "mime-types": "^2.1.12" +--- +> "combined-stream": "^1.0.8", +> "es-set-tostringtag": "^2.1.0", +> "mime-types": "^2.1.35", +> "safe-buffer": "^5.2.1" +33818a34392,34411 +> "node_modules/superagent/node_modules/form-data/node_modules/safe-buffer": { +> "version": "5.2.1", +> "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", +> "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", +> "funding": [ +> { +> "type": "github", +> "url": "https://github.com/sponsors/feross" +> }, +> { +> "type": "patreon", +> "url": "https://www.patreon.com/feross" +> }, +> { +> "type": "consulting", +> "url": "https://feross.org/support" +> } +> ], +> "license": "MIT" +> }, +33822,33833c34415,34416 +< "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" +< }, +< "node_modules/superagent/node_modules/mime": { +< "version": "1.6.0", +< "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", +< "integrity": 
"sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", +< "bin": { +< "mime": "cli.js" +< }, +< "engines": { +< "node": ">=4" +< } +--- +> "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", +> "license": "MIT" +33836,33838c34419,34422 +< "version": "2.3.7", +< "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", +< "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", +--- +> "version": "2.3.8", +> "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", +> "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", +> "license": "MIT", +33852a34437 +> "license": "MIT", +34099c34684,34685 +< "integrity": "sha1-oJdRnG8e5N1n4wjZtT3cnCslf5c=", +--- +> "integrity": "sha512-O2hZbWqq8x6j0uZ4qWj5dw45WPoAxKsJLJZqOgTqRtPNi8IqA+rDkDV/48S8qanS3KGv1QcVoPNLivMbyHHdAQ==", +> "license": "MIT", +34150,34155d34735 +< "node_modules/swagger-tools/node_modules/commander": { +< "version": "2.11.0", +< "resolved": "https://registry.npmjs.org/commander/-/commander-2.11.0.tgz", +< "integrity": "sha512-b0553uYA5YAEGgyYIGYROzKQ7X5RAqedkfjiZxwi0kL1g3bOaBNNZfYkzt/CL0umgD5wc9Jec2FbB98CjkMRvQ==", +< "license": "MIT" +< }, +34257,34259c34837,34839 +< "version": "3.0.8", +< "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.8.tgz", +< "integrity": "sha512-ZoROL70jptorGAlgAYiLoBLItEKw/fUxg9BSYK/dF/GAGYFJOJJJMvjPAKDJraCXFwadD456FCuvLWgfhMsPwg==", +--- +> "version": "3.0.9", +> "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.9.tgz", +> "integrity": "sha512-XF4w9Xp+ZQgifKakjZYmFdkLoSWd34VGKcsTCwlNWM7QG3ZbaxnTsaBwnjFZqHRf/rROxaR8rXnbtwdvaDI+lA==", +34943,34945c35523,35525 +< "version": "0.6.9", +< "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.9.tgz", +< "integrity": 
"sha512-7bBrcF+/LQzSgFmT0X5YclVqQxtv7TDJ1f8Wj7ibBu/U6BMLeOpUxuZjV7rMc44UtKxlnMFigdhFAIszSX1DMg==", +--- +> "version": "0.6.11", +> "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.11.tgz", +> "integrity": "sha512-vxXDZg8/+p3gblxB6BhhG5yWVn1kGRlaL8O78UDXc3wRnPizB5g83dcvWV1jpDMIPnjZjOFuxlMmE82XJ4407w==", +34948,34950c35528,35530 +< "gopd": "^1.0.1", +< "typedarray.prototype.slice": "^1.0.3", +< "which-typed-array": "^1.1.15" +--- +> "gopd": "^1.2.0", +> "typedarray.prototype.slice": "^1.0.5", +> "which-typed-array": "^1.1.18" +35110,35112c35690,35692 +< "version": "1.0.2", +< "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz", +< "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==", +--- +> "version": "1.0.3", +> "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", +> "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==", +35115c35695 +< "call-bind": "^1.0.7", +--- +> "call-bound": "^1.0.3", +35117c35697 +< "is-typed-array": "^1.1.13" +--- +> "is-typed-array": "^1.1.14" +35124,35126c35704,35706 +< "version": "1.0.1", +< "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", +< "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", +--- +> "version": "1.0.3", +> "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.3.tgz", +> "integrity": "sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg==", +35129c35709 +< "call-bind": "^1.0.7", +--- +> "call-bind": "^1.0.8", +35131,35133c35711,35713 +< "gopd": "^1.0.1", +< "has-proto": "^1.0.3", +< "is-typed-array": "^1.1.13" +--- +> "gopd": "^1.2.0", +> "has-proto": "^1.2.0", +> "is-typed-array": "^1.1.14" +35143,35145c35723,35725 +< 
"version": "1.0.2", +< "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz", +< "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==", +--- +> "version": "1.0.4", +> "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.4.tgz", +> "integrity": "sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ==", +35149c35729 +< "call-bind": "^1.0.7", +--- +> "call-bind": "^1.0.8", +35151,35153c35731,35734 +< "gopd": "^1.0.1", +< "has-proto": "^1.0.3", +< "is-typed-array": "^1.1.13" +--- +> "gopd": "^1.2.0", +> "has-proto": "^1.2.0", +> "is-typed-array": "^1.1.15", +> "reflect.getprototypeof": "^1.0.9" +35163,35165c35744,35746 +< "version": "1.0.6", +< "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz", +< "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==", +--- +> "version": "1.0.7", +> "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.7.tgz", +> "integrity": "sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==", +35171d35751 +< "has-proto": "^1.0.3", +35173c35753,35754 +< "possible-typed-array-names": "^1.0.0" +--- +> "possible-typed-array-names": "^1.0.0", +> "reflect.getprototypeof": "^1.0.6" +35188,35190c35769,35771 +< "version": "1.0.3", +< "resolved": "https://registry.npmjs.org/typedarray.prototype.slice/-/typedarray.prototype.slice-1.0.3.tgz", +< "integrity": "sha512-8WbVAQAUlENo1q3c3zZYuy5k9VzBQvp8AX9WOtbvyWlLM1v5JaSRmjubLjzHF4JFtptjH/5c/i95yaElvcjC0A==", +--- +> "version": "1.0.5", +> "resolved": "https://registry.npmjs.org/typedarray.prototype.slice/-/typedarray.prototype.slice-1.0.5.tgz", +> "integrity": "sha512-q7QNVDGTdl702bVFiI5eY4l/HkgCM6at9KhcFbgUAzezHFbOVy4+0O/lCjsABEQwbZPravVfBIiBVGo89yzHFg==", 
+35193c35774 +< "call-bind": "^1.0.7", +--- +> "call-bind": "^1.0.8", +35195c35776 +< "es-abstract": "^1.23.0", +--- +> "es-abstract": "^1.23.9", +35197,35198c35778,35781 +< "typed-array-buffer": "^1.0.2", +< "typed-array-byte-offset": "^1.0.2" +--- +> "get-proto": "^1.0.1", +> "math-intrinsics": "^1.1.0", +> "typed-array-buffer": "^1.0.3", +> "typed-array-byte-offset": "^1.0.4" +35274,35276c35857,35860 +< "version": "1.0.2", +< "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", +< "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", +--- +> "version": "1.1.0", +> "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz", +> "integrity": "sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==", +> "license": "MIT", +35278c35862 +< "call-bind": "^1.0.2", +--- +> "call-bound": "^1.0.3", +35280,35281c35864,35868 +< "has-symbols": "^1.0.3", +< "which-boxed-primitive": "^1.0.2" +--- +> "has-symbols": "^1.1.0", +> "which-boxed-primitive": "^1.1.1" +> }, +> "engines": { +> "node": ">= 0.4" +35709a36297,36305 +> "node_modules/validator": { +> "version": "10.11.0", +> "resolved": "https://registry.npmjs.org/validator/-/validator-10.11.0.tgz", +> "integrity": "sha512-X/p3UZerAIsbBfN/IwahhYaBbY68EN/UQBWHtsbXGT5bfrH/p4NQzUCG1kF/rtKaNpnJ7jAu6NGTdSNtyNIXMw==", +> "license": "MIT", +> "engines": { +> "node": ">= 0.10" +> } +> }, +36697,36699c37293,37296 +< "version": "1.0.2", +< "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", +< "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", +--- +> "version": "1.1.1", +> "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.1.1.tgz", +> "integrity": 
"sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==", +> "license": "MIT", +36701,36705c37298,37332 +< "is-bigint": "^1.0.1", +< "is-boolean-object": "^1.1.0", +< "is-number-object": "^1.0.4", +< "is-string": "^1.0.5", +< "is-symbol": "^1.0.3" +--- +> "is-bigint": "^1.1.0", +> "is-boolean-object": "^1.2.1", +> "is-number-object": "^1.1.1", +> "is-string": "^1.1.1", +> "is-symbol": "^1.1.1" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +> } +> }, +> "node_modules/which-builtin-type": { +> "version": "1.2.1", +> "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.2.1.tgz", +> "integrity": "sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q==", +> "license": "MIT", +> "dependencies": { +> "call-bound": "^1.0.2", +> "function.prototype.name": "^1.1.6", +> "has-tostringtag": "^1.0.2", +> "is-async-function": "^2.0.0", +> "is-date-object": "^1.1.0", +> "is-finalizationregistry": "^1.1.0", +> "is-generator-function": "^1.0.10", +> "is-regex": "^1.2.1", +> "is-weakref": "^1.0.2", +> "isarray": "^2.0.5", +> "which-boxed-primitive": "^1.1.0", +> "which-collection": "^1.0.2", +> "which-typed-array": "^1.1.16" +> }, +> "engines": { +> "node": ">= 0.4" +36712,36715c37339,37342 +< "version": "1.0.1", +< "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.1.tgz", +< "integrity": "sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A==", +< "dev": true, +--- +> "version": "1.0.2", +> "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz", +> "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==", +> "license": "MIT", +36717,36720c37344,37350 +< "is-map": "^2.0.1", +< "is-set": "^2.0.1", +< "is-weakmap": "^2.0.1", +< "is-weakset": "^2.0.1" +--- +> 
"is-map": "^2.0.3", +> "is-set": "^2.0.3", +> "is-weakmap": "^2.0.2", +> "is-weakset": "^2.0.3" +> }, +> "engines": { +> "node": ">= 0.4" +36733,36735c37363,37365 +< "version": "1.1.15", +< "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", +< "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", +--- +> "version": "1.1.19", +> "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", +> "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", +36739,36741c37369,37373 +< "call-bind": "^1.0.7", +< "for-each": "^0.3.3", +< "gopd": "^1.0.1", +--- +> "call-bind": "^1.0.8", +> "call-bound": "^1.0.4", +> "for-each": "^0.3.5", +> "get-proto": "^1.0.1", +> "gopd": "^1.2.0", +37217a37850 +> "license": "MIT", +37231,37236d37863 +< "node_modules/z-schema/node_modules/commander": { +< "version": "2.20.3", +< "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", +< "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", +< "optional": true +< }, +37241,37250c37868,37870 +< "deprecated": "core-js@<3.4 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. 
Please, upgrade your dependencies to the actual version of core-js.", +< "hasInstallScript": true +< }, +< "node_modules/z-schema/node_modules/validator": { +< "version": "10.11.0", +< "resolved": "https://registry.npmjs.org/validator/-/validator-10.11.0.tgz", +< "integrity": "sha512-X/p3UZerAIsbBfN/IwahhYaBbY68EN/UQBWHtsbXGT5bfrH/p4NQzUCG1kF/rtKaNpnJ7jAu6NGTdSNtyNIXMw==", +< "engines": { +< "node": ">= 0.10" +< } +--- +> "deprecated": "core-js@<3.23.3 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Some versions have web compatibility issues. Please, upgrade your dependencies to the actual version of core-js.", +> "hasInstallScript": true, +> "license": "MIT" +37358c37978 +< "dockerode": "^4.0.5", +--- +> "dockerode": "^4.0.7", +37364c37984 +< "tar-fs": "^3.0.4", +--- +> "tar-fs": "^3.0.9", +37425,37451d38044 +< "services/clsi/node_modules/@grpc/grpc-js": { +< "version": "1.13.2", +< "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.13.2.tgz", +< "integrity": "sha512-nnR5nmL6lxF8YBqb6gWvEgLdLh/Fn+kvAdX5hUOnt48sNSb0riz/93ASd2E5gvanPA41X6Yp25bIfGRp1SMb2g==", +< "license": "Apache-2.0", +< "dependencies": { +< "@grpc/proto-loader": "^0.7.13", +< "@js-sdsl/ordered-map": "^4.4.2" +< }, +< "engines": { +< "node": ">=12.10.0" +< } +< }, +< "services/clsi/node_modules/cpu-features": { +< "version": "0.0.10", +< "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", +< "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", +< "hasInstallScript": true, +< "optional": true, +< "dependencies": { +< "buildcheck": "~0.0.6", +< "nan": "^2.19.0" +< }, +< "engines": { +< "node": ">=10.0.0" +< } +< }, +37461,37529d38053 +< "services/clsi/node_modules/docker-modem": { +< "version": "5.0.6", +< "resolved": 
"https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.6.tgz", +< "integrity": "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", +< "license": "Apache-2.0", +< "dependencies": { +< "debug": "^4.1.1", +< "readable-stream": "^3.5.0", +< "split-ca": "^1.0.1", +< "ssh2": "^1.15.0" +< }, +< "engines": { +< "node": ">= 8.0" +< } +< }, +< "services/clsi/node_modules/dockerode": { +< "version": "4.0.5", +< "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.5.tgz", +< "integrity": "sha512-ZPmKSr1k1571Mrh7oIBS/j0AqAccoecY2yH420ni5j1KyNMgnoTh4Nu4FWunh0HZIJmRSmSysJjBIpa/zyWUEA==", +< "license": "Apache-2.0", +< "dependencies": { +< "@balena/dockerignore": "^1.0.2", +< "@grpc/grpc-js": "^1.11.1", +< "@grpc/proto-loader": "^0.7.13", +< "docker-modem": "^5.0.6", +< "protobufjs": "^7.3.2", +< "tar-fs": "~2.1.2", +< "uuid": "^10.0.0" +< }, +< "engines": { +< "node": ">= 8.0" +< } +< }, +< "services/clsi/node_modules/dockerode/node_modules/tar-fs": { +< "version": "2.1.2", +< "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.2.tgz", +< "integrity": "sha512-EsaAXwxmx8UB7FRKqeozqEPop69DXcmYwTQwXvyAPF352HJsPdkVhvTaDPYqfNgruveJIJy3TA2l+2zj8LJIJA==", +< "license": "MIT", +< "dependencies": { +< "chownr": "^1.1.1", +< "mkdirp-classic": "^0.5.2", +< "pump": "^3.0.0", +< "tar-stream": "^2.1.4" +< } +< }, +< "services/clsi/node_modules/protobufjs": { +< "version": "7.4.0", +< "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", +< "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==", +< "hasInstallScript": true, +< "license": "BSD-3-Clause", +< "dependencies": { +< "@protobufjs/aspromise": "^1.1.2", +< "@protobufjs/base64": "^1.1.2", +< "@protobufjs/codegen": "^2.0.4", +< "@protobufjs/eventemitter": "^1.1.0", +< "@protobufjs/fetch": "^1.1.0", +< "@protobufjs/float": "^1.0.2", +< "@protobufjs/inquire": "^1.1.0", +< "@protobufjs/path": 
"^1.1.2", +< "@protobufjs/pool": "^1.1.0", +< "@protobufjs/utf8": "^1.1.0", +< "@types/node": ">=13.7.0", +< "long": "^5.0.0" +< }, +< "engines": { +< "node": ">=12.0.0" +< } +< }, +37549,37565d38072 +< "services/clsi/node_modules/ssh2": { +< "version": "1.16.0", +< "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz", +< "integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==", +< "hasInstallScript": true, +< "dependencies": { +< "asn1": "^0.2.6", +< "bcrypt-pbkdf": "^1.0.2" +< }, +< "engines": { +< "node": ">=10.16.0" +< }, +< "optionalDependencies": { +< "cpu-features": "~0.0.10", +< "nan": "^2.20.0" +< } +< }, +37578,37590d38084 +< "services/clsi/node_modules/uuid": { +< "version": "10.0.0", +< "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", +< "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", +< "funding": [ +< "https://github.com/sponsors/broofa", +< "https://github.com/sponsors/ctavan" +< ], +< "license": "MIT", +< "bin": { +< "uuid": "dist/bin/uuid" +< } +< }, +38683c39177 +< "multer": "overleaf/multer#199c5ff05bd375c508f4074498237baead7f5148", +--- +> "multer": "github:overleaf/multer#4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23", +39602,39604c40096,40098 +< "version": "2.0.0", +< "resolved": "git+ssh://git@github.com/overleaf/multer.git#199c5ff05bd375c508f4074498237baead7f5148", +< "integrity": "sha512-S5MlIoOgrDr+a2jLS8z7jQlbzvZ0m30U2tRwdyLrxhnnMUQZYEzkVysEv10Dw41RTpM5bQQDs563Vzl1LLhxhQ==", +--- +> "version": "2.0.1", +> "resolved": "git+ssh://git@github.com/overleaf/multer.git#4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23", +> "integrity": "sha512-kkvPK48OQibR5vIoTQBbZp1uWVCvT9MrW3Y0mqdhFYJP/HVJujb4eSCEU0yj+hyf0Y+H/BKCmPdM4fJnzqAO4w==", +39608,39610c40102,40104 +< "busboy": "^1.0.0", +< "concat-stream": "^1.5.2", +< "mkdirp": "^0.5.4", +--- +> "busboy": "^1.6.0", +> "concat-stream": "^2.0.0", +> "mkdirp": "^0.5.6", 
+39612,39613c40106,40107 +< "type-is": "^1.6.4", +< "xtend": "^4.0.0" +--- +> "type-is": "^1.6.18", +> "xtend": "^4.0.2" diff --git a/server-ce/hotfix/5.5.1/pr_25168.patch b/server-ce/hotfix/5.5.1/pr_25168.patch new file mode 100644 index 0000000000..5d496d1f67 --- /dev/null +++ b/server-ce/hotfix/5.5.1/pr_25168.patch @@ -0,0 +1,19 @@ +--- a/services/history-v1/config/custom-environment-variables.json ++++ b/services/history-v1/config/custom-environment-variables.json +@@ -50,12 +50,14 @@ + "history": { + "host": "OVERLEAF_REDIS_HOST", + "password": "OVERLEAF_REDIS_PASS", +- "port": "OVERLEAF_REDIS_PORT" ++ "port": "OVERLEAF_REDIS_PORT", ++ "tls": "OVERLEAF_REDIS_TLS" + }, + "lock": { + "host": "OVERLEAF_REDIS_HOST", + "password": "OVERLEAF_REDIS_PASS", +- "port": "OVERLEAF_REDIS_PORT" ++ "port": "OVERLEAF_REDIS_PORT", ++ "tls": "OVERLEAF_REDIS_TLS" + } + } + } diff --git a/server-ce/hotfix/5.5.1/pr_26086.patch b/server-ce/hotfix/5.5.1/pr_26086.patch new file mode 100644 index 0000000000..fec417b3a5 --- /dev/null +++ b/server-ce/hotfix/5.5.1/pr_26086.patch @@ -0,0 +1,200 @@ +--- a/services/history-v1/api/controllers/project_import.js ++++ b/services/history-v1/api/controllers/project_import.js +@@ -35,6 +35,7 @@ async function importSnapshot(req, res) { + try { + snapshot = Snapshot.fromRaw(rawSnapshot) + } catch (err) { ++ logger.warn({ err, projectId }, 'failed to import snapshot') + return render.unprocessableEntity(res) + } + +@@ -43,6 +44,7 @@ async function importSnapshot(req, res) { + historyId = await chunkStore.initializeProject(projectId, snapshot) + } catch (err) { + if (err instanceof chunkStore.AlreadyInitialized) { ++ logger.warn({ err, projectId }, 'already initialized') + return render.conflict(res) + } else { + throw err +--- a/services/history-v1/api/controllers/projects.js ++++ b/services/history-v1/api/controllers/projects.js +@@ -34,6 +34,7 @@ async function initializeProject(req, res, next) { + res.status(HTTPStatus.OK).json({ projectId }) + } 
catch (err) { + if (err instanceof chunkStore.AlreadyInitialized) { ++ logger.warn({ err, projectId }, 'failed to initialize') + render.conflict(res) + } else { + throw err +@@ -242,11 +243,15 @@ async function createProjectBlob(req, res, next) { + const sizeLimit = new StreamSizeLimit(maxUploadSize) + await pipeline(req, sizeLimit, fs.createWriteStream(tmpPath)) + if (sizeLimit.sizeLimitExceeded) { ++ logger.warn( ++ { projectId, expectedHash, maxUploadSize }, ++ 'blob exceeds size threshold' ++ ) + return render.requestEntityTooLarge(res) + } + const hash = await blobHash.fromFile(tmpPath) + if (hash !== expectedHash) { +- logger.debug({ hash, expectedHash }, 'Hash mismatch') ++ logger.warn({ projectId, hash, expectedHash }, 'Hash mismatch') + return render.conflict(res, 'File hash mismatch') + } + +@@ -343,6 +348,10 @@ async function copyProjectBlob(req, res, next) { + targetBlobStore.getBlob(blobHash), + ]) + if (!sourceBlob) { ++ logger.warn( ++ { sourceProjectId, targetProjectId, blobHash }, ++ 'missing source blob when copying across projects' ++ ) + return render.notFound(res) + } + // Exit early if the blob exists in the target project. 
+--- a/services/history-v1/app.js ++++ b/services/history-v1/app.js +@@ -100,11 +100,13 @@ function setupErrorHandling() { + }) + } + if (err.code === 'ENUM_MISMATCH') { ++ logger.warn({ err, projectId }, err.message) + return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({ + message: 'invalid enum value: ' + err.paramName, + }) + } + if (err.code === 'REQUIRED') { ++ logger.warn({ err, projectId }, err.message) + return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({ + message: err.message, + }) +--- a/services/project-history/app/js/HistoryStoreManager.js ++++ b/services/project-history/app/js/HistoryStoreManager.js +@@ -35,7 +35,10 @@ class StringStream extends stream.Readable { + _mocks.getMostRecentChunk = (projectId, historyId, callback) => { + const path = `projects/${historyId}/latest/history` + logger.debug({ projectId, historyId }, 'getting chunk from history service') +- _requestChunk({ path, json: true }, callback) ++ _requestChunk({ path, json: true }, (err, chunk) => { ++ if (err) return callback(OError.tag(err)) ++ callback(null, chunk) ++ }) + } + + /** +@@ -54,7 +57,10 @@ export function getChunkAtVersion(projectId, historyId, version, callback) { + { projectId, historyId, version }, + 'getting chunk from history service for version' + ) +- _requestChunk({ path, json: true }, callback) ++ _requestChunk({ path, json: true }, (err, chunk) => { ++ if (err) return callback(OError.tag(err)) ++ callback(null, chunk) ++ }) + } + + export function getMostRecentVersion(projectId, historyId, callback) { +@@ -68,8 +74,10 @@ export function getMostRecentVersion(projectId, historyId, callback) { + _.sortBy(chunk.chunk.history.changes || [], x => x.timestamp) + ) + // find the latest project and doc versions in the chunk +- _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) => ++ _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) => { ++ if (err1) err1 = OError.tag(err1) + _getLatestV2DocVersions(projectId, chunk, (err2, 
v2DocVersions) => { ++ if (err2) err2 = OError.tag(err2) + // return the project and doc versions + const projectStructureAndDocVersions = { + project: projectVersion, +@@ -83,7 +91,7 @@ export function getMostRecentVersion(projectId, historyId, callback) { + chunk + ) + }) +- ) ++ }) + }) + } + +@@ -211,7 +219,10 @@ export function getProjectBlob(historyId, blobHash, callback) { + logger.debug({ historyId, blobHash }, 'getting blob from history service') + _requestHistoryService( + { path: `projects/${historyId}/blobs/${blobHash}` }, +- callback ++ (err, blob) => { ++ if (err) return callback(OError.tag(err)) ++ callback(null, blob) ++ } + ) + } + +@@ -277,7 +288,10 @@ function createBlobFromString(historyId, data, fileId, callback) { + (fsPath, cb) => { + _createBlob(historyId, fsPath, cb) + }, +- callback ++ (err, hash) => { ++ if (err) return callback(OError.tag(err)) ++ callback(null, hash) ++ } + ) + } + +@@ -330,7 +344,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { + try { + ranges = HistoryBlobTranslator.createRangeBlobDataFromUpdate(update) + } catch (error) { +- return callback(error) ++ return callback(OError.tag(error)) + } + createBlobFromString( + historyId, +@@ -338,7 +352,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { + `project-${projectId}-doc-${update.doc}`, + (err, fileHash) => { + if (err) { +- return callback(err) ++ return callback(OError.tag(err)) + } + if (ranges) { + createBlobFromString( +@@ -347,7 +361,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { + `project-${projectId}-doc-${update.doc}-ranges`, + (err, rangesHash) => { + if (err) { +- return callback(err) ++ return callback(OError.tag(err)) + } + logger.debug( + { fileHash, rangesHash }, +@@ -415,7 +429,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { + }, + (err, fileHash) => { + if (err) { +- return callback(err) ++ return 
callback(OError.tag(err)) + } + if (update.hash && update.hash !== fileHash) { + logger.warn( +@@ -447,7 +461,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { + }, + (err, fileHash) => { + if (err) { +- return callback(err) ++ return callback(OError.tag(err)) + } + logger.debug({ fileHash }, 'created empty blob for file') + callback(null, { file: fileHash }) +@@ -520,7 +534,10 @@ export function initializeProject(historyId, callback) { + export function deleteProject(projectId, callback) { + _requestHistoryService( + { method: 'DELETE', path: `projects/${projectId}` }, +- callback ++ err => { ++ if (err) return callback(OError.tag(err)) ++ callback(null) ++ } + ) + } + diff --git a/server-ce/hotfix/5.5.1/pr_26091.patch b/server-ce/hotfix/5.5.1/pr_26091.patch new file mode 100644 index 0000000000..c88618b8d0 --- /dev/null +++ b/server-ce/hotfix/5.5.1/pr_26091.patch @@ -0,0 +1,60 @@ +--- a/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs ++++ b/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs +@@ -7,6 +7,7 @@ import { + const { ObjectId } = mongodb + + const MIN_MONGO_VERSION = [6, 0] ++const MIN_MONGO_FEATURE_COMPATIBILITY_VERSION = [6, 0] + + async function main() { + let mongoClient +@@ -18,6 +19,7 @@ async function main() { + } + + await checkMongoVersion(mongoClient) ++ await checkFeatureCompatibilityVersion(mongoClient) + + try { + await testTransactions(mongoClient) +@@ -53,6 +55,41 @@ async function checkMongoVersion(mongoClient) { + } + } + ++async function checkFeatureCompatibilityVersion(mongoClient) { ++ const { ++ featureCompatibilityVersion: { version }, ++ } = await mongoClient ++ .db() ++ .admin() ++ .command({ getParameter: 1, featureCompatibilityVersion: 1 }) ++ const [major, minor] = version.split('.').map(v => parseInt(v)) ++ const [minMajor, minMinor] = MIN_MONGO_FEATURE_COMPATIBILITY_VERSION ++ ++ if (major < minMajor || (major === minMajor && minor < minMinor)) { ++ const 
minVersion = MIN_MONGO_FEATURE_COMPATIBILITY_VERSION.join('.') ++ console.error(` ++The MongoDB server has featureCompatibilityVersion=${version}, but Overleaf requires at least version ${minVersion}. ++ ++Open a mongo shell: ++- Overleaf Toolkit deployments: $ bin/mongo ++- Legacy docker-compose.yml deployments: $ docker exec -it mongo mongosh localhost/sharelatex ++ ++In the mongo shell: ++> db.adminCommand( { setFeatureCompatibilityVersion: "${minMajor}.${minMinor}" } ) ++ ++Verify the new value: ++> db.adminCommand( { getParameter: 1, featureCompatibilityVersion: 1 } ) ++ ... ++ { ++ featureCompatibilityVersion: { version: ${minMajor}.${minMinor}' }, ++... ++ ++Aborting. ++`) ++ process.exit(1) ++ } ++} ++ + main() + .then(() => { + console.error('Mongodb is up.') diff --git a/server-ce/hotfix/5.5.1/pr_26152.patch b/server-ce/hotfix/5.5.1/pr_26152.patch new file mode 100644 index 0000000000..9dc5d50e28 --- /dev/null +++ b/server-ce/hotfix/5.5.1/pr_26152.patch @@ -0,0 +1,16 @@ +--- a/services/web/modules/server-ce-scripts/scripts/create-user.mjs ++++ b/services/web/modules/server-ce-scripts/scripts/create-user.mjs +@@ -48,3 +48,13 @@ Please visit the following URL to set a password for ${email} and log in: + ) + }) + } ++ ++if (filename === process.argv[1]) { ++ try { ++ await main() ++ process.exit(0) ++ } catch (error) { ++ console.error({ error }) ++ process.exit(1) ++ } ++} diff --git a/server-ce/test/Makefile b/server-ce/test/Makefile index 18f4446902..6c56b7e8fe 100644 --- a/server-ce/test/Makefile +++ b/server-ce/test/Makefile @@ -6,8 +6,8 @@ all: test-e2e # Notable the container labels com.docker.compose.project.working_dir and com.docker.compose.project.config_files need to match when creating containers from the docker host (how you started things) and from host-admin (how tests reconfigure the instance). 
export PWD = $(shell pwd) -export TEX_LIVE_DOCKER_IMAGE ?= gcr.io/overleaf-ops/texlive-full:2023.1 -export ALL_TEX_LIVE_DOCKER_IMAGES ?= gcr.io/overleaf-ops/texlive-full:2023.1,gcr.io/overleaf-ops/texlive-full:2022.1 +export TEX_LIVE_DOCKER_IMAGE ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2023.1 +export ALL_TEX_LIVE_DOCKER_IMAGES ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2023.1,us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2022.1 export IMAGE_TAG_PRO ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/pro:latest export CYPRESS_SHARD ?= export COMPOSE_PROJECT_NAME ?= test @@ -20,6 +20,7 @@ test-e2e-native: npm run cypress:open test-e2e: + docker compose build host-admin docker compose up --no-log-prefix --exit-code-from=e2e e2e test-e2e-open: @@ -45,7 +46,7 @@ prefetch_custom_compose_pull: prefetch_custom: prefetch_custom_texlive prefetch_custom_texlive: echo "$$ALL_TEX_LIVE_DOCKER_IMAGES" | tr ',' '\n' | xargs -I% \ - sh -exc 'tag=%; re_tag=quay.io/sharelatex/$${tag#*/*/}; docker pull $$tag; docker tag $$tag $$re_tag' + sh -exc 'tag=%; re_tag=quay.io/sharelatex/$${tag#*/*/*/}; docker pull $$tag; docker tag $$tag $$re_tag' prefetch_custom: prefetch_old prefetch_old: diff --git a/server-ce/test/admin.spec.ts b/server-ce/test/admin.spec.ts index 9031e21b68..50a89fb855 100644 --- a/server-ce/test/admin.spec.ts +++ b/server-ce/test/admin.spec.ts @@ -179,6 +179,21 @@ describe('admin panel', function () { cy.get('nav').findByText('Manage Users').click() }) + it('displays expected tabs', () => { + const tabs = ['Users', 'License Usage'] + cy.get('[role="tab"]').each((el, index) => { + cy.wrap(el).findByText(tabs[index]).click() + }) + cy.get('[role="tab"]').should('have.length', tabs.length) + }) + + it('license usage tab', () => { + cy.get('a').contains('License Usage').click() + cy.findByText( + 'An active user is one who has opened a project in this Server Pro instance in the last 12 months.' 
+ ) + }) + describe('create users', () => { beforeEach(() => { cy.get('a').contains('New User').click() diff --git a/server-ce/test/docker-compose.yml b/server-ce/test/docker-compose.yml index 43f494a084..f4255e241b 100644 --- a/server-ce/test/docker-compose.yml +++ b/server-ce/test/docker-compose.yml @@ -131,7 +131,7 @@ services: saml: restart: always - image: gcr.io/overleaf-ops/saml-test + image: us-east1-docker.pkg.dev/overleaf-ops/ol-docker/saml-test environment: SAML_TEST_SP_ENTITY_ID: 'sharelatex-test-saml' SAML_BASE_URL_PATH: 'http://saml/simplesaml/' diff --git a/services/chat/docker-compose.ci.yml b/services/chat/docker-compose.ci.yml index 8fd86c1fbb..24b57ab084 100644 --- a/services/chat/docker-compose.ci.yml +++ b/services/chat/docker-compose.ci.yml @@ -24,10 +24,13 @@ services: MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" + volumes: + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started user: node + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance diff --git a/services/chat/docker-compose.yml b/services/chat/docker-compose.yml index 89a48339bd..43a30e8cc7 100644 --- a/services/chat/docker-compose.yml +++ b/services/chat/docker-compose.yml @@ -26,6 +26,7 @@ services: - .:/overleaf/services/chat - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/chat environment: ELASTIC_SEARCH_DSN: es:9200 @@ -39,6 +40,7 @@ services: depends_on: mongo: condition: service_started + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance mongo: diff --git a/services/clsi/README.md b/services/clsi/README.md index 16e40b8990..f1cf927d3d 100644 --- a/services/clsi/README.md +++ b/services/clsi/README.md @@ -19,18 +19,18 @@ The CLSI can be 
configured through the following environment variables: * `ALLOWED_IMAGES` - Space separated list of allowed Docker TeX Live images * `CATCH_ERRORS` - Set to `true` to log uncaught exceptions * `COMPILE_GROUP_DOCKER_CONFIGS` - JSON string of Docker configs for compile groups -* `COMPILES_HOST_DIR` - Working directory for LaTeX compiles -* `OUTPUT_HOST_DIR` - Output directory for LaTeX compiles +* `SANDBOXED_COMPILES` - Set to true to use sibling containers +* `SANDBOXED_COMPILES_HOST_DIR_COMPILES` - Working directory for LaTeX compiles +* `SANDBOXED_COMPILES_HOST_DIR_OUTPUT` - Output directory for LaTeX compiles * `COMPILE_SIZE_LIMIT` - Sets the body-parser [limit](https://github.com/expressjs/body-parser#limit) -* `DOCKER_RUNNER` - Set to true to use sibling containers * `DOCKER_RUNTIME` - * `FILESTORE_DOMAIN_OVERRIDE` - The url for the filestore service e.g.`http://$FILESTORE_HOST:3009` * `FILESTORE_PARALLEL_FILE_DOWNLOADS` - Number of parallel file downloads * `LISTEN_ADDRESS` - The address for the RESTful service to listen on. Set to `0.0.0.0` to listen on all network interfaces * `PROCESS_LIFE_SPAN_LIMIT_MS` - Process life span limit in milliseconds * `SMOKE_TEST` - Whether to run smoke tests -* `TEXLIVE_IMAGE` - The TeX Live Docker image to use for sibling containers, e.g. `gcr.io/overleaf-ops/texlive-full:2017.1` -* `TEX_LIVE_IMAGE_NAME_OVERRIDE` - The name of the registry for the Docker image e.g. `gcr.io/overleaf-ops` +* `TEXLIVE_IMAGE` - The TeX Live Docker image to use for sibling containers, e.g. `us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2017.1` +* `TEX_LIVE_IMAGE_NAME_OVERRIDE` - The name of the registry for the Docker image e.g. `us-east1-docker.pkg.dev/overleaf-ops/ol-docker` * `TEXLIVE_IMAGE_USER` - When using sibling containers, the user to run as in the TeX Live image. 
Defaults to `tex` * `TEXLIVE_OPENOUT_ANY` - Sets the `openout_any` environment variable for TeX Live (see the `\openout` primitive [documentation](http://tug.org/texinfohtml/web2c.html#tex-invocation)) @@ -63,10 +63,10 @@ Then start the Docker container: docker run --rm \ -p 127.0.0.1:3013:3013 \ -e LISTEN_ADDRESS=0.0.0.0 \ - -e DOCKER_RUNNER=true \ + -e SANDBOXED_COMPILES=true \ -e TEXLIVE_IMAGE=texlive/texlive \ -e TEXLIVE_IMAGE_USER=root \ - -e COMPILES_HOST_DIR="$PWD/compiles" \ + -e SANDBOXED_COMPILES_HOST_DIR_COMPILES="$PWD/compiles" \ -v "$PWD/compiles:/overleaf/services/clsi/compiles" \ -v "$PWD/cache:/overleaf/services/clsi/cache" \ -v /var/run/docker.sock:/var/run/docker.sock \ diff --git a/services/clsi/buildscript.txt b/services/clsi/buildscript.txt index 709ade18c3..58975135d0 100644 --- a/services/clsi/buildscript.txt +++ b/services/clsi/buildscript.txt @@ -2,7 +2,7 @@ clsi --data-dirs=cache,compiles,output --dependencies= --docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker ---env-add=ENABLE_PDF_CACHING="true",PDF_CACHING_ENABLE_WORKER_POOL="true",ALLOWED_IMAGES=quay.io/sharelatex/texlive-full:2017.1,TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1,TEX_LIVE_IMAGE_NAME_OVERRIDE=us-east1-docker.pkg.dev/overleaf-ops/ol-docker,TEXLIVE_IMAGE_USER="tex",DOCKER_RUNNER="true",COMPILES_HOST_DIR=$PWD/compiles,OUTPUT_HOST_DIR=$PWD/output +--env-add=ENABLE_PDF_CACHING="true",PDF_CACHING_ENABLE_WORKER_POOL="true",ALLOWED_IMAGES=quay.io/sharelatex/texlive-full:2017.1,TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1,TEX_LIVE_IMAGE_NAME_OVERRIDE=us-east1-docker.pkg.dev/overleaf-ops/ol-docker,TEXLIVE_IMAGE_USER="tex",SANDBOXED_COMPILES="true",SANDBOXED_COMPILES_HOST_DIR_COMPILES=$PWD/compiles,SANDBOXED_COMPILES_HOST_DIR_OUTPUT=$PWD/output --env-pass-through= --esmock-loader=False --node-version=22.15.1 diff --git a/services/clsi/docker-compose.ci.yml b/services/clsi/docker-compose.ci.yml index b6643008f7..77a45615b7 100644 --- 
a/services/clsi/docker-compose.ci.yml +++ b/services/clsi/docker-compose.ci.yml @@ -29,9 +29,9 @@ services: TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1 TEX_LIVE_IMAGE_NAME_OVERRIDE: us-east1-docker.pkg.dev/overleaf-ops/ol-docker TEXLIVE_IMAGE_USER: "tex" - DOCKER_RUNNER: "true" - COMPILES_HOST_DIR: $PWD/compiles - OUTPUT_HOST_DIR: $PWD/output + SANDBOXED_COMPILES: "true" + SANDBOXED_COMPILES_HOST_DIR_COMPILES: $PWD/compiles + SANDBOXED_COMPILES_HOST_DIR_OUTPUT: $PWD/output volumes: - ./compiles:/overleaf/services/clsi/compiles - /var/run/docker.sock:/var/run/docker.sock diff --git a/services/clsi/docker-compose.yml b/services/clsi/docker-compose.yml index e0f29ab09d..b8112a8e17 100644 --- a/services/clsi/docker-compose.yml +++ b/services/clsi/docker-compose.yml @@ -47,8 +47,8 @@ services: TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1 TEX_LIVE_IMAGE_NAME_OVERRIDE: us-east1-docker.pkg.dev/overleaf-ops/ol-docker TEXLIVE_IMAGE_USER: "tex" - DOCKER_RUNNER: "true" - COMPILES_HOST_DIR: $PWD/compiles - OUTPUT_HOST_DIR: $PWD/output + SANDBOXED_COMPILES: "true" + SANDBOXED_COMPILES_HOST_DIR_COMPILES: $PWD/compiles + SANDBOXED_COMPILES_HOST_DIR_OUTPUT: $PWD/output command: npm run --silent test:acceptance diff --git a/services/clsi/package.json b/services/clsi/package.json index 86566e0f59..b07430391a 100644 --- a/services/clsi/package.json +++ b/services/clsi/package.json @@ -27,13 +27,13 @@ "async": "^3.2.5", "body-parser": "^1.20.3", "bunyan": "^1.8.15", - "dockerode": "^4.0.5", + "dockerode": "^4.0.7", "express": "^4.21.2", "lodash": "^4.17.21", "p-limit": "^3.1.0", "request": "^2.88.2", "send": "^0.19.0", - "tar-fs": "^3.0.4", + "tar-fs": "^3.0.9", "workerpool": "^6.1.5" }, "devDependencies": { diff --git a/services/contacts/docker-compose.ci.yml b/services/contacts/docker-compose.ci.yml index 8fd86c1fbb..24b57ab084 100644 --- a/services/contacts/docker-compose.ci.yml +++ b/services/contacts/docker-compose.ci.yml @@ -24,10 +24,13 @@ services: 
MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" + volumes: + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started user: node + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance diff --git a/services/contacts/docker-compose.yml b/services/contacts/docker-compose.yml index 65e1a578cd..305232b55d 100644 --- a/services/contacts/docker-compose.yml +++ b/services/contacts/docker-compose.yml @@ -26,6 +26,7 @@ services: - .:/overleaf/services/contacts - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/contacts environment: ELASTIC_SEARCH_DSN: es:9200 @@ -39,6 +40,7 @@ services: depends_on: mongo: condition: service_started + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance mongo: diff --git a/services/contacts/package.json b/services/contacts/package.json index f81f947d6a..db707e55c9 100644 --- a/services/contacts/package.json +++ b/services/contacts/package.json @@ -6,9 +6,9 @@ "main": "app.js", "scripts": { "start": "node app.js", - "test:acceptance:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:acceptance:_run": "mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP", - "test:unit:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec $@ test/unit/js", + "test:unit:_run": "mocha --loader=esmock --recursive --reporter spec $@ test/unit/js", "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", "nodemon": "node --watch app.js", "lint": "eslint --max-warnings 0 --format unix .", diff --git a/services/docstore/app.js 
b/services/docstore/app.js index 76659e8411..ef755c4bb1 100644 --- a/services/docstore/app.js +++ b/services/docstore/app.js @@ -50,6 +50,14 @@ app.param('doc_id', function (req, res, next, docId) { app.get('/project/:project_id/doc-deleted', HttpController.getAllDeletedDocs) app.get('/project/:project_id/doc', HttpController.getAllDocs) app.get('/project/:project_id/ranges', HttpController.getAllRanges) +app.get( + '/project/:project_id/comment-thread-ids', + HttpController.getCommentThreadIds +) +app.get( + '/project/:project_id/tracked-changes-user-ids', + HttpController.getTrackedChangesUserIds +) app.get('/project/:project_id/has-ranges', HttpController.projectHasRanges) app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc) app.get('/project/:project_id/doc/:doc_id/deleted', HttpController.isDocDeleted) diff --git a/services/docstore/app/js/DocArchiveManager.js b/services/docstore/app/js/DocArchiveManager.js index 4390afe18f..d03ee161a8 100644 --- a/services/docstore/app/js/DocArchiveManager.js +++ b/services/docstore/app/js/DocArchiveManager.js @@ -1,5 +1,4 @@ -const { callbackify } = require('node:util') -const MongoManager = require('./MongoManager').promises +const MongoManager = require('./MongoManager') const Errors = require('./Errors') const logger = require('@overleaf/logger') const Settings = require('@overleaf/settings') @@ -8,29 +7,12 @@ const { ReadableString } = require('@overleaf/stream-utils') const RangeManager = require('./RangeManager') const PersistorManager = require('./PersistorManager') const pMap = require('p-map') -const { streamToBuffer } = require('./StreamToBuffer').promises +const { streamToBuffer } = require('./StreamToBuffer') const { BSON } = require('mongodb-legacy') const PARALLEL_JOBS = Settings.parallelArchiveJobs const UN_ARCHIVE_BATCH_SIZE = Settings.unArchiveBatchSize -module.exports = { - archiveAllDocs: callbackify(archiveAllDocs), - archiveDoc: callbackify(archiveDoc), - unArchiveAllDocs: 
callbackify(unArchiveAllDocs), - unarchiveDoc: callbackify(unarchiveDoc), - destroyProject: callbackify(destroyProject), - getDoc: callbackify(getDoc), - promises: { - archiveAllDocs, - archiveDoc, - unArchiveAllDocs, - unarchiveDoc, - destroyProject, - getDoc, - }, -} - async function archiveAllDocs(projectId) { if (!_isArchivingEnabled()) { return @@ -62,6 +44,8 @@ async function archiveDoc(projectId, docId) { throw new Error('doc has no lines') } + RangeManager.fixCommentIds(doc) + // warn about any oversized docs already in mongo const linesSize = BSON.calculateObjectSize(doc.lines || {}) const rangesSize = BSON.calculateObjectSize(doc.ranges || {}) @@ -225,3 +209,12 @@ function _isArchivingEnabled() { return true } + +module.exports = { + archiveAllDocs, + archiveDoc, + unArchiveAllDocs, + unarchiveDoc, + destroyProject, + getDoc, +} diff --git a/services/docstore/app/js/DocManager.js b/services/docstore/app/js/DocManager.js index a9ed99425c..c9e8dadc2c 100644 --- a/services/docstore/app/js/DocManager.js +++ b/services/docstore/app/js/DocManager.js @@ -5,7 +5,6 @@ const _ = require('lodash') const DocArchive = require('./DocArchiveManager') const RangeManager = require('./RangeManager') const Settings = require('@overleaf/settings') -const { callbackifyAll } = require('@overleaf/promise-utils') const { setTimeout } = require('node:timers/promises') /** @@ -29,7 +28,7 @@ const DocManager = { throw new Error('must include inS3 when getting doc') } - const doc = await MongoManager.promises.findDoc(projectId, docId, filter) + const doc = await MongoManager.findDoc(projectId, docId, filter) if (doc == null) { throw new Errors.NotFoundError( @@ -38,15 +37,19 @@ const DocManager = { } if (doc.inS3) { - await DocArchive.promises.unarchiveDoc(projectId, docId) + await DocArchive.unarchiveDoc(projectId, docId) return await DocManager._getDoc(projectId, docId, filter) } + if (filter.ranges) { + RangeManager.fixCommentIds(doc) + } + return doc }, async 
isDocDeleted(projectId, docId) { - const doc = await MongoManager.promises.findDoc(projectId, docId, { + const doc = await MongoManager.findDoc(projectId, docId, { deleted: true, }) @@ -74,7 +77,7 @@ const DocManager = { // returns the doc without any version information async _peekRawDoc(projectId, docId) { - const doc = await MongoManager.promises.findDoc(projectId, docId, { + const doc = await MongoManager.findDoc(projectId, docId, { lines: true, rev: true, deleted: true, @@ -91,7 +94,7 @@ const DocManager = { if (doc.inS3) { // skip the unarchiving to mongo when getting a doc - const archivedDoc = await DocArchive.promises.getDoc(projectId, docId) + const archivedDoc = await DocArchive.getDoc(projectId, docId) Object.assign(doc, archivedDoc) } @@ -102,7 +105,7 @@ const DocManager = { // without unarchiving it (avoids unnecessary writes to mongo) async peekDoc(projectId, docId) { const doc = await DocManager._peekRawDoc(projectId, docId) - await MongoManager.promises.checkRevUnchanged(doc) + await MongoManager.checkRevUnchanged(doc) return doc }, @@ -111,16 +114,18 @@ const DocManager = { lines: true, inS3: true, }) - return doc + if (!doc) throw new Errors.NotFoundError() + if (!Array.isArray(doc.lines)) throw new Errors.DocWithoutLinesError() + return doc.lines.join('\n') }, async getAllDeletedDocs(projectId, filter) { - return await MongoManager.promises.getProjectsDeletedDocs(projectId, filter) + return await MongoManager.getProjectsDeletedDocs(projectId, filter) }, async getAllNonDeletedDocs(projectId, filter) { - await DocArchive.promises.unArchiveAllDocs(projectId) - const docs = await MongoManager.promises.getProjectsDocs( + await DocArchive.unArchiveAllDocs(projectId) + const docs = await MongoManager.getProjectsDocs( projectId, { include_deleted: false }, filter @@ -128,15 +133,46 @@ const DocManager = { if (docs == null) { throw new Errors.NotFoundError(`No docs for project ${projectId}`) } + if (filter.ranges) { + for (const doc of docs) { + 
RangeManager.fixCommentIds(doc) + } + } return docs }, + async getCommentThreadIds(projectId) { + const docs = await DocManager.getAllNonDeletedDocs(projectId, { + _id: true, + ranges: true, + }) + const byDoc = new Map() + for (const doc of docs) { + const ids = new Set() + for (const comment of doc.ranges?.comments || []) { + ids.add(comment.op.t) + } + if (ids.size > 0) byDoc.set(doc._id.toString(), Array.from(ids)) + } + return Object.fromEntries(byDoc.entries()) + }, + + async getTrackedChangesUserIds(projectId) { + const docs = await DocManager.getAllNonDeletedDocs(projectId, { + ranges: true, + }) + const userIds = new Set() + for (const doc of docs) { + for (const change of doc.ranges?.changes || []) { + if (change.metadata.user_id === 'anonymous-user') continue + userIds.add(change.metadata.user_id) + } + } + return Array.from(userIds) + }, + async projectHasRanges(projectId) { - const docs = await MongoManager.promises.getProjectsDocs( - projectId, - {}, - { _id: 1 } - ) + const docs = await MongoManager.getProjectsDocs(projectId, {}, { _id: 1 }) const docIds = docs.map(doc => doc._id) for (const docId of docIds) { const doc = await DocManager.peekDoc(projectId, docId) @@ -247,7 +283,7 @@ const DocManager = { } modified = true - await MongoManager.promises.upsertIntoDocCollection( + await MongoManager.upsertIntoDocCollection( projectId, docId, doc?.rev, @@ -262,11 +298,7 @@ const DocManager = { async patchDoc(projectId, docId, meta) { const projection = { _id: 1, deleted: true } - const doc = await MongoManager.promises.findDoc( - projectId, - docId, - projection - ) + const doc = await MongoManager.findDoc(projectId, docId, projection) if (!doc) { throw new Errors.NotFoundError( `No such project/doc to delete: ${projectId}/${docId}` @@ -275,7 +307,7 @@ const DocManager = { if (meta.deleted && Settings.docstore.archiveOnSoftDelete) { // The user will not read this doc anytime soon. Flush it out of mongo. 
- DocArchive.promises.archiveDoc(projectId, docId).catch(err => { + DocArchive.archiveDoc(projectId, docId).catch(err => { logger.warn( { projectId, docId, err }, 'archiving a single doc in the background failed' @@ -283,15 +315,8 @@ const DocManager = { }) } - await MongoManager.promises.patchDoc(projectId, docId, meta) + await MongoManager.patchDoc(projectId, docId, meta) }, } -module.exports = { - ...callbackifyAll(DocManager, { - multiResult: { - updateDoc: ['modified', 'rev'], - }, - }), - promises: DocManager, -} +module.exports = DocManager diff --git a/services/docstore/app/js/Errors.js b/services/docstore/app/js/Errors.js index bbdbe75c08..7b150cc0db 100644 --- a/services/docstore/app/js/Errors.js +++ b/services/docstore/app/js/Errors.js @@ -10,10 +10,13 @@ class DocRevValueError extends OError {} class DocVersionDecrementedError extends OError {} +class DocWithoutLinesError extends OError {} + module.exports = { Md5MismatchError, DocModifiedError, DocRevValueError, DocVersionDecrementedError, + DocWithoutLinesError, ...Errors, } diff --git a/services/docstore/app/js/HealthChecker.js b/services/docstore/app/js/HealthChecker.js index 34cd5c973c..a5b7ad7e9a 100644 --- a/services/docstore/app/js/HealthChecker.js +++ b/services/docstore/app/js/HealthChecker.js @@ -1,67 +1,35 @@ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ const { db, ObjectId } = require('./mongodb') -const request = require('request') -const async = require('async') const _ = require('lodash') const crypto = require('node:crypto') const settings = require('@overleaf/settings') const { port } = settings.internal.docstore const logger = require('@overleaf/logger') +const { fetchNothing, fetchJson } = require('@overleaf/fetch-utils') -module.exports = { - check(callback) { - const docId = new ObjectId() - const projectId = new ObjectId(settings.docstore.healthCheck.project_id) - const url = `http://127.0.0.1:${port}/project/${projectId}/doc/${docId}` - const lines = [ - 'smoke test - delete me', - `${crypto.randomBytes(32).toString('hex')}`, - ] - const getOpts = () => ({ - url, - timeout: 3000, +async function check() { + const docId = new ObjectId() + const projectId = new ObjectId(settings.docstore.healthCheck.project_id) + const url = `http://127.0.0.1:${port}/project/${projectId}/doc/${docId}` + const lines = [ + 'smoke test - delete me', + `${crypto.randomBytes(32).toString('hex')}`, + ] + logger.debug({ lines, url, docId, projectId }, 'running health check') + let body + try { + await fetchNothing(url, { + method: 'POST', + json: { lines, version: 42, ranges: {} }, + signal: AbortSignal.timeout(3_000), }) - logger.debug({ lines, url, docId, projectId }, 'running health check') - const jobs = [ - function (cb) { - const opts = getOpts() - opts.json = { lines, version: 42, ranges: {} } - return request.post(opts, cb) - }, - function (cb) { - const opts = getOpts() - opts.json = true - return request.get(opts, function (err, res, body) { - if (err != null) { - logger.err({ err }, 'docstore returned a error in health check get') - return cb(err) - } else if (res 
== null) { - return cb(new Error('no response from docstore with get check')) - } else if ((res != null ? res.statusCode : undefined) !== 200) { - return cb(new Error(`status code not 200, its ${res.statusCode}`)) - } else if ( - _.isEqual(body != null ? body.lines : undefined, lines) && - (body != null ? body._id : undefined) === docId.toString() - ) { - return cb() - } else { - return cb( - new Error( - `health check lines not equal ${body.lines} != ${lines}` - ) - ) - } - }) - }, - cb => db.docs.deleteOne({ _id: docId, project_id: projectId }, cb), - ] - return async.series(jobs, callback) - }, + body = await fetchJson(url, { signal: AbortSignal.timeout(3_000) }) + } finally { + await db.docs.deleteOne({ _id: docId, project_id: projectId }) + } + if (!_.isEqual(body?.lines, lines)) { + throw new Error(`health check lines not equal ${body.lines} != ${lines}`) + } +} +module.exports = { + check, } diff --git a/services/docstore/app/js/HttpController.js b/services/docstore/app/js/HttpController.js index 1c4e137033..50c4302aeb 100644 --- a/services/docstore/app/js/HttpController.js +++ b/services/docstore/app/js/HttpController.js @@ -4,143 +4,104 @@ const DocArchive = require('./DocArchiveManager') const HealthChecker = require('./HealthChecker') const Errors = require('./Errors') const Settings = require('@overleaf/settings') +const { expressify } = require('@overleaf/promise-utils') -function getDoc(req, res, next) { +async function getDoc(req, res) { const { doc_id: docId, project_id: projectId } = req.params const includeDeleted = req.query.include_deleted === 'true' logger.debug({ projectId, docId }, 'getting doc') - DocManager.getFullDoc(projectId, docId, function (error, doc) { - if (error) { - return next(error) - } - logger.debug({ docId, projectId }, 'got doc') - if (doc == null) { - res.sendStatus(404) - } else if (doc.deleted && !includeDeleted) { - res.sendStatus(404) - } else { - res.json(_buildDocView(doc)) - } - }) + const doc = await 
DocManager.getFullDoc(projectId, docId) + logger.debug({ docId, projectId }, 'got doc') + if (doc.deleted && !includeDeleted) { + res.sendStatus(404) + } else { + res.json(_buildDocView(doc)) + } } -function peekDoc(req, res, next) { +async function peekDoc(req, res) { const { doc_id: docId, project_id: projectId } = req.params logger.debug({ projectId, docId }, 'peeking doc') - DocManager.peekDoc(projectId, docId, function (error, doc) { - if (error) { - return next(error) - } - if (doc == null) { - res.sendStatus(404) - } else { - res.setHeader('x-doc-status', doc.inS3 ? 'archived' : 'active') - res.json(_buildDocView(doc)) - } - }) + const doc = await DocManager.peekDoc(projectId, docId) + res.setHeader('x-doc-status', doc.inS3 ? 'archived' : 'active') + res.json(_buildDocView(doc)) } -function isDocDeleted(req, res, next) { +async function isDocDeleted(req, res) { const { doc_id: docId, project_id: projectId } = req.params - DocManager.isDocDeleted(projectId, docId, function (error, deleted) { - if (error) { - return next(error) - } - res.json({ deleted }) - }) + const deleted = await DocManager.isDocDeleted(projectId, docId) + res.json({ deleted }) } -function getRawDoc(req, res, next) { +async function getRawDoc(req, res) { const { doc_id: docId, project_id: projectId } = req.params logger.debug({ projectId, docId }, 'getting raw doc') - DocManager.getDocLines(projectId, docId, function (error, doc) { - if (error) { - return next(error) - } - if (doc == null) { - res.sendStatus(404) - } else { - res.setHeader('content-type', 'text/plain') - res.send(_buildRawDocView(doc)) - } - }) + const content = await DocManager.getDocLines(projectId, docId) + res.setHeader('content-type', 'text/plain') + res.send(content) } -function getAllDocs(req, res, next) { +async function getAllDocs(req, res) { const { project_id: projectId } = req.params logger.debug({ projectId }, 'getting all docs') - DocManager.getAllNonDeletedDocs( - projectId, - { lines: true, rev: true }, - 
function (error, docs) { - if (docs == null) { - docs = [] - } - if (error) { - return next(error) - } - const docViews = _buildDocsArrayView(projectId, docs) - for (const docView of docViews) { - if (!docView.lines) { - logger.warn({ projectId, docId: docView._id }, 'missing doc lines') - docView.lines = [] - } - } - res.json(docViews) + const docs = await DocManager.getAllNonDeletedDocs(projectId, { + lines: true, + rev: true, + }) + const docViews = _buildDocsArrayView(projectId, docs) + for (const docView of docViews) { + if (!docView.lines) { + logger.warn({ projectId, docId: docView._id }, 'missing doc lines') + docView.lines = [] } - ) + } + res.json(docViews) } -function getAllDeletedDocs(req, res, next) { +async function getAllDeletedDocs(req, res) { const { project_id: projectId } = req.params logger.debug({ projectId }, 'getting all deleted docs') - DocManager.getAllDeletedDocs( - projectId, - { name: true, deletedAt: true }, - function (error, docs) { - if (error) { - return next(error) - } - res.json( - docs.map(doc => ({ - _id: doc._id.toString(), - name: doc.name, - deletedAt: doc.deletedAt, - })) - ) - } + const docs = await DocManager.getAllDeletedDocs(projectId, { + name: true, + deletedAt: true, + }) + res.json( + docs.map(doc => ({ + _id: doc._id.toString(), + name: doc.name, + deletedAt: doc.deletedAt, + })) ) } -function getAllRanges(req, res, next) { +async function getAllRanges(req, res) { const { project_id: projectId } = req.params logger.debug({ projectId }, 'getting all ranges') - DocManager.getAllNonDeletedDocs( - projectId, - { ranges: true }, - function (error, docs) { - if (docs == null) { - docs = [] - } - if (error) { - return next(error) - } - res.json(_buildDocsArrayView(projectId, docs)) - } - ) -} - -function projectHasRanges(req, res, next) { - const { project_id: projectId } = req.params - DocManager.projectHasRanges(projectId, (err, projectHasRanges) => { - if (err) { - return next(err) - } - res.json({ projectHasRanges }) + 
const docs = await DocManager.getAllNonDeletedDocs(projectId, { + ranges: true, }) + res.json(_buildDocsArrayView(projectId, docs)) } -function updateDoc(req, res, next) { +async function getCommentThreadIds(req, res) { + const { project_id: projectId } = req.params + const threadIds = await DocManager.getCommentThreadIds(projectId) + res.json(threadIds) +} + +async function getTrackedChangesUserIds(req, res) { + const { project_id: projectId } = req.params + const userIds = await DocManager.getTrackedChangesUserIds(projectId) + res.json(userIds) +} + +async function projectHasRanges(req, res) { + const { project_id: projectId } = req.params + const projectHasRanges = await DocManager.projectHasRanges(projectId) + res.json({ projectHasRanges }) +} + +async function updateDoc(req, res) { const { doc_id: docId, project_id: projectId } = req.params const lines = req.body?.lines const version = req.body?.version @@ -172,25 +133,20 @@ function updateDoc(req, res, next) { } logger.debug({ projectId, docId }, 'got http request to update doc') - DocManager.updateDoc( + const { modified, rev } = await DocManager.updateDoc( projectId, docId, lines, version, - ranges, - function (error, modified, rev) { - if (error) { - return next(error) - } - res.json({ - modified, - rev, - }) - } + ranges ) + res.json({ + modified, + rev, + }) } -function patchDoc(req, res, next) { +async function patchDoc(req, res) { const { doc_id: docId, project_id: projectId } = req.params logger.debug({ projectId, docId }, 'patching doc') @@ -203,12 +159,8 @@ function patchDoc(req, res, next) { logger.fatal({ field }, 'joi validation for pathDoc is broken') } }) - DocManager.patchDoc(projectId, docId, meta, function (error) { - if (error) { - return next(error) - } - res.sendStatus(204) - }) + await DocManager.patchDoc(projectId, docId, meta) + res.sendStatus(204) } function _buildDocView(doc) { @@ -221,10 +173,6 @@ function _buildDocView(doc) { return docView } -function _buildRawDocView(doc) { - 
return (doc?.lines ?? []).join('\n') -} - function _buildDocsArrayView(projectId, docs) { const docViews = [] for (const doc of docs) { @@ -241,79 +189,69 @@ function _buildDocsArrayView(projectId, docs) { return docViews } -function archiveAllDocs(req, res, next) { +async function archiveAllDocs(req, res) { const { project_id: projectId } = req.params logger.debug({ projectId }, 'archiving all docs') - DocArchive.archiveAllDocs(projectId, function (error) { - if (error) { - return next(error) - } - res.sendStatus(204) - }) + await DocArchive.archiveAllDocs(projectId) + res.sendStatus(204) } -function archiveDoc(req, res, next) { +async function archiveDoc(req, res) { const { doc_id: docId, project_id: projectId } = req.params logger.debug({ projectId, docId }, 'archiving a doc') - DocArchive.archiveDoc(projectId, docId, function (error) { - if (error) { - return next(error) - } - res.sendStatus(204) - }) + await DocArchive.archiveDoc(projectId, docId) + res.sendStatus(204) } -function unArchiveAllDocs(req, res, next) { +async function unArchiveAllDocs(req, res) { const { project_id: projectId } = req.params logger.debug({ projectId }, 'unarchiving all docs') - DocArchive.unArchiveAllDocs(projectId, function (err) { - if (err) { - if (err instanceof Errors.DocRevValueError) { - logger.warn({ err }, 'Failed to unarchive doc') - return res.sendStatus(409) - } - return next(err) + try { + await DocArchive.unArchiveAllDocs(projectId) + } catch (err) { + if (err instanceof Errors.DocRevValueError) { + logger.warn({ err }, 'Failed to unarchive doc') + return res.sendStatus(409) } - res.sendStatus(200) - }) + throw err + } + res.sendStatus(200) } -function destroyProject(req, res, next) { +async function destroyProject(req, res) { const { project_id: projectId } = req.params logger.debug({ projectId }, 'destroying all docs') - DocArchive.destroyProject(projectId, function (error) { - if (error) { - return next(error) - } - res.sendStatus(204) - }) + await 
DocArchive.destroyProject(projectId) + res.sendStatus(204) } -function healthCheck(req, res) { - HealthChecker.check(function (err) { - if (err) { - logger.err({ err }, 'error performing health check') - res.sendStatus(500) - } else { - res.sendStatus(200) - } - }) +async function healthCheck(req, res) { + try { + await HealthChecker.check() + } catch (err) { + logger.err({ err }, 'error performing health check') + res.sendStatus(500) + return + } + res.sendStatus(200) } module.exports = { - getDoc, - peekDoc, - isDocDeleted, - getRawDoc, - getAllDocs, - getAllDeletedDocs, - getAllRanges, - projectHasRanges, - updateDoc, - patchDoc, - archiveAllDocs, - archiveDoc, - unArchiveAllDocs, - destroyProject, - healthCheck, + getDoc: expressify(getDoc), + peekDoc: expressify(peekDoc), + isDocDeleted: expressify(isDocDeleted), + getRawDoc: expressify(getRawDoc), + getAllDocs: expressify(getAllDocs), + getAllDeletedDocs: expressify(getAllDeletedDocs), + getAllRanges: expressify(getAllRanges), + getTrackedChangesUserIds: expressify(getTrackedChangesUserIds), + getCommentThreadIds: expressify(getCommentThreadIds), + projectHasRanges: expressify(projectHasRanges), + updateDoc: expressify(updateDoc), + patchDoc: expressify(patchDoc), + archiveAllDocs: expressify(archiveAllDocs), + archiveDoc: expressify(archiveDoc), + unArchiveAllDocs: expressify(unArchiveAllDocs), + destroyProject: expressify(destroyProject), + healthCheck: expressify(healthCheck), } diff --git a/services/docstore/app/js/MongoManager.js b/services/docstore/app/js/MongoManager.js index ad1a2d2b40..ef101f91c0 100644 --- a/services/docstore/app/js/MongoManager.js +++ b/services/docstore/app/js/MongoManager.js @@ -1,7 +1,6 @@ const { db, ObjectId } = require('./mongodb') const Settings = require('@overleaf/settings') const Errors = require('./Errors') -const { callbackify } = require('node:util') const ARCHIVING_LOCK_DURATION_MS = Settings.archivingLockDurationMs @@ -241,34 +240,17 @@ async function 
destroyProject(projectId) { } module.exports = { - findDoc: callbackify(findDoc), - getProjectsDeletedDocs: callbackify(getProjectsDeletedDocs), - getProjectsDocs: callbackify(getProjectsDocs), - getArchivedProjectDocs: callbackify(getArchivedProjectDocs), - getNonArchivedProjectDocIds: callbackify(getNonArchivedProjectDocIds), - getNonDeletedArchivedProjectDocs: callbackify( - getNonDeletedArchivedProjectDocs - ), - upsertIntoDocCollection: callbackify(upsertIntoDocCollection), - restoreArchivedDoc: callbackify(restoreArchivedDoc), - patchDoc: callbackify(patchDoc), - getDocForArchiving: callbackify(getDocForArchiving), - markDocAsArchived: callbackify(markDocAsArchived), - checkRevUnchanged: callbackify(checkRevUnchanged), - destroyProject: callbackify(destroyProject), - promises: { - findDoc, - getProjectsDeletedDocs, - getProjectsDocs, - getArchivedProjectDocs, - getNonArchivedProjectDocIds, - getNonDeletedArchivedProjectDocs, - upsertIntoDocCollection, - restoreArchivedDoc, - patchDoc, - getDocForArchiving, - markDocAsArchived, - checkRevUnchanged, - destroyProject, - }, + findDoc, + getProjectsDeletedDocs, + getProjectsDocs, + getArchivedProjectDocs, + getNonArchivedProjectDocIds, + getNonDeletedArchivedProjectDocs, + upsertIntoDocCollection, + restoreArchivedDoc, + patchDoc, + getDocForArchiving, + markDocAsArchived, + checkRevUnchanged, + destroyProject, } diff --git a/services/docstore/app/js/RangeManager.js b/services/docstore/app/js/RangeManager.js index f36f68fe35..2fbadf9468 100644 --- a/services/docstore/app/js/RangeManager.js +++ b/services/docstore/app/js/RangeManager.js @@ -49,15 +49,25 @@ module.exports = RangeManager = { updateMetadata(change.metadata) } for (const comment of Array.from(ranges.comments || [])) { - comment.id = RangeManager._safeObjectId(comment.id) - if ((comment.op != null ? 
comment.op.t : undefined) != null) { - comment.op.t = RangeManager._safeObjectId(comment.op.t) - } + // Two bugs resulted in mismatched ids, prefer the thread id from the op: https://github.com/overleaf/internal/issues/23272 + comment.id = RangeManager._safeObjectId(comment.op?.t || comment.id) + if (comment.op) comment.op.t = comment.id + + // resolved property is added to comments when they are obtained from history, but this state doesn't belong in mongo docs collection + // more info: https://github.com/overleaf/internal/issues/24371#issuecomment-2913095174 + delete comment.op?.resolved updateMetadata(comment.metadata) } return ranges }, + fixCommentIds(doc) { + for (const comment of doc?.ranges?.comments || []) { + // Two bugs resulted in mismatched ids, prefer the thread id from the op: https://github.com/overleaf/internal/issues/23272 + if (comment.op?.t) comment.id = comment.op.t + } + }, + _safeObjectId(data) { try { return new ObjectId(data) diff --git a/services/docstore/app/js/StreamToBuffer.js b/services/docstore/app/js/StreamToBuffer.js index 7de146cd11..09215a7367 100644 --- a/services/docstore/app/js/StreamToBuffer.js +++ b/services/docstore/app/js/StreamToBuffer.js @@ -2,13 +2,9 @@ const { LoggerStream, WritableBuffer } = require('@overleaf/stream-utils') const Settings = require('@overleaf/settings') const logger = require('@overleaf/logger/logging-manager') const { pipeline } = require('node:stream/promises') -const { callbackify } = require('node:util') module.exports = { - streamToBuffer: callbackify(streamToBuffer), - promises: { - streamToBuffer, - }, + streamToBuffer, } async function streamToBuffer(projectId, docId, stream) { diff --git a/services/docstore/docker-compose.ci.yml b/services/docstore/docker-compose.ci.yml index ff222f6514..40decc4aea 100644 --- a/services/docstore/docker-compose.ci.yml +++ b/services/docstore/docker-compose.ci.yml @@ -27,12 +27,15 @@ services: MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test NODE_OPTIONS: 
"--unhandled-rejections=strict" + volumes: + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started gcs: condition: service_healthy user: node + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance diff --git a/services/docstore/docker-compose.yml b/services/docstore/docker-compose.yml index 4a4fa2f10c..a58b862b9a 100644 --- a/services/docstore/docker-compose.yml +++ b/services/docstore/docker-compose.yml @@ -26,6 +26,7 @@ services: - .:/overleaf/services/docstore - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/docstore environment: ELASTIC_SEARCH_DSN: es:9200 @@ -44,6 +45,7 @@ services: condition: service_started gcs: condition: service_healthy + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance mongo: diff --git a/services/docstore/package.json b/services/docstore/package.json index e505f731d3..bf5857fd49 100644 --- a/services/docstore/package.json +++ b/services/docstore/package.json @@ -17,6 +17,7 @@ "types:check": "tsc --noEmit" }, "dependencies": { + "@overleaf/fetch-utils": "*", "@overleaf/logger": "*", "@overleaf/metrics": "*", "@overleaf/o-error": "*", diff --git a/services/docstore/test/acceptance/js/ArchiveDocsTests.js b/services/docstore/test/acceptance/js/ArchiveDocsTests.js index d9228103b6..7e254c7e84 100644 --- a/services/docstore/test/acceptance/js/ArchiveDocsTests.js +++ b/services/docstore/test/acceptance/js/ArchiveDocsTests.js @@ -1001,6 +1001,15 @@ describe('Archiving', function () { }, version: 2, } + this.fixedRanges = { + ...this.doc.ranges, + comments: [ + { + ...this.doc.ranges.comments[0], + id: this.doc.ranges.comments[0].op.t, + }, + ], + } return DocstoreClient.createDoc( this.project_id, this.doc._id, @@ -1048,7 +1057,7 @@ 
describe('Archiving', function () { throw error } s3Doc.lines.should.deep.equal(this.doc.lines) - const ranges = JSON.parse(JSON.stringify(this.doc.ranges)) // ObjectId -> String + const ranges = JSON.parse(JSON.stringify(this.fixedRanges)) // ObjectId -> String s3Doc.ranges.should.deep.equal(ranges) return done() } @@ -1075,7 +1084,7 @@ describe('Archiving', function () { throw error } doc.lines.should.deep.equal(this.doc.lines) - doc.ranges.should.deep.equal(this.doc.ranges) + doc.ranges.should.deep.equal(this.fixedRanges) expect(doc.inS3).not.to.exist return done() }) diff --git a/services/docstore/test/acceptance/js/GettingAllDocsTests.js b/services/docstore/test/acceptance/js/GettingAllDocsTests.js index 8fe5e7d91b..57851b2c3b 100644 --- a/services/docstore/test/acceptance/js/GettingAllDocsTests.js +++ b/services/docstore/test/acceptance/js/GettingAllDocsTests.js @@ -20,30 +20,73 @@ const DocstoreClient = require('./helpers/DocstoreClient') describe('Getting all docs', function () { beforeEach(function (done) { this.project_id = new ObjectId() + this.threadId1 = new ObjectId().toString() + this.threadId2 = new ObjectId().toString() this.docs = [ { _id: new ObjectId(), lines: ['one', 'two', 'three'], - ranges: { mock: 'one' }, + ranges: { + comments: [ + { id: new ObjectId().toString(), op: { t: this.threadId1 } }, + ], + changes: [ + { + id: new ObjectId().toString(), + metadata: { user_id: 'user-id-1' }, + }, + ], + }, rev: 2, }, { _id: new ObjectId(), lines: ['aaa', 'bbb', 'ccc'], - ranges: { mock: 'two' }, + ranges: { + changes: [ + { + id: new ObjectId().toString(), + metadata: { user_id: 'user-id-2' }, + }, + ], + }, rev: 4, }, { _id: new ObjectId(), lines: ['111', '222', '333'], - ranges: { mock: 'three' }, + ranges: { + comments: [ + { id: new ObjectId().toString(), op: { t: this.threadId2 } }, + ], + changes: [ + { + id: new ObjectId().toString(), + metadata: { user_id: 'anonymous-user' }, + }, + ], + }, rev: 6, }, ] + this.fixedRanges = 
this.docs.map(doc => { + if (!doc.ranges?.comments?.length) return doc.ranges + return { + ...doc.ranges, + comments: [ + { ...doc.ranges.comments[0], id: doc.ranges.comments[0].op.t }, + ], + } + }) this.deleted_doc = { _id: new ObjectId(), lines: ['deleted'], - ranges: { mock: 'four' }, + ranges: { + comments: [{ id: new ObjectId().toString(), op: { t: 'thread-id-3' } }], + changes: [ + { id: new ObjectId().toString(), metadata: { user_id: 'user-id-3' } }, + ], + }, rev: 8, } const version = 42 @@ -96,7 +139,7 @@ describe('Getting all docs', function () { }) }) - return it('getAllRanges should return all the (non-deleted) doc ranges', function (done) { + it('getAllRanges should return all the (non-deleted) doc ranges', function (done) { return DocstoreClient.getAllRanges(this.project_id, (error, res, docs) => { if (error != null) { throw error @@ -104,9 +147,38 @@ describe('Getting all docs', function () { docs.length.should.equal(this.docs.length) for (let i = 0; i < docs.length; i++) { const doc = docs[i] - doc.ranges.should.deep.equal(this.docs[i].ranges) + doc.ranges.should.deep.equal(this.fixedRanges[i]) } return done() }) }) + + it('getTrackedChangesUserIds should return all the user ids from (non-deleted) ranges', function (done) { + DocstoreClient.getTrackedChangesUserIds( + this.project_id, + (error, res, userIds) => { + if (error != null) { + throw error + } + userIds.should.deep.equal(['user-id-1', 'user-id-2']) + done() + } + ) + }) + + it('getCommentThreadIds should return all the thread ids from (non-deleted) ranges', function (done) { + DocstoreClient.getCommentThreadIds( + this.project_id, + (error, res, threadIds) => { + if (error != null) { + throw error + } + threadIds.should.deep.equal({ + [this.docs[0]._id.toString()]: [this.threadId1], + [this.docs[2]._id.toString()]: [this.threadId2], + }) + done() + } + ) + }) }) diff --git a/services/docstore/test/acceptance/js/GettingDocsTests.js b/services/docstore/test/acceptance/js/GettingDocsTests.js 
index 121b3c1e24..1cfc53c5c6 100644 --- a/services/docstore/test/acceptance/js/GettingDocsTests.js +++ b/services/docstore/test/acceptance/js/GettingDocsTests.js @@ -28,10 +28,26 @@ describe('Getting a doc', function () { op: { i: 'foo', p: 3 }, meta: { user_id: new ObjectId().toString(), - ts: new Date().toString(), + ts: new Date().toJSON(), }, }, ], + comments: [ + { + id: new ObjectId().toString(), + op: { c: 'comment', p: 1, t: new ObjectId().toString() }, + metadata: { + user_id: new ObjectId().toString(), + ts: new Date().toJSON(), + }, + }, + ], + } + this.fixedRanges = { + ...this.ranges, + comments: [ + { ...this.ranges.comments[0], id: this.ranges.comments[0].op.t }, + ], } return DocstoreApp.ensureRunning(() => { return DocstoreClient.createDoc( @@ -60,7 +76,7 @@ describe('Getting a doc', function () { if (error) return done(error) doc.lines.should.deep.equal(this.lines) doc.version.should.equal(this.version) - doc.ranges.should.deep.equal(this.ranges) + doc.ranges.should.deep.equal(this.fixedRanges) return done() } ) @@ -114,7 +130,7 @@ describe('Getting a doc', function () { if (error) return done(error) doc.lines.should.deep.equal(this.lines) doc.version.should.equal(this.version) - doc.ranges.should.deep.equal(this.ranges) + doc.ranges.should.deep.equal(this.fixedRanges) doc.deleted.should.equal(true) return done() } diff --git a/services/docstore/test/acceptance/js/HealthCheckerTest.js b/services/docstore/test/acceptance/js/HealthCheckerTest.js new file mode 100644 index 0000000000..b25a45312b --- /dev/null +++ b/services/docstore/test/acceptance/js/HealthCheckerTest.js @@ -0,0 +1,28 @@ +const { db } = require('../../../app/js/mongodb') +const DocstoreApp = require('./helpers/DocstoreApp') +const DocstoreClient = require('./helpers/DocstoreClient') +const { expect } = require('chai') + +describe('HealthChecker', function () { + beforeEach('start', function (done) { + DocstoreApp.ensureRunning(done) + }) + beforeEach('clear docs collection', async 
function () { + await db.docs.deleteMany({}) + }) + let res + beforeEach('run health check', function (done) { + DocstoreClient.healthCheck((err, _res) => { + res = _res + done(err) + }) + }) + + it('should return 200', function () { + res.statusCode.should.equal(200) + }) + + it('should not leave any cruft behind', async function () { + expect(await db.docs.find({}).toArray()).to.deep.equal([]) + }) +}) diff --git a/services/docstore/test/acceptance/js/helpers/DocstoreClient.js b/services/docstore/test/acceptance/js/helpers/DocstoreClient.js index 790ec8f237..cb8bce2579 100644 --- a/services/docstore/test/acceptance/js/helpers/DocstoreClient.js +++ b/services/docstore/test/acceptance/js/helpers/DocstoreClient.js @@ -100,6 +100,26 @@ module.exports = DocstoreClient = { ) }, + getCommentThreadIds(projectId, callback) { + request.get( + { + url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/comment-thread-ids`, + json: true, + }, + callback + ) + }, + + getTrackedChangesUserIds(projectId, callback) { + request.get( + { + url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/tracked-changes-user-ids`, + json: true, + }, + callback + ) + }, + updateDoc(projectId, docId, lines, version, ranges, callback) { return request.post( { @@ -181,6 +201,13 @@ module.exports = DocstoreClient = { ) }, + healthCheck(callback) { + request.get( + `http://127.0.0.1:${settings.internal.docstore.port}/health_check`, + callback + ) + }, + getS3Doc(projectId, docId, callback) { getStringFromPersistor( Persistor, diff --git a/services/docstore/test/unit/js/DocArchiveManagerTests.js b/services/docstore/test/unit/js/DocArchiveManagerTests.js index a57f9806c8..2ec1cb2016 100644 --- a/services/docstore/test/unit/js/DocArchiveManagerTests.js +++ b/services/docstore/test/unit/js/DocArchiveManagerTests.js @@ -4,7 +4,7 @@ const modulePath = '../../../app/js/DocArchiveManager.js' const SandboxedModule = require('sandboxed-module') const { ObjectId 
} = require('mongodb-legacy') const Errors = require('../../../app/js/Errors') -const StreamToBuffer = require('../../../app/js/StreamToBuffer').promises +const StreamToBuffer = require('../../../app/js/StreamToBuffer') describe('DocArchiveManager', function () { let DocArchiveManager, @@ -31,6 +31,7 @@ describe('DocArchiveManager', function () { RangeManager = { jsonRangesToMongo: sinon.stub().returns({ mongo: 'ranges' }), + fixCommentIds: sinon.stub(), } Settings = { docstore: { @@ -142,37 +143,33 @@ describe('DocArchiveManager', function () { } MongoManager = { - promises: { - markDocAsArchived: sinon.stub().resolves(), - restoreArchivedDoc: sinon.stub().resolves(), - upsertIntoDocCollection: sinon.stub().resolves(), - getProjectsDocs: sinon.stub().resolves(mongoDocs), - getNonDeletedArchivedProjectDocs: getArchivedProjectDocs, - getNonArchivedProjectDocIds, - getArchivedProjectDocs, - findDoc: sinon.stub().callsFake(fakeGetDoc), - getDocForArchiving: sinon.stub().callsFake(fakeGetDoc), - destroyProject: sinon.stub().resolves(), - }, + markDocAsArchived: sinon.stub().resolves(), + restoreArchivedDoc: sinon.stub().resolves(), + upsertIntoDocCollection: sinon.stub().resolves(), + getProjectsDocs: sinon.stub().resolves(mongoDocs), + getNonDeletedArchivedProjectDocs: getArchivedProjectDocs, + getNonArchivedProjectDocIds, + getArchivedProjectDocs, + findDoc: sinon.stub().callsFake(fakeGetDoc), + getDocForArchiving: sinon.stub().callsFake(fakeGetDoc), + destroyProject: sinon.stub().resolves(), } // Wrap streamToBuffer so that we can pass in something that it expects (in // this case, a Promise) rather than a stubbed stream object streamToBuffer = { - promises: { - streamToBuffer: async () => { - const inputStream = new Promise(resolve => { - stream.on('data', data => resolve(data)) - }) + streamToBuffer: async () => { + const inputStream = new Promise(resolve => { + stream.on('data', data => resolve(data)) + }) - const value = await StreamToBuffer.streamToBuffer( - 
'testProjectId', - 'testDocId', - inputStream - ) + const value = await StreamToBuffer.streamToBuffer( + 'testProjectId', + 'testDocId', + inputStream + ) - return value - }, + return value }, } @@ -192,9 +189,13 @@ describe('DocArchiveManager', function () { describe('archiveDoc', function () { it('should resolve when passed a valid document', async function () { - await expect( - DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) - ).to.eventually.be.fulfilled + await expect(DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)).to + .eventually.be.fulfilled + }) + + it('should fix comment ids', async function () { + await DocArchiveManager.archiveDoc(projectId, mongoDocs[1]._id) + expect(RangeManager.fixCommentIds).to.have.been.called }) it('should throw an error if the doc has no lines', async function () { @@ -202,26 +203,26 @@ describe('DocArchiveManager', function () { doc.lines = null await expect( - DocArchiveManager.promises.archiveDoc(projectId, doc._id) + DocArchiveManager.archiveDoc(projectId, doc._id) ).to.eventually.be.rejectedWith('doc has no lines') }) it('should add the schema version', async function () { - await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[1]._id) + await DocArchiveManager.archiveDoc(projectId, mongoDocs[1]._id) expect(StreamUtils.ReadableString).to.have.been.calledWith( sinon.match(/"schema_v":1/) ) }) it('should calculate the hex md5 sum of the content', async function () { - await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) expect(Crypto.createHash).to.have.been.calledWith('md5') expect(HashUpdate).to.have.been.calledWith(archivedDocJson) expect(HashDigest).to.have.been.calledWith('hex') }) it('should pass the md5 hash to the object persistor for verification', async function () { - await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + await DocArchiveManager.archiveDoc(projectId, 
mongoDocs[0]._id) expect(PersistorManager.sendStream).to.have.been.calledWith( sinon.match.any, @@ -232,7 +233,7 @@ describe('DocArchiveManager', function () { }) it('should pass the correct bucket and key to the persistor', async function () { - await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) expect(PersistorManager.sendStream).to.have.been.calledWith( Settings.docstore.bucket, @@ -241,7 +242,7 @@ describe('DocArchiveManager', function () { }) it('should create a stream from the encoded json and send it', async function () { - await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) expect(StreamUtils.ReadableString).to.have.been.calledWith( archivedDocJson ) @@ -253,8 +254,8 @@ describe('DocArchiveManager', function () { }) it('should mark the doc as archived', async function () { - await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) - expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith( + await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) + expect(MongoManager.markDocAsArchived).to.have.been.calledWith( projectId, mongoDocs[0]._id, mongoDocs[0].rev @@ -267,8 +268,8 @@ describe('DocArchiveManager', function () { }) it('should bail out early', async function () { - await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) - expect(MongoManager.promises.getDocForArchiving).to.not.have.been.called + await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) + expect(MongoManager.getDocForArchiving).to.not.have.been.called }) }) @@ -285,7 +286,7 @@ describe('DocArchiveManager', function () { it('should return an error', async function () { await expect( - DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) ).to.eventually.be.rejectedWith('null bytes 
detected') }) }) @@ -296,21 +297,19 @@ describe('DocArchiveManager', function () { describe('when the doc is in S3', function () { beforeEach(function () { - MongoManager.promises.findDoc = sinon - .stub() - .resolves({ inS3: true, rev }) + MongoManager.findDoc = sinon.stub().resolves({ inS3: true, rev }) docId = mongoDocs[0]._id lines = ['doc', 'lines'] rev = 123 }) it('should resolve when passed a valid document', async function () { - await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId)) - .to.eventually.be.fulfilled + await expect(DocArchiveManager.unarchiveDoc(projectId, docId)).to + .eventually.be.fulfilled }) it('should test md5 validity with the raw buffer', async function () { - await DocArchiveManager.promises.unarchiveDoc(projectId, docId) + await DocArchiveManager.unarchiveDoc(projectId, docId) expect(HashUpdate).to.have.been.calledWith( sinon.match.instanceOf(Buffer) ) @@ -319,15 +318,17 @@ describe('DocArchiveManager', function () { it('should throw an error if the md5 does not match', async function () { PersistorManager.getObjectMd5Hash.resolves('badf00d') await expect( - DocArchiveManager.promises.unarchiveDoc(projectId, docId) + DocArchiveManager.unarchiveDoc(projectId, docId) ).to.eventually.be.rejected.and.be.instanceof(Errors.Md5MismatchError) }) it('should restore the doc in Mongo', async function () { - await DocArchiveManager.promises.unarchiveDoc(projectId, docId) - expect( - MongoManager.promises.restoreArchivedDoc - ).to.have.been.calledWith(projectId, docId, archivedDoc) + await DocArchiveManager.unarchiveDoc(projectId, docId) + expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith( + projectId, + docId, + archivedDoc + ) }) describe('when archiving is not configured', function () { @@ -337,15 +338,15 @@ describe('DocArchiveManager', function () { it('should error out on archived doc', async function () { await expect( - DocArchiveManager.promises.unarchiveDoc(projectId, docId) + 
DocArchiveManager.unarchiveDoc(projectId, docId) ).to.eventually.be.rejected.and.match( /found archived doc, but archiving backend is not configured/ ) }) it('should return early on non-archived doc', async function () { - MongoManager.promises.findDoc = sinon.stub().resolves({ rev }) - await DocArchiveManager.promises.unarchiveDoc(projectId, docId) + MongoManager.findDoc = sinon.stub().resolves({ rev }) + await DocArchiveManager.unarchiveDoc(projectId, docId) expect(PersistorManager.getObjectMd5Hash).to.not.have.been.called }) }) @@ -363,10 +364,12 @@ describe('DocArchiveManager', function () { }) it('should return the docs lines', async function () { - await DocArchiveManager.promises.unarchiveDoc(projectId, docId) - expect( - MongoManager.promises.restoreArchivedDoc - ).to.have.been.calledWith(projectId, docId, { lines, rev }) + await DocArchiveManager.unarchiveDoc(projectId, docId) + expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith( + projectId, + docId, + { lines, rev } + ) }) }) @@ -385,14 +388,16 @@ describe('DocArchiveManager', function () { }) it('should return the doc lines and ranges', async function () { - await DocArchiveManager.promises.unarchiveDoc(projectId, docId) - expect( - MongoManager.promises.restoreArchivedDoc - ).to.have.been.calledWith(projectId, docId, { - lines, - ranges: { mongo: 'ranges' }, - rev: 456, - }) + await DocArchiveManager.unarchiveDoc(projectId, docId) + expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith( + projectId, + docId, + { + lines, + ranges: { mongo: 'ranges' }, + rev: 456, + } + ) }) }) @@ -406,10 +411,12 @@ describe('DocArchiveManager', function () { }) it('should return only the doc lines', async function () { - await DocArchiveManager.promises.unarchiveDoc(projectId, docId) - expect( - MongoManager.promises.restoreArchivedDoc - ).to.have.been.calledWith(projectId, docId, { lines, rev: 456 }) + await DocArchiveManager.unarchiveDoc(projectId, docId) + 
expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith( + projectId, + docId, + { lines, rev: 456 } + ) }) }) @@ -423,10 +430,12 @@ describe('DocArchiveManager', function () { }) it('should use the rev obtained from Mongo', async function () { - await DocArchiveManager.promises.unarchiveDoc(projectId, docId) - expect( - MongoManager.promises.restoreArchivedDoc - ).to.have.been.calledWith(projectId, docId, { lines, rev }) + await DocArchiveManager.unarchiveDoc(projectId, docId) + expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith( + projectId, + docId, + { lines, rev } + ) }) }) @@ -441,7 +450,7 @@ describe('DocArchiveManager', function () { it('should throw an error', async function () { await expect( - DocArchiveManager.promises.unarchiveDoc(projectId, docId) + DocArchiveManager.unarchiveDoc(projectId, docId) ).to.eventually.be.rejectedWith( "I don't understand the doc format in s3" ) @@ -451,8 +460,8 @@ describe('DocArchiveManager', function () { }) it('should not do anything if the file is already unarchived', async function () { - MongoManager.promises.findDoc.resolves({ inS3: false }) - await DocArchiveManager.promises.unarchiveDoc(projectId, docId) + MongoManager.findDoc.resolves({ inS3: false }) + await DocArchiveManager.unarchiveDoc(projectId, docId) expect(PersistorManager.getObjectStream).not.to.have.been.called }) @@ -461,7 +470,7 @@ describe('DocArchiveManager', function () { .stub() .rejects(new Errors.NotFoundError()) await expect( - DocArchiveManager.promises.unarchiveDoc(projectId, docId) + DocArchiveManager.unarchiveDoc(projectId, docId) ).to.eventually.be.rejected.and.be.instanceof(Errors.NotFoundError) }) }) @@ -469,13 +478,11 @@ describe('DocArchiveManager', function () { describe('destroyProject', function () { describe('when archiving is enabled', function () { beforeEach(async function () { - await DocArchiveManager.promises.destroyProject(projectId) + await DocArchiveManager.destroyProject(projectId) }) it('should 
delete the project in Mongo', function () { - expect(MongoManager.promises.destroyProject).to.have.been.calledWith( - projectId - ) + expect(MongoManager.destroyProject).to.have.been.calledWith(projectId) }) it('should delete the project in the persistor', function () { @@ -489,13 +496,11 @@ describe('DocArchiveManager', function () { describe('when archiving is disabled', function () { beforeEach(async function () { Settings.docstore.backend = '' - await DocArchiveManager.promises.destroyProject(projectId) + await DocArchiveManager.destroyProject(projectId) }) it('should delete the project in Mongo', function () { - expect(MongoManager.promises.destroyProject).to.have.been.calledWith( - projectId - ) + expect(MongoManager.destroyProject).to.have.been.calledWith(projectId) }) it('should not delete the project in the persistor', function () { @@ -506,33 +511,35 @@ describe('DocArchiveManager', function () { describe('archiveAllDocs', function () { it('should resolve with valid arguments', async function () { - await expect(DocArchiveManager.promises.archiveAllDocs(projectId)).to - .eventually.be.fulfilled + await expect(DocArchiveManager.archiveAllDocs(projectId)).to.eventually.be + .fulfilled }) it('should archive all project docs which are not in s3', async function () { - await DocArchiveManager.promises.archiveAllDocs(projectId) + await DocArchiveManager.archiveAllDocs(projectId) // not inS3 - expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith( + expect(MongoManager.markDocAsArchived).to.have.been.calledWith( projectId, mongoDocs[0]._id ) - expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith( + expect(MongoManager.markDocAsArchived).to.have.been.calledWith( projectId, mongoDocs[1]._id ) - expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith( + expect(MongoManager.markDocAsArchived).to.have.been.calledWith( projectId, mongoDocs[4]._id ) // inS3 - expect( - MongoManager.promises.markDocAsArchived - 
).not.to.have.been.calledWith(projectId, mongoDocs[2]._id) - expect( - MongoManager.promises.markDocAsArchived - ).not.to.have.been.calledWith(projectId, mongoDocs[3]._id) + expect(MongoManager.markDocAsArchived).not.to.have.been.calledWith( + projectId, + mongoDocs[2]._id + ) + expect(MongoManager.markDocAsArchived).not.to.have.been.calledWith( + projectId, + mongoDocs[3]._id + ) }) describe('when archiving is not configured', function () { @@ -541,21 +548,20 @@ describe('DocArchiveManager', function () { }) it('should bail out early', async function () { - await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) - expect(MongoManager.promises.getNonArchivedProjectDocIds).to.not.have - .been.called + await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) + expect(MongoManager.getNonArchivedProjectDocIds).to.not.have.been.called }) }) }) describe('unArchiveAllDocs', function () { it('should resolve with valid arguments', async function () { - await expect(DocArchiveManager.promises.unArchiveAllDocs(projectId)).to - .eventually.be.fulfilled + await expect(DocArchiveManager.unArchiveAllDocs(projectId)).to.eventually + .be.fulfilled }) it('should unarchive all inS3 docs', async function () { - await DocArchiveManager.promises.unArchiveAllDocs(projectId) + await DocArchiveManager.unArchiveAllDocs(projectId) for (const doc of archivedDocs) { expect(PersistorManager.getObjectStream).to.have.been.calledWith( @@ -571,9 +577,9 @@ describe('DocArchiveManager', function () { }) it('should bail out early', async function () { - await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) - expect(MongoManager.promises.getNonDeletedArchivedProjectDocs).to.not - .have.been.called + await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) + expect(MongoManager.getNonDeletedArchivedProjectDocs).to.not.have.been + .called }) }) }) diff --git a/services/docstore/test/unit/js/DocManagerTests.js 
b/services/docstore/test/unit/js/DocManagerTests.js index 8405520e6e..67a2f26547 100644 --- a/services/docstore/test/unit/js/DocManagerTests.js +++ b/services/docstore/test/unit/js/DocManagerTests.js @@ -17,25 +17,22 @@ describe('DocManager', function () { this.version = 42 this.MongoManager = { - promises: { - findDoc: sinon.stub(), - getProjectsDocs: sinon.stub(), - patchDoc: sinon.stub().resolves(), - upsertIntoDocCollection: sinon.stub().resolves(), - }, + findDoc: sinon.stub(), + getProjectsDocs: sinon.stub(), + patchDoc: sinon.stub().resolves(), + upsertIntoDocCollection: sinon.stub().resolves(), } this.DocArchiveManager = { - promises: { - unarchiveDoc: sinon.stub(), - unArchiveAllDocs: sinon.stub(), - archiveDoc: sinon.stub().resolves(), - }, + unarchiveDoc: sinon.stub(), + unArchiveAllDocs: sinon.stub(), + archiveDoc: sinon.stub().resolves(), } this.RangeManager = { jsonRangesToMongo(r) { return r }, shouldUpdateRanges: sinon.stub().returns(false), + fixCommentIds: sinon.stub(), } this.settings = { docstore: {} } @@ -52,7 +49,7 @@ describe('DocManager', function () { describe('getFullDoc', function () { beforeEach(function () { - this.DocManager.promises._getDoc = sinon.stub() + this.DocManager._getDoc = sinon.stub() this.doc = { _id: this.doc_id, lines: ['2134'], @@ -60,13 +57,10 @@ describe('DocManager', function () { }) it('should call get doc with a quick filter', async function () { - this.DocManager.promises._getDoc.resolves(this.doc) - const doc = await this.DocManager.promises.getFullDoc( - this.project_id, - this.doc_id - ) + this.DocManager._getDoc.resolves(this.doc) + const doc = await this.DocManager.getFullDoc(this.project_id, this.doc_id) doc.should.equal(this.doc) - this.DocManager.promises._getDoc + this.DocManager._getDoc .calledWith(this.project_id, this.doc_id, { lines: true, rev: true, @@ -79,27 +73,27 @@ describe('DocManager', function () { }) it('should return error when get doc errors', async function () { - 
this.DocManager.promises._getDoc.rejects(this.stubbedError) + this.DocManager._getDoc.rejects(this.stubbedError) await expect( - this.DocManager.promises.getFullDoc(this.project_id, this.doc_id) + this.DocManager.getFullDoc(this.project_id, this.doc_id) ).to.be.rejectedWith(this.stubbedError) }) }) describe('getRawDoc', function () { beforeEach(function () { - this.DocManager.promises._getDoc = sinon.stub() + this.DocManager._getDoc = sinon.stub() this.doc = { lines: ['2134'] } }) it('should call get doc with a quick filter', async function () { - this.DocManager.promises._getDoc.resolves(this.doc) - const doc = await this.DocManager.promises.getDocLines( + this.DocManager._getDoc.resolves(this.doc) + const content = await this.DocManager.getDocLines( this.project_id, this.doc_id ) - doc.should.equal(this.doc) - this.DocManager.promises._getDoc + content.should.equal(this.doc.lines.join('\n')) + this.DocManager._getDoc .calledWith(this.project_id, this.doc_id, { lines: true, inS3: true, @@ -108,11 +102,46 @@ describe('DocManager', function () { }) it('should return error when get doc errors', async function () { - this.DocManager.promises._getDoc.rejects(this.stubbedError) + this.DocManager._getDoc.rejects(this.stubbedError) await expect( - this.DocManager.promises.getDocLines(this.project_id, this.doc_id) + this.DocManager.getDocLines(this.project_id, this.doc_id) ).to.be.rejectedWith(this.stubbedError) }) + + it('should return error when get doc does not exist', async function () { + this.DocManager._getDoc.resolves(null) + await expect( + this.DocManager.getDocLines(this.project_id, this.doc_id) + ).to.be.rejectedWith(Errors.NotFoundError) + }) + + it('should return error when get doc has no lines', async function () { + this.DocManager._getDoc.resolves({}) + await expect( + this.DocManager.getDocLines(this.project_id, this.doc_id) + ).to.be.rejectedWith(Errors.DocWithoutLinesError) + }) + }) + + describe('_getDoc', function () { + it('should return error when 
get doc does not exist', async function () { + this.MongoManager.findDoc.resolves(null) + await expect( + this.DocManager._getDoc(this.project_id, this.doc_id, { inS3: true }) + ).to.be.rejectedWith(Errors.NotFoundError) + }) + + it('should fix comment ids', async function () { + this.MongoManager.findDoc.resolves({ + _id: this.doc_id, + ranges: {}, + }) + await this.DocManager._getDoc(this.project_id, this.doc_id, { + inS3: true, + ranges: true, + }) + expect(this.RangeManager.fixCommentIds).to.have.been.called + }) }) describe('getDoc', function () { @@ -128,26 +157,25 @@ describe('DocManager', function () { describe('when using a filter', function () { beforeEach(function () { - this.MongoManager.promises.findDoc.resolves(this.doc) + this.MongoManager.findDoc.resolves(this.doc) }) it('should error if inS3 is not set to true', async function () { await expect( - this.DocManager.promises._getDoc(this.project_id, this.doc_id, { + this.DocManager._getDoc(this.project_id, this.doc_id, { inS3: false, }) ).to.be.rejected }) it('should always get inS3 even when no filter is passed', async function () { - await expect( - this.DocManager.promises._getDoc(this.project_id, this.doc_id) - ).to.be.rejected - this.MongoManager.promises.findDoc.called.should.equal(false) + await expect(this.DocManager._getDoc(this.project_id, this.doc_id)).to + .be.rejected + this.MongoManager.findDoc.called.should.equal(false) }) it('should not error if inS3 is set to true', async function () { - await this.DocManager.promises._getDoc(this.project_id, this.doc_id, { + await this.DocManager._getDoc(this.project_id, this.doc_id, { inS3: true, }) }) @@ -155,8 +183,8 @@ describe('DocManager', function () { describe('when the doc is in the doc collection', function () { beforeEach(async function () { - this.MongoManager.promises.findDoc.resolves(this.doc) - this.result = await this.DocManager.promises._getDoc( + this.MongoManager.findDoc.resolves(this.doc) + this.result = await 
this.DocManager._getDoc( this.project_id, this.doc_id, { version: true, inS3: true } @@ -164,7 +192,7 @@ describe('DocManager', function () { }) it('should get the doc from the doc collection', function () { - this.MongoManager.promises.findDoc + this.MongoManager.findDoc .calledWith(this.project_id, this.doc_id) .should.equal(true) }) @@ -177,9 +205,9 @@ describe('DocManager', function () { describe('when MongoManager.findDoc errors', function () { it('should return the error', async function () { - this.MongoManager.promises.findDoc.rejects(this.stubbedError) + this.MongoManager.findDoc.rejects(this.stubbedError) await expect( - this.DocManager.promises._getDoc(this.project_id, this.doc_id, { + this.DocManager._getDoc(this.project_id, this.doc_id, { version: true, inS3: true, }) @@ -202,15 +230,15 @@ describe('DocManager', function () { version: 2, inS3: false, } - this.MongoManager.promises.findDoc.resolves(this.doc) - this.DocArchiveManager.promises.unarchiveDoc.callsFake( + this.MongoManager.findDoc.resolves(this.doc) + this.DocArchiveManager.unarchiveDoc.callsFake( async (projectId, docId) => { - this.MongoManager.promises.findDoc.resolves({ + this.MongoManager.findDoc.resolves({ ...this.unarchivedDoc, }) } ) - this.result = await this.DocManager.promises._getDoc( + this.result = await this.DocManager._getDoc( this.project_id, this.doc_id, { @@ -221,13 +249,13 @@ describe('DocManager', function () { }) it('should call the DocArchive to unarchive the doc', function () { - this.DocArchiveManager.promises.unarchiveDoc + this.DocArchiveManager.unarchiveDoc .calledWith(this.project_id, this.doc_id) .should.equal(true) }) it('should look up the doc twice', function () { - this.MongoManager.promises.findDoc.calledTwice.should.equal(true) + this.MongoManager.findDoc.calledTwice.should.equal(true) }) it('should return the doc', function () { @@ -239,9 +267,9 @@ describe('DocManager', function () { describe('when the doc does not exist in the docs collection', function 
() { it('should return a NotFoundError', async function () { - this.MongoManager.promises.findDoc.resolves(null) + this.MongoManager.findDoc.resolves(null) await expect( - this.DocManager.promises._getDoc(this.project_id, this.doc_id, { + this.DocManager._getDoc(this.project_id, this.doc_id, { version: true, inS3: true, }) @@ -262,23 +290,27 @@ describe('DocManager', function () { lines: ['mock-lines'], }, ] - this.MongoManager.promises.getProjectsDocs.resolves(this.docs) - this.DocArchiveManager.promises.unArchiveAllDocs.resolves(this.docs) - this.filter = { lines: true } - this.result = await this.DocManager.promises.getAllNonDeletedDocs( + this.MongoManager.getProjectsDocs.resolves(this.docs) + this.DocArchiveManager.unArchiveAllDocs.resolves(this.docs) + this.filter = { lines: true, ranges: true } + this.result = await this.DocManager.getAllNonDeletedDocs( this.project_id, this.filter ) }) it('should get the project from the database', function () { - this.MongoManager.promises.getProjectsDocs.should.have.been.calledWith( + this.MongoManager.getProjectsDocs.should.have.been.calledWith( this.project_id, { include_deleted: false }, this.filter ) }) + it('should fix comment ids', async function () { + expect(this.RangeManager.fixCommentIds).to.have.been.called + }) + it('should return the docs', function () { expect(this.result).to.deep.equal(this.docs) }) @@ -286,13 +318,10 @@ describe('DocManager', function () { describe('when there are no docs for the project', function () { it('should return a NotFoundError', async function () { - this.MongoManager.promises.getProjectsDocs.resolves(null) - this.DocArchiveManager.promises.unArchiveAllDocs.resolves(null) + this.MongoManager.getProjectsDocs.resolves(null) + this.DocArchiveManager.unArchiveAllDocs.resolves(null) await expect( - this.DocManager.promises.getAllNonDeletedDocs( - this.project_id, - this.filter - ) + this.DocManager.getAllNonDeletedDocs(this.project_id, this.filter) ).to.be.rejectedWith(`No docs for 
project ${this.project_id}`) }) }) @@ -303,7 +332,7 @@ describe('DocManager', function () { beforeEach(function () { this.lines = ['mock', 'doc', 'lines'] this.rev = 77 - this.MongoManager.promises.findDoc.resolves({ + this.MongoManager.findDoc.resolves({ _id: new ObjectId(this.doc_id), }) this.meta = {} @@ -311,7 +340,7 @@ describe('DocManager', function () { describe('standard path', function () { beforeEach(async function () { - await this.DocManager.promises.patchDoc( + await this.DocManager.patchDoc( this.project_id, this.doc_id, this.meta @@ -319,14 +348,14 @@ describe('DocManager', function () { }) it('should get the doc', function () { - expect(this.MongoManager.promises.findDoc).to.have.been.calledWith( + expect(this.MongoManager.findDoc).to.have.been.calledWith( this.project_id, this.doc_id ) }) it('should persist the meta', function () { - expect(this.MongoManager.promises.patchDoc).to.have.been.calledWith( + expect(this.MongoManager.patchDoc).to.have.been.calledWith( this.project_id, this.doc_id, this.meta @@ -339,7 +368,7 @@ describe('DocManager', function () { this.settings.docstore.archiveOnSoftDelete = false this.meta.deleted = true - await this.DocManager.promises.patchDoc( + await this.DocManager.patchDoc( this.project_id, this.doc_id, this.meta @@ -347,8 +376,7 @@ describe('DocManager', function () { }) it('should not flush the doc out of mongo', function () { - expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been - .called + expect(this.DocArchiveManager.archiveDoc).to.not.have.been.called }) }) @@ -356,7 +384,7 @@ describe('DocManager', function () { beforeEach(async function () { this.settings.docstore.archiveOnSoftDelete = false this.meta.deleted = false - await this.DocManager.promises.patchDoc( + await this.DocManager.patchDoc( this.project_id, this.doc_id, this.meta @@ -364,8 +392,7 @@ describe('DocManager', function () { }) it('should not flush the doc out of mongo', function () { - 
expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been - .called + expect(this.DocArchiveManager.archiveDoc).to.not.have.been.called }) }) @@ -377,7 +404,7 @@ describe('DocManager', function () { describe('when the background flush succeeds', function () { beforeEach(async function () { - await this.DocManager.promises.patchDoc( + await this.DocManager.patchDoc( this.project_id, this.doc_id, this.meta @@ -389,17 +416,18 @@ describe('DocManager', function () { }) it('should flush the doc out of mongo', function () { - expect( - this.DocArchiveManager.promises.archiveDoc - ).to.have.been.calledWith(this.project_id, this.doc_id) + expect(this.DocArchiveManager.archiveDoc).to.have.been.calledWith( + this.project_id, + this.doc_id + ) }) }) describe('when the background flush fails', function () { beforeEach(async function () { this.err = new Error('foo') - this.DocArchiveManager.promises.archiveDoc.rejects(this.err) - await this.DocManager.promises.patchDoc( + this.DocArchiveManager.archiveDoc.rejects(this.err) + await this.DocManager.patchDoc( this.project_id, this.doc_id, this.meta @@ -422,9 +450,9 @@ describe('DocManager', function () { describe('when the doc does not exist', function () { it('should return a NotFoundError', async function () { - this.MongoManager.promises.findDoc.resolves(null) + this.MongoManager.findDoc.resolves(null) await expect( - this.DocManager.promises.patchDoc(this.project_id, this.doc_id, {}) + this.DocManager.patchDoc(this.project_id, this.doc_id, {}) ).to.be.rejectedWith( `No such project/doc to delete: ${this.project_id}/${this.doc_id}` ) @@ -470,13 +498,13 @@ describe('DocManager', function () { ranges: this.originalRanges, } - this.DocManager.promises._getDoc = sinon.stub() + this.DocManager._getDoc = sinon.stub() }) describe('when only the doc lines have changed', function () { beforeEach(async function () { - this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) - this.result = await 
this.DocManager.promises.updateDoc( + this.DocManager._getDoc = sinon.stub().resolves(this.doc) + this.result = await this.DocManager.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -486,7 +514,7 @@ describe('DocManager', function () { }) it('should get the existing doc', function () { - this.DocManager.promises._getDoc + this.DocManager._getDoc .calledWith(this.project_id, this.doc_id, { version: true, rev: true, @@ -498,7 +526,7 @@ describe('DocManager', function () { }) it('should upsert the document to the doc collection', function () { - this.MongoManager.promises.upsertIntoDocCollection + this.MongoManager.upsertIntoDocCollection .calledWith(this.project_id, this.doc_id, this.rev, { lines: this.newDocLines, }) @@ -512,9 +540,9 @@ describe('DocManager', function () { describe('when the doc ranges have changed', function () { beforeEach(async function () { - this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) + this.DocManager._getDoc = sinon.stub().resolves(this.doc) this.RangeManager.shouldUpdateRanges.returns(true) - this.result = await this.DocManager.promises.updateDoc( + this.result = await this.DocManager.updateDoc( this.project_id, this.doc_id, this.oldDocLines, @@ -524,7 +552,7 @@ describe('DocManager', function () { }) it('should upsert the ranges', function () { - this.MongoManager.promises.upsertIntoDocCollection + this.MongoManager.upsertIntoDocCollection .calledWith(this.project_id, this.doc_id, this.rev, { ranges: this.newRanges, }) @@ -538,8 +566,8 @@ describe('DocManager', function () { describe('when only the version has changed', function () { beforeEach(async function () { - this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) - this.result = await this.DocManager.promises.updateDoc( + this.DocManager._getDoc = sinon.stub().resolves(this.doc) + this.result = await this.DocManager.updateDoc( this.project_id, this.doc_id, this.oldDocLines, @@ -549,7 +577,7 @@ describe('DocManager', function () { }) 
it('should update the version', function () { - this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith( + this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith( this.project_id, this.doc_id, this.rev, @@ -564,8 +592,8 @@ describe('DocManager', function () { describe('when the doc has not changed at all', function () { beforeEach(async function () { - this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) - this.result = await this.DocManager.promises.updateDoc( + this.DocManager._getDoc = sinon.stub().resolves(this.doc) + this.result = await this.DocManager.updateDoc( this.project_id, this.doc_id, this.oldDocLines, @@ -575,9 +603,7 @@ describe('DocManager', function () { }) it('should not update the ranges or lines or version', function () { - this.MongoManager.promises.upsertIntoDocCollection.called.should.equal( - false - ) + this.MongoManager.upsertIntoDocCollection.called.should.equal(false) }) it('should return the old rev and modified == false', function () { @@ -588,7 +614,7 @@ describe('DocManager', function () { describe('when the version is null', function () { it('should return an error', async function () { await expect( - this.DocManager.promises.updateDoc( + this.DocManager.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -602,7 +628,7 @@ describe('DocManager', function () { describe('when the lines are null', function () { it('should return an error', async function () { await expect( - this.DocManager.promises.updateDoc( + this.DocManager.updateDoc( this.project_id, this.doc_id, null, @@ -616,7 +642,7 @@ describe('DocManager', function () { describe('when the ranges are null', function () { it('should return an error', async function () { await expect( - this.DocManager.promises.updateDoc( + this.DocManager.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -630,9 +656,9 @@ describe('DocManager', function () { describe('when there is a generic error getting the doc', 
function () { beforeEach(async function () { this.error = new Error('doc could not be found') - this.DocManager.promises._getDoc = sinon.stub().rejects(this.error) + this.DocManager._getDoc = sinon.stub().rejects(this.error) await expect( - this.DocManager.promises.updateDoc( + this.DocManager.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -643,16 +669,15 @@ describe('DocManager', function () { }) it('should not upsert the document to the doc collection', function () { - this.MongoManager.promises.upsertIntoDocCollection.should.not.have.been - .called + this.MongoManager.upsertIntoDocCollection.should.not.have.been.called }) }) describe('when the version was decremented', function () { it('should return an error', async function () { - this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) + this.DocManager._getDoc = sinon.stub().resolves(this.doc) await expect( - this.DocManager.promises.updateDoc( + this.DocManager.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -665,8 +690,8 @@ describe('DocManager', function () { describe('when the doc lines have not changed', function () { beforeEach(async function () { - this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) - this.result = await this.DocManager.promises.updateDoc( + this.DocManager._getDoc = sinon.stub().resolves(this.doc) + this.result = await this.DocManager.updateDoc( this.project_id, this.doc_id, this.oldDocLines.slice(), @@ -676,9 +701,7 @@ describe('DocManager', function () { }) it('should not update the doc', function () { - this.MongoManager.promises.upsertIntoDocCollection.called.should.equal( - false - ) + this.MongoManager.upsertIntoDocCollection.called.should.equal(false) }) it('should return the existing rev', function () { @@ -688,8 +711,8 @@ describe('DocManager', function () { describe('when the doc does not exist', function () { beforeEach(async function () { - this.DocManager.promises._getDoc = sinon.stub().resolves(null) - this.result = 
await this.DocManager.promises.updateDoc( + this.DocManager._getDoc = sinon.stub().resolves(null) + this.result = await this.DocManager.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -699,7 +722,7 @@ describe('DocManager', function () { }) it('should upsert the document to the doc collection', function () { - this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith( + this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith( this.project_id, this.doc_id, undefined, @@ -718,12 +741,12 @@ describe('DocManager', function () { describe('when another update is racing', function () { beforeEach(async function () { - this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) - this.MongoManager.promises.upsertIntoDocCollection + this.DocManager._getDoc = sinon.stub().resolves(this.doc) + this.MongoManager.upsertIntoDocCollection .onFirstCall() .rejects(new Errors.DocRevValueError()) this.RangeManager.shouldUpdateRanges.returns(true) - this.result = await this.DocManager.promises.updateDoc( + this.result = await this.DocManager.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -733,7 +756,7 @@ describe('DocManager', function () { }) it('should upsert the doc twice', function () { - this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith( + this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith( this.project_id, this.doc_id, this.rev, @@ -743,8 +766,7 @@ describe('DocManager', function () { version: this.version + 1, } ) - this.MongoManager.promises.upsertIntoDocCollection.should.have.been - .calledTwice + this.MongoManager.upsertIntoDocCollection.should.have.been.calledTwice }) it('should return the new rev', function () { diff --git a/services/docstore/test/unit/js/HttpControllerTests.js b/services/docstore/test/unit/js/HttpControllerTests.js index bf78696890..ab491ec150 100644 --- a/services/docstore/test/unit/js/HttpControllerTests.js +++ 
b/services/docstore/test/unit/js/HttpControllerTests.js @@ -14,7 +14,7 @@ describe('HttpController', function () { max_doc_length: 2 * 1024 * 1024, } this.DocArchiveManager = { - unArchiveAllDocs: sinon.stub().yields(), + unArchiveAllDocs: sinon.stub().returns(), } this.DocManager = {} this.HttpController = SandboxedModule.require(modulePath, { @@ -54,15 +54,13 @@ describe('HttpController', function () { describe('getDoc', function () { describe('without deleted docs', function () { - beforeEach(function () { + beforeEach(async function () { this.req.params = { project_id: this.projectId, doc_id: this.docId, } - this.DocManager.getFullDoc = sinon - .stub() - .callsArgWith(2, null, this.doc) - this.HttpController.getDoc(this.req, this.res, this.next) + this.DocManager.getFullDoc = sinon.stub().resolves(this.doc) + await this.HttpController.getDoc(this.req, this.res, this.next) }) it('should get the document with the version (including deleted)', function () { @@ -89,26 +87,24 @@ describe('HttpController', function () { project_id: this.projectId, doc_id: this.docId, } - this.DocManager.getFullDoc = sinon - .stub() - .callsArgWith(2, null, this.deletedDoc) + this.DocManager.getFullDoc = sinon.stub().resolves(this.deletedDoc) }) - it('should get the doc from the doc manager', function () { - this.HttpController.getDoc(this.req, this.res, this.next) + it('should get the doc from the doc manager', async function () { + await this.HttpController.getDoc(this.req, this.res, this.next) this.DocManager.getFullDoc .calledWith(this.projectId, this.docId) .should.equal(true) }) - it('should return 404 if the query string delete is not set ', function () { - this.HttpController.getDoc(this.req, this.res, this.next) + it('should return 404 if the query string delete is not set ', async function () { + await this.HttpController.getDoc(this.req, this.res, this.next) this.res.sendStatus.calledWith(404).should.equal(true) }) - it('should return the doc as JSON if include_deleted is 
set to true', function () { + it('should return the doc as JSON if include_deleted is set to true', async function () { this.req.query.include_deleted = 'true' - this.HttpController.getDoc(this.req, this.res, this.next) + await this.HttpController.getDoc(this.req, this.res, this.next) this.res.json .calledWith({ _id: this.docId, @@ -123,13 +119,15 @@ describe('HttpController', function () { }) describe('getRawDoc', function () { - beforeEach(function () { + beforeEach(async function () { this.req.params = { project_id: this.projectId, doc_id: this.docId, } - this.DocManager.getDocLines = sinon.stub().callsArgWith(2, null, this.doc) - this.HttpController.getRawDoc(this.req, this.res, this.next) + this.DocManager.getDocLines = sinon + .stub() + .resolves(this.doc.lines.join('\n')) + await this.HttpController.getRawDoc(this.req, this.res, this.next) }) it('should get the document without the version', function () { @@ -154,7 +152,7 @@ describe('HttpController', function () { describe('getAllDocs', function () { describe('normally', function () { - beforeEach(function () { + beforeEach(async function () { this.req.params = { project_id: this.projectId } this.docs = [ { @@ -168,10 +166,8 @@ describe('HttpController', function () { rev: 4, }, ] - this.DocManager.getAllNonDeletedDocs = sinon - .stub() - .callsArgWith(2, null, this.docs) - this.HttpController.getAllDocs(this.req, this.res, this.next) + this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs) + await this.HttpController.getAllDocs(this.req, this.res, this.next) }) it('should get all the (non-deleted) docs', function () { @@ -199,7 +195,7 @@ describe('HttpController', function () { }) describe('with null lines', function () { - beforeEach(function () { + beforeEach(async function () { this.req.params = { project_id: this.projectId } this.docs = [ { @@ -213,10 +209,8 @@ describe('HttpController', function () { rev: 4, }, ] - this.DocManager.getAllNonDeletedDocs = sinon - .stub() - 
.callsArgWith(2, null, this.docs) - this.HttpController.getAllDocs(this.req, this.res, this.next) + this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs) + await this.HttpController.getAllDocs(this.req, this.res, this.next) }) it('should return the doc with fallback lines', function () { @@ -238,7 +232,7 @@ describe('HttpController', function () { }) describe('with a null doc', function () { - beforeEach(function () { + beforeEach(async function () { this.req.params = { project_id: this.projectId } this.docs = [ { @@ -253,10 +247,8 @@ describe('HttpController', function () { rev: 4, }, ] - this.DocManager.getAllNonDeletedDocs = sinon - .stub() - .callsArgWith(2, null, this.docs) - this.HttpController.getAllDocs(this.req, this.res, this.next) + this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs) + await this.HttpController.getAllDocs(this.req, this.res, this.next) }) it('should return the non null docs as JSON', function () { @@ -292,7 +284,7 @@ describe('HttpController', function () { describe('getAllRanges', function () { describe('normally', function () { - beforeEach(function () { + beforeEach(async function () { this.req.params = { project_id: this.projectId } this.docs = [ { @@ -304,10 +296,8 @@ describe('HttpController', function () { ranges: { mock_ranges: 'two' }, }, ] - this.DocManager.getAllNonDeletedDocs = sinon - .stub() - .callsArgWith(2, null, this.docs) - this.HttpController.getAllRanges(this.req, this.res, this.next) + this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs) + await this.HttpController.getAllRanges(this.req, this.res, this.next) }) it('should get all the (non-deleted) doc ranges', function () { @@ -342,16 +332,17 @@ describe('HttpController', function () { }) describe('when the doc lines exist and were updated', function () { - beforeEach(function () { + beforeEach(async function () { this.req.body = { lines: (this.lines = ['hello', 'world']), version: (this.version = 42), ranges: 
(this.ranges = { changes: 'mock' }), } + this.rev = 5 this.DocManager.updateDoc = sinon .stub() - .yields(null, true, (this.rev = 5)) - this.HttpController.updateDoc(this.req, this.res, this.next) + .resolves({ modified: true, rev: this.rev }) + await this.HttpController.updateDoc(this.req, this.res, this.next) }) it('should update the document', function () { @@ -374,16 +365,17 @@ describe('HttpController', function () { }) describe('when the doc lines exist and were not updated', function () { - beforeEach(function () { + beforeEach(async function () { this.req.body = { lines: (this.lines = ['hello', 'world']), version: (this.version = 42), ranges: {}, } + this.rev = 5 this.DocManager.updateDoc = sinon .stub() - .yields(null, false, (this.rev = 5)) - this.HttpController.updateDoc(this.req, this.res, this.next) + .resolves({ modified: false, rev: this.rev }) + await this.HttpController.updateDoc(this.req, this.res, this.next) }) it('should return a modified status', function () { @@ -394,10 +386,12 @@ describe('HttpController', function () { }) describe('when the doc lines are not provided', function () { - beforeEach(function () { + beforeEach(async function () { this.req.body = { version: 42, ranges: {} } - this.DocManager.updateDoc = sinon.stub().yields(null, false) - this.HttpController.updateDoc(this.req, this.res, this.next) + this.DocManager.updateDoc = sinon + .stub() + .resolves({ modified: false, rev: 0 }) + await this.HttpController.updateDoc(this.req, this.res, this.next) }) it('should not update the document', function () { @@ -410,10 +404,12 @@ describe('HttpController', function () { }) describe('when the doc version are not provided', function () { - beforeEach(function () { + beforeEach(async function () { this.req.body = { version: 42, lines: ['hello world'] } - this.DocManager.updateDoc = sinon.stub().yields(null, false) - this.HttpController.updateDoc(this.req, this.res, this.next) + this.DocManager.updateDoc = sinon + .stub() + .resolves({ 
modified: false, rev: 0 }) + await this.HttpController.updateDoc(this.req, this.res, this.next) }) it('should not update the document', function () { @@ -426,10 +422,12 @@ describe('HttpController', function () { }) describe('when the doc ranges is not provided', function () { - beforeEach(function () { + beforeEach(async function () { this.req.body = { lines: ['foo'], version: 42 } - this.DocManager.updateDoc = sinon.stub().yields(null, false) - this.HttpController.updateDoc(this.req, this.res, this.next) + this.DocManager.updateDoc = sinon + .stub() + .resolves({ modified: false, rev: 0 }) + await this.HttpController.updateDoc(this.req, this.res, this.next) }) it('should not update the document', function () { @@ -442,13 +440,20 @@ describe('HttpController', function () { }) describe('when the doc body is too large', function () { - beforeEach(function () { + beforeEach(async function () { this.req.body = { lines: (this.lines = Array(2049).fill('a'.repeat(1024))), version: (this.version = 42), ranges: (this.ranges = { changes: 'mock' }), } - this.HttpController.updateDoc(this.req, this.res, this.next) + this.DocManager.updateDoc = sinon + .stub() + .resolves({ modified: false, rev: 0 }) + await this.HttpController.updateDoc(this.req, this.res, this.next) + }) + + it('should not update the document', function () { + this.DocManager.updateDoc.called.should.equal(false) }) it('should return a 413 (too large) response', function () { @@ -462,14 +467,14 @@ describe('HttpController', function () { }) describe('patchDoc', function () { - beforeEach(function () { + beforeEach(async function () { this.req.params = { project_id: this.projectId, doc_id: this.docId, } this.req.body = { name: 'foo.tex' } - this.DocManager.patchDoc = sinon.stub().yields(null) - this.HttpController.patchDoc(this.req, this.res, this.next) + this.DocManager.patchDoc = sinon.stub().resolves() + await this.HttpController.patchDoc(this.req, this.res, this.next) }) it('should delete the document', 
function () { @@ -484,11 +489,11 @@ describe('HttpController', function () { }) describe('with an invalid payload', function () { - beforeEach(function () { + beforeEach(async function () { this.req.body = { cannot: 'happen' } - this.DocManager.patchDoc = sinon.stub().yields(null) - this.HttpController.patchDoc(this.req, this.res, this.next) + this.DocManager.patchDoc = sinon.stub().resolves() + await this.HttpController.patchDoc(this.req, this.res, this.next) }) it('should log a message', function () { @@ -509,10 +514,10 @@ describe('HttpController', function () { }) describe('archiveAllDocs', function () { - beforeEach(function () { + beforeEach(async function () { this.req.params = { project_id: this.projectId } - this.DocArchiveManager.archiveAllDocs = sinon.stub().callsArg(1) - this.HttpController.archiveAllDocs(this.req, this.res, this.next) + this.DocArchiveManager.archiveAllDocs = sinon.stub().resolves() + await this.HttpController.archiveAllDocs(this.req, this.res, this.next) }) it('should archive the project', function () { @@ -532,9 +537,12 @@ describe('HttpController', function () { }) describe('on success', function () { - beforeEach(function (done) { - this.res.sendStatus.callsFake(() => done()) - this.HttpController.unArchiveAllDocs(this.req, this.res, this.next) + beforeEach(async function () { + await this.HttpController.unArchiveAllDocs( + this.req, + this.res, + this.next + ) }) it('returns a 200', function () { @@ -543,12 +551,15 @@ describe('HttpController', function () { }) describe("when the archived rev doesn't match", function () { - beforeEach(function (done) { - this.res.sendStatus.callsFake(() => done()) - this.DocArchiveManager.unArchiveAllDocs.yields( + beforeEach(async function () { + this.DocArchiveManager.unArchiveAllDocs.rejects( new Errors.DocRevValueError('bad rev') ) - this.HttpController.unArchiveAllDocs(this.req, this.res, this.next) + await this.HttpController.unArchiveAllDocs( + this.req, + this.res, + this.next + ) }) 
it('returns a 409', function () { @@ -558,10 +569,10 @@ describe('HttpController', function () { }) describe('destroyProject', function () { - beforeEach(function () { + beforeEach(async function () { this.req.params = { project_id: this.projectId } - this.DocArchiveManager.destroyProject = sinon.stub().callsArg(1) - this.HttpController.destroyProject(this.req, this.res, this.next) + this.DocArchiveManager.destroyProject = sinon.stub().resolves() + await this.HttpController.destroyProject(this.req, this.res, this.next) }) it('should destroy the docs', function () { diff --git a/services/docstore/test/unit/js/MongoManagerTests.js b/services/docstore/test/unit/js/MongoManagerTests.js index 4f8467db76..b96b661df4 100644 --- a/services/docstore/test/unit/js/MongoManagerTests.js +++ b/services/docstore/test/unit/js/MongoManagerTests.js @@ -41,7 +41,7 @@ describe('MongoManager', function () { this.doc = { name: 'mock-doc' } this.db.docs.findOne = sinon.stub().resolves(this.doc) this.filter = { lines: true } - this.result = await this.MongoManager.promises.findDoc( + this.result = await this.MongoManager.findDoc( this.projectId, this.docId, this.filter @@ -70,11 +70,7 @@ describe('MongoManager', function () { describe('patchDoc', function () { beforeEach(async function () { this.meta = { name: 'foo.tex' } - await this.MongoManager.promises.patchDoc( - this.projectId, - this.docId, - this.meta - ) + await this.MongoManager.patchDoc(this.projectId, this.docId, this.meta) }) it('should pass the parameter along', function () { @@ -104,7 +100,7 @@ describe('MongoManager', function () { describe('with included_deleted = false', function () { beforeEach(async function () { - this.result = await this.MongoManager.promises.getProjectsDocs( + this.result = await this.MongoManager.getProjectsDocs( this.projectId, { include_deleted: false }, this.filter @@ -132,7 +128,7 @@ describe('MongoManager', function () { describe('with included_deleted = true', function () { beforeEach(async 
function () { - this.result = await this.MongoManager.promises.getProjectsDocs( + this.result = await this.MongoManager.getProjectsDocs( this.projectId, { include_deleted: true }, this.filter @@ -167,7 +163,7 @@ describe('MongoManager', function () { this.db.docs.find = sinon.stub().returns({ toArray: sinon.stub().resolves([this.doc1, this.doc2, this.doc3]), }) - this.result = await this.MongoManager.promises.getProjectsDeletedDocs( + this.result = await this.MongoManager.getProjectsDeletedDocs( this.projectId, this.filter ) @@ -203,7 +199,7 @@ describe('MongoManager', function () { }) it('should upsert the document', async function () { - await this.MongoManager.promises.upsertIntoDocCollection( + await this.MongoManager.upsertIntoDocCollection( this.projectId, this.docId, this.oldRev, @@ -223,7 +219,7 @@ describe('MongoManager', function () { it('should handle update error', async function () { this.db.docs.updateOne.rejects(this.stubbedErr) await expect( - this.MongoManager.promises.upsertIntoDocCollection( + this.MongoManager.upsertIntoDocCollection( this.projectId, this.docId, this.rev, @@ -235,7 +231,7 @@ describe('MongoManager', function () { }) it('should insert without a previous rev', async function () { - await this.MongoManager.promises.upsertIntoDocCollection( + await this.MongoManager.upsertIntoDocCollection( this.projectId, this.docId, null, @@ -254,7 +250,7 @@ describe('MongoManager', function () { it('should handle generic insert error', async function () { this.db.docs.insertOne.rejects(this.stubbedErr) await expect( - this.MongoManager.promises.upsertIntoDocCollection( + this.MongoManager.upsertIntoDocCollection( this.projectId, this.docId, null, @@ -266,7 +262,7 @@ describe('MongoManager', function () { it('should handle duplicate insert error', async function () { this.db.docs.insertOne.rejects({ code: 11000 }) await expect( - this.MongoManager.promises.upsertIntoDocCollection( + this.MongoManager.upsertIntoDocCollection( this.projectId, 
this.docId, null, @@ -280,7 +276,7 @@ describe('MongoManager', function () { beforeEach(async function () { this.projectId = new ObjectId() this.db.docs.deleteMany = sinon.stub().resolves() - await this.MongoManager.promises.destroyProject(this.projectId) + await this.MongoManager.destroyProject(this.projectId) }) it('should destroy all docs', function () { @@ -297,13 +293,13 @@ describe('MongoManager', function () { it('should not error when the rev has not changed', async function () { this.db.docs.findOne = sinon.stub().resolves({ rev: 1 }) - await this.MongoManager.promises.checkRevUnchanged(this.doc) + await this.MongoManager.checkRevUnchanged(this.doc) }) it('should return an error when the rev has changed', async function () { this.db.docs.findOne = sinon.stub().resolves({ rev: 2 }) await expect( - this.MongoManager.promises.checkRevUnchanged(this.doc) + this.MongoManager.checkRevUnchanged(this.doc) ).to.be.rejectedWith(Errors.DocModifiedError) }) @@ -311,14 +307,14 @@ describe('MongoManager', function () { this.db.docs.findOne = sinon.stub().resolves({ rev: 2 }) this.doc = { _id: new ObjectId(), name: 'mock-doc', rev: NaN } await expect( - this.MongoManager.promises.checkRevUnchanged(this.doc) + this.MongoManager.checkRevUnchanged(this.doc) ).to.be.rejectedWith(Errors.DocRevValueError) }) it('should return a value error if checked doc rev is NaN', async function () { this.db.docs.findOne = sinon.stub().resolves({ rev: NaN }) await expect( - this.MongoManager.promises.checkRevUnchanged(this.doc) + this.MongoManager.checkRevUnchanged(this.doc) ).to.be.rejectedWith(Errors.DocRevValueError) }) }) @@ -334,7 +330,7 @@ describe('MongoManager', function () { describe('complete doc', function () { beforeEach(async function () { - await this.MongoManager.promises.restoreArchivedDoc( + await this.MongoManager.restoreArchivedDoc( this.projectId, this.docId, this.archivedDoc @@ -364,7 +360,7 @@ describe('MongoManager', function () { describe('without ranges', function 
() { beforeEach(async function () { delete this.archivedDoc.ranges - await this.MongoManager.promises.restoreArchivedDoc( + await this.MongoManager.restoreArchivedDoc( this.projectId, this.docId, this.archivedDoc @@ -395,7 +391,7 @@ describe('MongoManager', function () { it('throws a DocRevValueError', async function () { this.db.docs.updateOne.resolves({ matchedCount: 0 }) await expect( - this.MongoManager.promises.restoreArchivedDoc( + this.MongoManager.restoreArchivedDoc( this.projectId, this.docId, this.archivedDoc diff --git a/services/docstore/test/unit/js/RangeManagerTests.js b/services/docstore/test/unit/js/RangeManagerTests.js index 7a2de7352e..ba99280a7a 100644 --- a/services/docstore/test/unit/js/RangeManagerTests.js +++ b/services/docstore/test/unit/js/RangeManagerTests.js @@ -30,7 +30,7 @@ describe('RangeManager', function () { }) describe('jsonRangesToMongo', function () { - it('should convert ObjectIds and dates to proper objects', function () { + it('should convert ObjectIds and dates to proper objects and fix comment id', function () { const changeId = new ObjectId().toString() const commentId = new ObjectId().toString() const userId = new ObjectId().toString() @@ -66,7 +66,7 @@ describe('RangeManager', function () { ], comments: [ { - id: new ObjectId(commentId), + id: new ObjectId(threadId), op: { c: 'foo', p: 3, t: new ObjectId(threadId) }, }, ], @@ -110,7 +110,6 @@ describe('RangeManager', function () { return it('should be consistent when transformed through json -> mongo -> json', function () { const changeId = new ObjectId().toString() - const commentId = new ObjectId().toString() const userId = new ObjectId().toString() const threadId = new ObjectId().toString() const ts = new Date().toJSON() @@ -127,7 +126,7 @@ describe('RangeManager', function () { ], comments: [ { - id: commentId, + id: threadId, op: { c: 'foo', p: 3, t: threadId }, }, ], @@ -142,6 +141,7 @@ describe('RangeManager', function () { return describe('shouldUpdateRanges', 
function () { beforeEach(function () { + const threadId = new ObjectId() this.ranges = { changes: [ { @@ -155,8 +155,8 @@ describe('RangeManager', function () { ], comments: [ { - id: new ObjectId(), - op: { c: 'foo', p: 3, t: new ObjectId() }, + id: threadId, + op: { c: 'foo', p: 3, t: threadId }, }, ], } diff --git a/services/document-updater/app/js/DiffCodec.js b/services/document-updater/app/js/DiffCodec.js index 8c574cff70..17da409386 100644 --- a/services/document-updater/app/js/DiffCodec.js +++ b/services/document-updater/app/js/DiffCodec.js @@ -1,3 +1,4 @@ +const OError = require('@overleaf/o-error') const DMP = require('diff-match-patch') const { TextOperation } = require('overleaf-editor-core') const dmp = new DMP() @@ -38,23 +39,62 @@ module.exports = { return ops }, - diffAsHistoryV1EditOperation(before, after) { - const diffs = dmp.diff_main(before, after) + /** + * @param {import("overleaf-editor-core").StringFileData} file + * @param {string} after + * @return {TextOperation} + */ + diffAsHistoryOTEditOperation(file, after) { + const beforeWithoutTrackedDeletes = file.getContent({ + filterTrackedDeletes: true, + }) + const diffs = dmp.diff_main(beforeWithoutTrackedDeletes, after) dmp.diff_cleanupSemantic(diffs) + const trackedChanges = file.trackedChanges.asSorted() + let nextTc = trackedChanges.shift() + const op = new TextOperation() for (const diff of diffs) { - const [type, content] = diff + let [type, content] = diff if (type === this.ADDED) { op.insert(content) - } else if (type === this.REMOVED) { - op.remove(content.length) - } else if (type === this.UNCHANGED) { - op.retain(content.length) + } else if (type === this.REMOVED || type === this.UNCHANGED) { + while (op.baseLength + content.length > nextTc?.range.start) { + if (nextTc.tracking.type === 'delete') { + const untilRange = nextTc.range.start - op.baseLength + if (type === this.REMOVED) { + op.remove(untilRange) + } else if (type === this.UNCHANGED) { + op.retain(untilRange) + } + 
op.retain(nextTc.range.end - nextTc.range.start) + content = content.slice(untilRange) + } + nextTc = trackedChanges.shift() + } + if (type === this.REMOVED) { + op.remove(content.length) + } else if (type === this.UNCHANGED) { + op.retain(content.length) + } } else { throw new Error('Unknown type') } } + while (nextTc) { + if ( + nextTc.tracking.type !== 'delete' || + nextTc.range.start !== op.baseLength + ) { + throw new OError( + 'StringFileData.trackedChanges out of sync: unexpected range after end of diff', + { nextTc, baseLength: op.baseLength } + ) + } + op.retain(nextTc.range.end - nextTc.range.start) + nextTc = trackedChanges.shift() + } return op }, } diff --git a/services/document-updater/app/js/DocumentManager.js b/services/document-updater/app/js/DocumentManager.js index 4803056423..3fb3d10a6e 100644 --- a/services/document-updater/app/js/DocumentManager.js +++ b/services/document-updater/app/js/DocumentManager.js @@ -194,9 +194,8 @@ const DocumentManager = { let op if (type === 'history-ot') { const file = StringFileData.fromRaw(oldLines) - const operation = DiffCodec.diffAsHistoryV1EditOperation( - // TODO(24596): tc support for history-ot - file.getContent({ filterTrackedDeletes: true }), + const operation = DiffCodec.diffAsHistoryOTEditOperation( + file, newLines.join('\n') ) if (operation.isNoop()) { @@ -536,11 +535,6 @@ const DocumentManager = { if (opts.historyRangesMigration) { historyRangesSupport = opts.historyRangesMigration === 'forwards' } - if (!Array.isArray(lines)) { - const file = StringFileData.fromRaw(lines) - // TODO(24596): tc support for history-ot - lines = file.getLines() - } await ProjectHistoryRedisManager.promises.queueResyncDocContent( projectId, diff --git a/services/document-updater/app/js/Limits.js b/services/document-updater/app/js/Limits.js index 268ccd3f9b..cbd9293042 100644 --- a/services/document-updater/app/js/Limits.js +++ b/services/document-updater/app/js/Limits.js @@ -28,4 +28,19 @@ module.exports = { // since 
we didn't hit the limit in the loop, the document is within the allowed length return false }, + + /** + * @param {StringFileRawData} raw + * @param {number} maxDocLength + */ + stringFileDataContentIsTooLarge(raw, maxDocLength) { + let n = raw.content.length + if (n <= maxDocLength) return false // definitely under the limit, no need to calculate the total size + for (const tc of raw.trackedChanges ?? []) { + if (tc.tracking.type !== 'delete') continue + n -= tc.range.length + if (n <= maxDocLength) return false // under the limit now, no need to calculate the exact size + } + return true + }, } diff --git a/services/document-updater/app/js/ProjectHistoryRedisManager.js b/services/document-updater/app/js/ProjectHistoryRedisManager.js index 9a9985d99a..78e9c2ea4c 100644 --- a/services/document-updater/app/js/ProjectHistoryRedisManager.js +++ b/services/document-updater/app/js/ProjectHistoryRedisManager.js @@ -8,13 +8,14 @@ const rclient = require('@overleaf/redis-wrapper').createClient( ) const logger = require('@overleaf/logger') const metrics = require('./Metrics') -const { docIsTooLarge } = require('./Limits') +const { docIsTooLarge, stringFileDataContentIsTooLarge } = require('./Limits') const { addTrackedDeletesToContent, extractOriginOrSource } = require('./Utils') const HistoryConversions = require('./HistoryConversions') const OError = require('@overleaf/o-error') /** * @import { Ranges } from './types' + * @import { StringFileRawData } from 'overleaf-editor-core/lib/types' */ const ProjectHistoryRedisManager = { @@ -180,7 +181,7 @@ const ProjectHistoryRedisManager = { * @param {string} projectId * @param {string} projectHistoryId * @param {string} docId - * @param {string[]} lines + * @param {string[] | StringFileRawData} lines * @param {Ranges} ranges * @param {string[]} resolvedCommentIds * @param {number} version @@ -204,13 +205,8 @@ const ProjectHistoryRedisManager = { 'queue doc content resync' ) - let content = lines.join('\n') - if 
(historyRangesSupport) { - content = addTrackedDeletesToContent(content, ranges.changes ?? []) - } - const projectUpdate = { - resyncDocContent: { content, version }, + resyncDocContent: { version }, projectHistoryId, path: pathname, doc: docId, @@ -219,17 +215,38 @@ const ProjectHistoryRedisManager = { }, } - if (historyRangesSupport) { - projectUpdate.resyncDocContent.ranges = - HistoryConversions.toHistoryRanges(ranges) - projectUpdate.resyncDocContent.resolvedCommentIds = resolvedCommentIds + let content = '' + if (Array.isArray(lines)) { + content = lines.join('\n') + if (historyRangesSupport) { + content = addTrackedDeletesToContent(content, ranges.changes ?? []) + projectUpdate.resyncDocContent.ranges = + HistoryConversions.toHistoryRanges(ranges) + projectUpdate.resyncDocContent.resolvedCommentIds = resolvedCommentIds + } + } else { + content = lines.content + projectUpdate.resyncDocContent.historyOTRanges = { + comments: lines.comments, + trackedChanges: lines.trackedChanges, + } } + projectUpdate.resyncDocContent.content = content const jsonUpdate = JSON.stringify(projectUpdate) // Do an optimised size check on the docLines using the serialised // project update length as an upper bound const sizeBound = jsonUpdate.length - if (docIsTooLarge(sizeBound, lines, Settings.max_doc_length)) { + if (Array.isArray(lines)) { + if (docIsTooLarge(sizeBound, lines, Settings.max_doc_length)) { + throw new OError( + 'blocking resync doc content insert into project history queue: doc is too large', + { projectId, docId, docSize: sizeBound } + ) + } + } else if ( + stringFileDataContentIsTooLarge(lines, Settings.max_doc_length) + ) { throw new OError( 'blocking resync doc content insert into project history queue: doc is too large', { projectId, docId, docSize: sizeBound } diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml index 2fe97bd9b3..ca15f35fef 100644 --- a/services/document-updater/docker-compose.ci.yml 
+++ b/services/document-updater/docker-compose.ci.yml @@ -28,12 +28,15 @@ services: MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" + volumes: + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started redis: condition: service_healthy user: node + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance @@ -45,7 +48,7 @@ services: command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . user: root redis: - image: redis + image: redis:7.4.3 healthcheck: test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index 8a94d1a24c..3688d21d0b 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -26,6 +26,7 @@ services: - .:/overleaf/services/document-updater - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/document-updater environment: ELASTIC_SEARCH_DSN: es:9200 @@ -45,10 +46,11 @@ services: condition: service_started redis: condition: service_healthy + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance redis: - image: redis + image: redis:7.4.3 healthcheck: test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/document-updater/scripts/check_redis_mongo_sync_state.js b/services/document-updater/scripts/check_redis_mongo_sync_state.js index 08209400aa..51db47af4d 100644 --- a/services/document-updater/scripts/check_redis_mongo_sync_state.js +++ b/services/document-updater/scripts/check_redis_mongo_sync_state.js @@ -15,6 +15,7 @@ const request = require('requestretry').defaults({ retryDelay: 10, }) +const 
ONLY_PROJECT_ID = process.env.ONLY_PROJECT_ID const AUTO_FIX_VERSION_MISMATCH = process.env.AUTO_FIX_VERSION_MISMATCH === 'true' const AUTO_FIX_PARTIALLY_DELETED_DOC_METADATA = @@ -319,10 +320,12 @@ async function processProject(projectId) { * @return {Promise<{perIterationOutOfSync: number, done: boolean}>} */ async function scanOnce(processed, outOfSync) { - const projectIds = await ProjectFlusher.promises.flushAllProjects({ - limit: LIMIT, - dryRun: true, - }) + const projectIds = ONLY_PROJECT_ID + ? [ONLY_PROJECT_ID] + : await ProjectFlusher.promises.flushAllProjects({ + limit: LIMIT, + dryRun: true, + }) let perIterationOutOfSync = 0 for (const projectId of projectIds) { diff --git a/services/document-updater/test/acceptance/js/SettingADocumentTests.js b/services/document-updater/test/acceptance/js/SettingADocumentTests.js index fd1851a221..e1bc54dc90 100644 --- a/services/document-updater/test/acceptance/js/SettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/SettingADocumentTests.js @@ -686,4 +686,285 @@ describe('Setting a document', function () { }) }) }) + + describe('with track changes (history-ot)', function () { + const lines = ['one', 'one and a half', 'two', 'three'] + const userId = DocUpdaterClient.randomId() + const ts = new Date().toISOString() + beforeEach(function (done) { + numberOfReceivedUpdates = 0 + this.newLines = ['one', 'two', 'three'] + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + this.historyOTUpdate = { + doc: this.doc_id, + op: [ + { + textOperation: [ + 4, + { + r: 'one and a half\n'.length, + tracking: { + type: 'delete', + userId, + ts, + }, + }, + 9, + ], + }, + ], + v: this.version, + meta: { source: 'random-publicId' }, + } + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines, + version: this.version, + otMigrationStage: 1, + }) + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { + if (error) { + throw error + } + 
DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.historyOTUpdate, + error => { + if (error) { + throw error + } + DocUpdaterClient.waitForPendingUpdates( + this.project_id, + this.doc_id, + done + ) + } + ) + }) + }) + + afterEach(function () { + MockProjectHistoryApi.flushProject.resetHistory() + MockWebApi.setDocument.resetHistory() + }) + it('should record tracked changes', function (done) { + docUpdaterRedis.get( + Keys.docLines({ doc_id: this.doc_id }), + (error, data) => { + if (error) { + throw error + } + expect(JSON.parse(data)).to.deep.equal({ + content: lines.join('\n'), + trackedChanges: [ + { + range: { + pos: 4, + length: 15, + }, + tracking: { + ts, + type: 'delete', + userId, + }, + }, + ], + }) + done() + } + ) + }) + + it('should apply the change', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, data) => { + if (error) { + throw error + } + expect(data.lines).to.deep.equal(this.newLines) + done() + } + ) + }) + const cases = [ + { + name: 'when resetting the content', + lines, + want: { + content: 'one\none and a half\none and a half\ntwo\nthree', + trackedChanges: [ + { + range: { + pos: 'one and a half\n'.length + 4, + length: 15, + }, + tracking: { + ts, + type: 'delete', + userId, + }, + }, + ], + }, + }, + { + name: 'when adding content before a tracked delete', + lines: ['one', 'INSERT', 'two', 'three'], + want: { + content: 'one\nINSERT\none and a half\ntwo\nthree', + trackedChanges: [ + { + range: { + pos: 'INSERT\n'.length + 4, + length: 15, + }, + tracking: { + ts, + type: 'delete', + userId, + }, + }, + ], + }, + }, + { + name: 'when adding content after a tracked delete', + lines: ['one', 'two', 'INSERT', 'three'], + want: { + content: 'one\none and a half\ntwo\nINSERT\nthree', + trackedChanges: [ + { + range: { + pos: 4, + length: 15, + }, + tracking: { + ts, + type: 'delete', + userId, + }, + }, + ], + }, + }, + { + name: 'when deleting content before a tracked 
delete', + lines: ['two', 'three'], + want: { + content: 'one and a half\ntwo\nthree', + trackedChanges: [ + { + range: { + pos: 0, + length: 15, + }, + tracking: { + ts, + type: 'delete', + userId, + }, + }, + ], + }, + }, + { + name: 'when deleting content after a tracked delete', + lines: ['one', 'two'], + want: { + content: 'one\none and a half\ntwo', + trackedChanges: [ + { + range: { + pos: 4, + length: 15, + }, + tracking: { + ts, + type: 'delete', + userId, + }, + }, + ], + }, + }, + { + name: 'when deleting content immediately after a tracked delete', + lines: ['one', 'three'], + want: { + content: 'one\none and a half\nthree', + trackedChanges: [ + { + range: { + pos: 4, + length: 15, + }, + tracking: { + ts, + type: 'delete', + userId, + }, + }, + ], + }, + }, + { + name: 'when deleting content across a tracked delete', + lines: ['onethree'], + want: { + content: 'oneone and a half\nthree', + trackedChanges: [ + { + range: { + pos: 3, + length: 15, + }, + tracking: { + ts, + type: 'delete', + userId, + }, + }, + ], + }, + }, + ] + + for (const { name, lines, want } of cases) { + describe(name, function () { + beforeEach(function (done) { + DocUpdaterClient.setDocLines( + this.project_id, + this.doc_id, + lines, + this.source, + userId, + false, + (error, res, body) => { + if (error) { + return done(error) + } + this.statusCode = res.statusCode + this.body = body + done() + } + ) + }) + it('should update accordingly', function (done) { + docUpdaterRedis.get( + Keys.docLines({ doc_id: this.doc_id }), + (error, data) => { + if (error) { + throw error + } + expect(JSON.parse(data)).to.deep.equal(want) + done() + } + ) + }) + }) + } + }) }) diff --git a/services/document-updater/test/unit/js/Limits/LimitsTests.js b/services/document-updater/test/unit/js/Limits/LimitsTests.js index 34a5c13c26..11ca38746a 100644 --- a/services/document-updater/test/unit/js/Limits/LimitsTests.js +++ b/services/document-updater/test/unit/js/Limits/LimitsTests.js @@ -81,4 +81,88 
@@ describe('Limits', function () { }) }) }) + + describe('stringFileDataContentIsTooLarge', function () { + it('should handle small docs', function () { + expect( + this.Limits.stringFileDataContentIsTooLarge({ content: '' }, 123) + ).to.equal(false) + }) + it('should handle docs at the limit', function () { + expect( + this.Limits.stringFileDataContentIsTooLarge( + { content: 'x'.repeat(123) }, + 123 + ) + ).to.equal(false) + }) + it('should handle docs above the limit', function () { + expect( + this.Limits.stringFileDataContentIsTooLarge( + { content: 'x'.repeat(123 + 1) }, + 123 + ) + ).to.equal(true) + }) + it('should handle docs above the limit and below with tracked-deletes removed', function () { + expect( + this.Limits.stringFileDataContentIsTooLarge( + { + content: 'x'.repeat(123 + 1), + trackedChanges: [ + { + range: { pos: 1, length: 1 }, + tracking: { + type: 'delete', + ts: '2025-06-16T14:31:44.910Z', + userId: 'user-id', + }, + }, + ], + }, + 123 + ) + ).to.equal(false) + }) + it('should handle docs above the limit and above with tracked-deletes removed', function () { + expect( + this.Limits.stringFileDataContentIsTooLarge( + { + content: 'x'.repeat(123 + 2), + trackedChanges: [ + { + range: { pos: 1, length: 1 }, + tracking: { + type: 'delete', + ts: '2025-06-16T14:31:44.910Z', + userId: 'user-id', + }, + }, + ], + }, + 123 + ) + ).to.equal(true) + }) + it('should handle docs above the limit and with tracked-inserts', function () { + expect( + this.Limits.stringFileDataContentIsTooLarge( + { + content: 'x'.repeat(123 + 1), + trackedChanges: [ + { + range: { pos: 1, length: 1 }, + tracking: { + type: 'insert', + ts: '2025-06-16T14:31:44.910Z', + userId: 'user-id', + }, + }, + ], + }, + 123 + ) + ).to.equal(true) + }) + }) }) diff --git a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js index 
760385b176..ad6c121dfb 100644 --- a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js +++ b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js @@ -15,6 +15,7 @@ describe('ProjectHistoryRedisManager', function () { this.Limits = { docIsTooLarge: sinon.stub().returns(false), + stringFileDataContentIsTooLarge: sinon.stub().returns(false), } this.ProjectHistoryRedisManager = SandboxedModule.require(modulePath, { @@ -61,22 +62,18 @@ describe('ProjectHistoryRedisManager', function () { }) it('should queue an update', function () { - this.multi.rpush - .calledWithExactly( - `ProjectHistory:Ops:${this.project_id}`, - this.ops[0], - this.ops[1] - ) - .should.equal(true) + this.multi.rpush.should.have.been.calledWithExactly( + `ProjectHistory:Ops:${this.project_id}`, + this.ops[0], + this.ops[1] + ) }) it('should set the queue timestamp if not present', function () { - this.multi.setnx - .calledWithExactly( - `ProjectHistory:FirstOpTimestamp:${this.project_id}`, - Date.now() - ) - .should.equal(true) + this.multi.setnx.should.have.been.calledWithExactly( + `ProjectHistory:FirstOpTimestamp:${this.project_id}`, + Date.now() + ) }) }) @@ -118,9 +115,10 @@ describe('ProjectHistoryRedisManager', function () { file: this.file_id, } - this.ProjectHistoryRedisManager.promises.queueOps - .calledWithExactly(this.project_id, JSON.stringify(update)) - .should.equal(true) + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(update) + ) }) }) @@ -166,9 +164,10 @@ describe('ProjectHistoryRedisManager', function () { doc: this.doc_id, } - this.ProjectHistoryRedisManager.promises.queueOps - .calledWithExactly(this.project_id, JSON.stringify(update)) - .should.equal(true) + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(update) + ) }) it('should 
queue an update with file metadata', async function () { @@ -350,9 +349,10 @@ describe('ProjectHistoryRedisManager', function () { doc: this.doc_id, } - this.ProjectHistoryRedisManager.promises.queueOps - .calledWithExactly(this.project_id, JSON.stringify(update)) - .should.equal(true) + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(update) + ) }) it('should not forward ranges if history ranges support is undefined', async function () { @@ -402,9 +402,10 @@ describe('ProjectHistoryRedisManager', function () { doc: this.doc_id, } - this.ProjectHistoryRedisManager.promises.queueOps - .calledWithExactly(this.project_id, JSON.stringify(update)) - .should.equal(true) + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(update) + ) }) it('should pass "false" as the createdBlob field if not provided', async function () { @@ -432,9 +433,10 @@ describe('ProjectHistoryRedisManager', function () { doc: this.doc_id, } - this.ProjectHistoryRedisManager.promises.queueOps - .calledWithExactly(this.project_id, JSON.stringify(update)) - .should.equal(true) + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(update) + ) }) it('should pass through the value of the createdBlob field', async function () { @@ -463,9 +465,10 @@ describe('ProjectHistoryRedisManager', function () { doc: this.doc_id, } - this.ProjectHistoryRedisManager.promises.queueOps - .calledWithExactly(this.project_id, JSON.stringify(update)) - .should.equal(true) + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(update) + ) }) }) @@ -493,8 +496,8 @@ describe('ProjectHistoryRedisManager', function () { beforeEach(async function () { this.update = { resyncDocContent: { - content: 'one\ntwo', version: this.version, + content: 
'one\ntwo', }, projectHistoryId: this.projectHistoryId, path: this.pathname, @@ -516,19 +519,18 @@ describe('ProjectHistoryRedisManager', function () { }) it('should check if the doc is too large', function () { - this.Limits.docIsTooLarge - .calledWith( - JSON.stringify(this.update).length, - this.lines, - this.settings.max_doc_length - ) - .should.equal(true) + this.Limits.docIsTooLarge.should.have.been.calledWith( + JSON.stringify(this.update).length, + this.lines, + this.settings.max_doc_length + ) }) it('should queue an update', function () { - this.ProjectHistoryRedisManager.promises.queueOps - .calledWithExactly(this.project_id, JSON.stringify(this.update)) - .should.equal(true) + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(this.update) + ) }) }) @@ -551,9 +553,8 @@ describe('ProjectHistoryRedisManager', function () { }) it('should not queue an update if the doc is too large', function () { - this.ProjectHistoryRedisManager.promises.queueOps.called.should.equal( - false - ) + this.ProjectHistoryRedisManager.promises.queueOps.should.not.have.been + .called }) }) @@ -561,10 +562,10 @@ describe('ProjectHistoryRedisManager', function () { beforeEach(async function () { this.update = { resyncDocContent: { - content: 'onedeleted\ntwo', version: this.version, ranges: this.ranges, resolvedCommentIds: this.resolvedCommentIds, + content: 'onedeleted\ntwo', }, projectHistoryId: this.projectHistoryId, path: this.pathname, @@ -601,9 +602,76 @@ describe('ProjectHistoryRedisManager', function () { }) it('should queue an update', function () { - this.ProjectHistoryRedisManager.promises.queueOps - .calledWithExactly(this.project_id, JSON.stringify(this.update)) - .should.equal(true) + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(this.update) + ) + }) + }) + + describe('history-ot', function () { + beforeEach(async function 
() { + this.lines = { + content: 'onedeleted\ntwo', + comments: [{ id: 'id1', ranges: [{ pos: 0, length: 3 }] }], + trackedChanges: [ + { + range: { pos: 3, length: 7 }, + tracking: { + type: 'delete', + userId: 'user-id', + ts: '2025-06-16T14:31:44.910Z', + }, + }, + ], + } + this.update = { + resyncDocContent: { + version: this.version, + historyOTRanges: { + comments: this.lines.comments, + trackedChanges: this.lines.trackedChanges, + }, + content: this.lines.content, + }, + projectHistoryId: this.projectHistoryId, + path: this.pathname, + doc: this.doc_id, + meta: { ts: new Date() }, + } + + await this.ProjectHistoryRedisManager.promises.queueResyncDocContent( + this.project_id, + this.projectHistoryId, + this.doc_id, + this.lines, + this.ranges, + this.resolvedCommentIds, + this.version, + this.pathname, + true + ) + }) + + it('should include tracked deletes in the update', function () { + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(this.update) + ) + }) + + it('should check the doc length without tracked deletes', function () { + this.Limits.stringFileDataContentIsTooLarge.should.have.been.calledWith( + this.lines, + this.settings.max_doc_length + ) + }) + + it('should queue an update', function () { + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(this.update) + ) }) }) }) diff --git a/services/history-v1/api/app/rollout.js b/services/history-v1/api/app/rollout.js new file mode 100644 index 0000000000..24ca0409f8 --- /dev/null +++ b/services/history-v1/api/app/rollout.js @@ -0,0 +1,76 @@ +const crypto = require('node:crypto') + +class Rollout { + constructor(config) { + // The history buffer level is used to determine whether to queue changes + // in Redis or persist them directly to the chunk store. + // If defaults to 0 (no queuing) if not set. 
+ this.historyBufferLevel = config.has('historyBufferLevel') + ? parseInt(config.get('historyBufferLevel'), 10) + : 0 + // The forcePersistBuffer flag will ensure the buffer is fully persisted before + // any persist operation. Set this to true if you want to make the persisted-version + // in Redis match the endVersion of the latest chunk. This should be set to true + // when downgrading from a history buffer level that queues changes in Redis + // without persisting them immediately. + this.forcePersistBuffer = config.has('forcePersistBuffer') + ? config.get('forcePersistBuffer') === 'true' + : false + + // Support gradual rollout of the next history buffer level + // with a percentage of projects using it. + this.nextHistoryBufferLevel = config.has('nextHistoryBufferLevel') + ? parseInt(config.get('nextHistoryBufferLevel'), 10) + : null + this.nextHistoryBufferLevelRolloutPercentage = config.has( + 'nextHistoryBufferLevelRolloutPercentage' + ) + ? parseInt(config.get('nextHistoryBufferLevelRolloutPercentage'), 10) + : 0 + } + + report(logger) { + logger.info( + { + historyBufferLevel: this.historyBufferLevel, + forcePersistBuffer: this.forcePersistBuffer, + nextHistoryBufferLevel: this.nextHistoryBufferLevel, + nextHistoryBufferLevelRolloutPercentage: + this.nextHistoryBufferLevelRolloutPercentage, + }, + this.historyBufferLevel > 0 || this.forcePersistBuffer + ? 'using history buffer' + : 'history buffer disabled' + ) + } + + /** + * Get the history buffer level for a project. + * @param {string} projectId + * @returns {Object} - An object containing the history buffer level and force persist buffer flag. + * @property {number} historyBufferLevel - The history buffer level to use for processing changes. + * @property {boolean} forcePersistBuffer - If true, forces the buffer to be persisted before any operation. 
+ */ + getHistoryBufferLevelOptions(projectId) { + if ( + this.nextHistoryBufferLevel > this.historyBufferLevel && + this.nextHistoryBufferLevelRolloutPercentage > 0 + ) { + const hash = crypto.createHash('sha1').update(projectId).digest('hex') + const percentage = parseInt(hash.slice(0, 8), 16) % 100 + // If the project is in the rollout percentage, we use the next history buffer level. + if (percentage < this.nextHistoryBufferLevelRolloutPercentage) { + return { + historyBufferLevel: this.nextHistoryBufferLevel, + forcePersistBuffer: this.forcePersistBuffer, + } + } + } + return { + historyBufferLevel: this.historyBufferLevel, + forcePersistBuffer: this.forcePersistBuffer, + } + } +} + +module.exports = Rollout diff --git a/services/history-v1/api/controllers/project_import.js b/services/history-v1/api/controllers/project_import.js index edffb19a25..638873d105 100644 --- a/services/history-v1/api/controllers/project_import.js +++ b/services/history-v1/api/controllers/project_import.js @@ -2,6 +2,7 @@ 'use strict' +const config = require('config') const { expressify } = require('@overleaf/promise-utils') const HTTPStatus = require('http-status') @@ -21,10 +22,15 @@ const BatchBlobStore = storage.BatchBlobStore const BlobStore = storage.BlobStore const chunkStore = storage.chunkStore const HashCheckBlobStore = storage.HashCheckBlobStore -const persistChanges = storage.persistChanges +const commitChanges = storage.commitChanges +const persistBuffer = storage.persistBuffer const InvalidChangeError = storage.InvalidChangeError const render = require('./render') +const Rollout = require('../app/rollout') + +const rollout = new Rollout(config) +rollout.report(logger) // display the rollout configuration in the logs async function importSnapshot(req, res) { const projectId = req.swagger.params.project_id.value @@ -35,6 +41,7 @@ async function importSnapshot(req, res) { try { snapshot = Snapshot.fromRaw(rawSnapshot) } catch (err) { + logger.warn({ err, projectId }, 
'failed to import snapshot') return render.unprocessableEntity(res) } @@ -43,6 +50,7 @@ async function importSnapshot(req, res) { historyId = await chunkStore.initializeProject(projectId, snapshot) } catch (err) { if (err instanceof chunkStore.AlreadyInitialized) { + logger.warn({ err, projectId }, 'already initialized') return render.conflict(res) } else { throw err @@ -108,7 +116,12 @@ async function importChanges(req, res, next) { let result try { - result = await persistChanges(projectId, changes, limits, endVersion) + const { historyBufferLevel, forcePersistBuffer } = + rollout.getHistoryBufferLevelOptions(projectId) + result = await commitChanges(projectId, changes, limits, endVersion, { + historyBufferLevel, + forcePersistBuffer, + }) } catch (err) { if ( err instanceof Chunk.ConflictingEndVersion || @@ -141,5 +154,29 @@ async function importChanges(req, res, next) { } } +async function flushChanges(req, res, next) { + const projectId = req.swagger.params.project_id.value + // Use the same limits importChanges, since these are passed to persistChanges + const farFuture = new Date() + farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000) + const limits = { + maxChanges: 0, + minChangeTimestamp: farFuture, + maxChangeTimestamp: farFuture, + autoResync: true, + } + try { + await persistBuffer(projectId, limits) + res.status(HTTPStatus.OK).end() + } catch (err) { + if (err instanceof Chunk.NotFoundError) { + render.notFound(res) + } else { + throw err + } + } +} + exports.importSnapshot = expressify(importSnapshot) exports.importChanges = expressify(importChanges) +exports.flushChanges = expressify(flushChanges) diff --git a/services/history-v1/api/controllers/projects.js b/services/history-v1/api/controllers/projects.js index 47a1d959ad..031833688c 100644 --- a/services/history-v1/api/controllers/projects.js +++ b/services/history-v1/api/controllers/projects.js @@ -34,6 +34,7 @@ async function initializeProject(req, res, next) { 
res.status(HTTPStatus.OK).json({ projectId }) } catch (err) { if (err instanceof chunkStore.AlreadyInitialized) { + logger.warn({ err, projectId }, 'failed to initialize') render.conflict(res) } else { throw err @@ -242,11 +243,15 @@ async function createProjectBlob(req, res, next) { const sizeLimit = new StreamSizeLimit(maxUploadSize) await pipeline(req, sizeLimit, fs.createWriteStream(tmpPath)) if (sizeLimit.sizeLimitExceeded) { + logger.warn( + { projectId, expectedHash, maxUploadSize }, + 'blob exceeds size threshold' + ) return render.requestEntityTooLarge(res) } const hash = await blobHash.fromFile(tmpPath) if (hash !== expectedHash) { - logger.debug({ hash, expectedHash }, 'Hash mismatch') + logger.warn({ projectId, hash, expectedHash }, 'Hash mismatch') return render.conflict(res, 'File hash mismatch') } @@ -343,6 +348,10 @@ async function copyProjectBlob(req, res, next) { targetBlobStore.getBlob(blobHash), ]) if (!sourceBlob) { + logger.warn( + { sourceProjectId, targetProjectId, blobHash }, + 'missing source blob when copying across projects' + ) return render.notFound(res) } // Exit early if the blob exists in the target project. 
diff --git a/services/history-v1/api/swagger/project_import.js b/services/history-v1/api/swagger/project_import.js index a93f42d27e..6103eed74b 100644 --- a/services/history-v1/api/swagger/project_import.js +++ b/services/history-v1/api/swagger/project_import.js @@ -139,9 +139,45 @@ const getChanges = { ], } +const flushChanges = { + 'x-swagger-router-controller': 'project_import', + operationId: 'flushChanges', + tags: ['ProjectImport'], + description: 'Flush project changes from buffer to the chunk store.', + parameters: [ + { + name: 'project_id', + in: 'path', + description: 'project id', + required: true, + type: 'string', + }, + ], + responses: { + 200: { + description: 'Success', + schema: { + $ref: '#/definitions/Project', + }, + }, + 404: { + description: 'Not Found', + schema: { + $ref: '#/definitions/Error', + }, + }, + }, + security: [ + { + basic: [], + }, + ], +} + exports.paths = { '/projects/{project_id}/import': { post: importSnapshot }, '/projects/{project_id}/legacy_import': { post: importSnapshot }, '/projects/{project_id}/changes': { get: getChanges, post: importChanges }, '/projects/{project_id}/legacy_changes': { post: importChanges }, + '/projects/{project_id}/flush': { post: flushChanges }, } diff --git a/services/history-v1/app.js b/services/history-v1/app.js index 261f1001b6..dd991c1a6d 100644 --- a/services/history-v1/app.js +++ b/services/history-v1/app.js @@ -100,11 +100,13 @@ function setupErrorHandling() { }) } if (err.code === 'ENUM_MISMATCH') { + logger.warn({ err, projectId }, err.message) return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({ message: 'invalid enum value: ' + err.paramName, }) } if (err.code === 'REQUIRED') { + logger.warn({ err, projectId }, err.message) return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({ message: err.message, }) diff --git a/services/history-v1/config/custom-environment-variables.json b/services/history-v1/config/custom-environment-variables.json index d07ae2925a..686ca25407 100644 --- 
a/services/history-v1/config/custom-environment-variables.json +++ b/services/history-v1/config/custom-environment-variables.json @@ -84,6 +84,10 @@ "maxFileUploadSize": "MAX_FILE_UPLOAD_SIZE", "httpsOnly": "HTTPS_ONLY", "httpRequestTimeout": "HTTP_REQUEST_TIMEOUT", + "historyBufferLevel": "HISTORY_BUFFER_LEVEL", + "forcePersistBuffer": "FORCE_PERSIST_BUFFER", + "nextHistoryBufferLevel": "NEXT_HISTORY_BUFFER_LEVEL", + "nextHistoryBufferLevelRolloutPercentage": "NEXT_HISTORY_BUFFER_LEVEL_ROLLOUT_PERCENTAGE", "redis": { "queue": { "host": "QUEUES_REDIS_HOST", @@ -100,5 +104,9 @@ "password": "REDIS_PASSWORD", "port": "REDIS_PORT" } + }, + "projectHistory": { + "host": "PROJECT_HISTORY_HOST", + "port": "PROJECT_HISTORY_PORT" } } diff --git a/services/history-v1/config/default.json b/services/history-v1/config/default.json index 5222b84d87..e7732fe3f7 100644 --- a/services/history-v1/config/default.json +++ b/services/history-v1/config/default.json @@ -39,5 +39,8 @@ "databasePoolMin": "2", "databasePoolMax": "10", "httpsOnly": "false", - "httpRequestTimeout": "300000" + "httpRequestTimeout": "300000", + "projectHistory": { + "port": "3054" + } } diff --git a/services/history-v1/docker-compose.ci.yml b/services/history-v1/docker-compose.ci.yml index 0dfe8b99d3..9128451c4f 100644 --- a/services/history-v1/docker-compose.ci.yml +++ b/services/history-v1/docker-compose.ci.yml @@ -39,6 +39,7 @@ services: NODE_OPTIONS: "--unhandled-rejections=strict" volumes: - ./test/acceptance/certs:/certs + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started @@ -55,6 +56,7 @@ services: gcs: condition: service_healthy user: node + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance @@ -66,7 +68,7 @@ services: command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . 
user: root redis: - image: redis + image: redis:7.4.3 healthcheck: test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/history-v1/docker-compose.yml b/services/history-v1/docker-compose.yml index b87d859e1e..cda379fb14 100644 --- a/services/history-v1/docker-compose.yml +++ b/services/history-v1/docker-compose.yml @@ -33,6 +33,7 @@ services: - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries - ./test/acceptance/certs:/certs + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/history-v1 environment: ELASTIC_SEARCH_DSN: es:9200 @@ -71,10 +72,11 @@ services: condition: service_completed_successfully gcs: condition: service_healthy + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance redis: - image: redis + image: redis:7.4.3 healthcheck: test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/history-v1/package.json b/services/history-v1/package.json index 1fdfd95c45..4796cafd03 100644 --- a/services/history-v1/package.json +++ b/services/history-v1/package.json @@ -7,6 +7,7 @@ "private": true, "dependencies": { "@google-cloud/secret-manager": "^5.6.0", + "@overleaf/fetch-utils": "*", "@overleaf/logger": "*", "@overleaf/metrics": "*", "@overleaf/mongo-utils": "*", @@ -36,6 +37,7 @@ "mongodb": "6.12.0", "overleaf-editor-core": "*", "p-limit": "^6.2.0", + "p-queue": "^8.1.0", "pg": "^8.7.1", "pg-query-stream": "^4.2.4", "swagger-tools": "^0.10.4", diff --git a/services/history-v1/storage/index.js b/services/history-v1/storage/index.js index 2aa492f46e..82a51583be 100644 --- a/services/history-v1/storage/index.js +++ b/services/history-v1/storage/index.js @@ -8,6 +8,9 @@ exports.mongodb = require('./lib/mongodb') exports.redis = require('./lib/redis') exports.persistChanges = require('./lib/persist_changes') exports.persistor = require('./lib/persistor') 
+exports.persistBuffer = require('./lib/persist_buffer') +exports.commitChanges = require('./lib/commit_changes') +exports.queueChanges = require('./lib/queue_changes') exports.ProjectArchive = require('./lib/project_archive') exports.streams = require('./lib/streams') exports.temp = require('./lib/temp') diff --git a/services/history-v1/storage/lib/chunk_store/index.js b/services/history-v1/storage/lib/chunk_store/index.js index 6dab84f929..286a8d8764 100644 --- a/services/history-v1/storage/lib/chunk_store/index.js +++ b/services/history-v1/storage/lib/chunk_store/index.js @@ -151,23 +151,48 @@ async function loadAtVersion(projectId, version, opts = {}) { const backend = getBackend(projectId) const blobStore = new BlobStore(projectId) const batchBlobStore = new BatchBlobStore(blobStore) + const latestChunkMetadata = await getLatestChunkMetadata(projectId) - const chunkRecord = await backend.getChunkForVersion(projectId, version, { - preferNewer: opts.preferNewer, - }) + // When loading a chunk for a version there are three cases to consider: + // 1. If `persistedOnly` is true, we always use the requested version + // to fetch the chunk. + // 2. If `persistedOnly` is false and the requested version is in the + // persisted chunk version range, we use the requested version. + // 3. If `persistedOnly` is false and the requested version is ahead of + // the persisted chunk versions, we fetch the latest chunk and see if + // the non-persisted changes include the requested version. + const targetChunkVersion = opts.persistedOnly + ? 
version + : Math.min(latestChunkMetadata.endVersion, version) + + const chunkRecord = await backend.getChunkForVersion( + projectId, + targetChunkVersion, + { + preferNewer: opts.preferNewer, + } + ) const rawHistory = await historyStore.loadRaw(projectId, chunkRecord.id) const history = History.fromRaw(rawHistory) + const startVersion = chunkRecord.endVersion - history.countChanges() if (!opts.persistedOnly) { + // Try to extend the chunk with any non-persisted changes that + // follow the chunk's end version. const nonPersistedChanges = await getChunkExtension( projectId, chunkRecord.endVersion ) history.pushChanges(nonPersistedChanges) + + // Check that the changes do actually contain the requested version + if (version > chunkRecord.endVersion + nonPersistedChanges.length) { + throw new Chunk.VersionNotFoundError(projectId, version) + } } await lazyLoadHistoryFiles(history, batchBlobStore) - return new Chunk(history, chunkRecord.endVersion - history.countChanges()) + return new Chunk(history, startVersion) } /** @@ -190,6 +215,7 @@ async function loadAtTimestamp(projectId, timestamp, opts = {}) { const chunkRecord = await backend.getChunkForTimestamp(projectId, timestamp) const rawHistory = await historyStore.loadRaw(projectId, chunkRecord.id) const history = History.fromRaw(rawHistory) + const startVersion = chunkRecord.endVersion - history.countChanges() if (!opts.persistedOnly) { const nonPersistedChanges = await getChunkExtension( @@ -200,7 +226,7 @@ async function loadAtTimestamp(projectId, timestamp, opts = {}) { } await lazyLoadHistoryFiles(history, batchBlobStore) - return new Chunk(history, chunkRecord.endVersion - history.countChanges()) + return new Chunk(history, startVersion) } /** diff --git a/services/history-v1/storage/lib/chunk_store/mongo.js b/services/history-v1/storage/lib/chunk_store/mongo.js index 26c1bc48ec..49020c6be4 100644 --- a/services/history-v1/storage/lib/chunk_store/mongo.js +++ 
b/services/history-v1/storage/lib/chunk_store/mongo.js @@ -286,6 +286,27 @@ async function updateProjectRecord( ) } +/** + * @param {number} historyId + * @return {Promise} + */ +async function lookupMongoProjectIdFromHistoryId(historyId) { + const project = await mongodb.projects.findOne( + // string for Object ids, number for postgres ids + { 'overleaf.history.id': historyId }, + { projection: { _id: 1 } } + ) + if (!project) { + // should not happen: We flush before allowing a project to be soft-deleted. + throw new OError('mongo project not found by history id', { historyId }) + } + return project._id.toString() +} + +async function resolveHistoryIdToMongoProjectId(projectId) { + return projectId +} + /** * Record that a chunk was replaced by a new one. * @@ -533,4 +554,6 @@ module.exports = { deleteProjectChunks, getOldChunksBatch, deleteOldChunks, + lookupMongoProjectIdFromHistoryId, + resolveHistoryIdToMongoProjectId, } diff --git a/services/history-v1/storage/lib/chunk_store/postgres.js b/services/history-v1/storage/lib/chunk_store/postgres.js index bfb5c6954a..8906db38e1 100644 --- a/services/history-v1/storage/lib/chunk_store/postgres.js +++ b/services/history-v1/storage/lib/chunk_store/postgres.js @@ -5,7 +5,10 @@ const assert = require('../assert') const knex = require('../knex') const knexReadOnly = require('../knex_read_only') const { ChunkVersionConflictError } = require('./errors') -const { updateProjectRecord } = require('./mongo') +const { + updateProjectRecord, + lookupMongoProjectIdFromHistoryId, +} = require('./mongo') const DUPLICATE_KEY_ERROR_CODE = '23505' @@ -472,6 +475,10 @@ async function generateProjectId() { return record.doc_id.toString() } +async function resolveHistoryIdToMongoProjectId(projectId) { + return await lookupMongoProjectIdFromHistoryId(parseInt(projectId, 10)) +} + module.exports = { getLatestChunk, getFirstChunkBeforeTimestamp, @@ -488,4 +495,5 @@ module.exports = { getOldChunksBatch, deleteOldChunks, generateProjectId, 
+ resolveHistoryIdToMongoProjectId, } diff --git a/services/history-v1/storage/lib/chunk_store/redis.js b/services/history-v1/storage/lib/chunk_store/redis.js index 0ae7cee2e5..59bfd81e39 100644 --- a/services/history-v1/storage/lib/chunk_store/redis.js +++ b/services/history-v1/storage/lib/chunk_store/redis.js @@ -480,11 +480,12 @@ async function getNonPersistedChanges(projectId, baseVersion) { } rclient.defineCommand('set_persisted_version', { - numberOfKeys: 3, + numberOfKeys: 4, lua: ` local headVersionKey = KEYS[1] local persistedVersionKey = KEYS[2] - local changesKey = KEYS[3] + local persistTimeKey = KEYS[3] + local changesKey = KEYS[4] local newPersistedVersion = tonumber(ARGV[1]) local maxPersistedChanges = tonumber(ARGV[2]) @@ -501,9 +502,19 @@ rclient.defineCommand('set_persisted_version', { return 'too_low' end + -- Refuse to set a persisted version that is higher than the head version + if newPersistedVersion > headVersion then + return 'too_high' + end + -- Set the persisted version redis.call('SET', persistedVersionKey, newPersistedVersion) + -- Clear the persist time if the persisted version now matches the head version + if newPersistedVersion == headVersion then + redis.call('DEL', persistTimeKey) + end + -- Calculate the starting index, to keep only maxPersistedChanges beyond the persisted version -- Using negative indexing to count backwards from the end of the list local startIndex = newPersistedVersion - headVersion - maxPersistedChanges @@ -530,6 +541,7 @@ async function setPersistedVersion(projectId, persistedVersion) { const keys = [ keySchema.headVersion({ projectId }), keySchema.persistedVersion({ projectId }), + keySchema.persistTime({ projectId }), keySchema.changes({ projectId }), ] @@ -541,6 +553,13 @@ async function setPersistedVersion(projectId, persistedVersion) { status, }) + if (status === 'too_high') { + throw new VersionOutOfBoundsError( + 'Persisted version cannot be higher than head version', + { projectId, persistedVersion 
} + ) + } + return status } catch (err) { metrics.inc('chunk_store.redis.set_persisted_version', 1, { @@ -631,6 +650,7 @@ async function expireProject(projectId) { metrics.inc('chunk_store.redis.set_persisted_version', 1, { status, }) + return status } catch (err) { metrics.inc('chunk_store.redis.set_persisted_version', 1, { status: 'error', diff --git a/services/history-v1/storage/lib/commit_changes.js b/services/history-v1/storage/lib/commit_changes.js new file mode 100644 index 0000000000..5749e5fc0e --- /dev/null +++ b/services/history-v1/storage/lib/commit_changes.js @@ -0,0 +1,159 @@ +// @ts-check + +'use strict' + +const metrics = require('@overleaf/metrics') +const redisBackend = require('./chunk_store/redis') +const logger = require('@overleaf/logger') +const queueChanges = require('./queue_changes') +const persistChanges = require('./persist_changes') +const persistBuffer = require('./persist_buffer') + +/** + * @typedef {import('overleaf-editor-core').Change} Change + */ + +/** + * Handle incoming changes by processing them according to the specified options. + * @param {string} projectId + * @param {Change[]} changes + * @param {Object} limits + * @param {number} endVersion + * @param {Object} options + * @param {number} [options.historyBufferLevel] - The history buffer level to use for processing changes. + * @param {Boolean} [options.forcePersistBuffer] - If true, forces the buffer to be persisted before any operation. + * @return {Promise.} + */ + +async function commitChanges( + projectId, + changes, + limits, + endVersion, + options = {} +) { + const { historyBufferLevel, forcePersistBuffer } = options + + // Force the buffer to be persisted if specified. 
+ if (forcePersistBuffer) { + try { + const status = await redisBackend.expireProject(projectId) // clear the project from Redis if it is persisted, returns 'not-persisted' if it was not persisted + if (status === 'not-persisted') { + await persistBuffer(projectId, limits) + await redisBackend.expireProject(projectId) // clear the project from Redis after persisting + metrics.inc('persist_buffer_force', 1, { status: 'persisted' }) + } + } catch (err) { + metrics.inc('persist_buffer_force', 1, { status: 'error' }) + logger.error( + { err, projectId }, + 'failed to persist buffer before committing changes' + ) + } + } + + metrics.inc('commit_changes', 1, { + history_buffer_level: historyBufferLevel || 0, + }) + + // Now handle the changes based on the configured history buffer level. + switch (historyBufferLevel) { + case 4: // Queue changes and only persist them in the background + await queueChanges(projectId, changes, endVersion) + return {} + case 3: // Queue changes and immediately persist with persistBuffer + await queueChanges(projectId, changes, endVersion) + return await persistBuffer(projectId, limits) + case 2: // Equivalent to queueChangesInRedis:true + await queueChangesFake(projectId, changes, endVersion) + return await persistChanges(projectId, changes, limits, endVersion) + case 1: // Queue changes with fake persist only for projects in redis already + await queueChangesFakeOnlyIfExists(projectId, changes, endVersion) + return await persistChanges(projectId, changes, limits, endVersion) + case 0: // Persist changes directly to the chunk store + return await persistChanges(projectId, changes, limits, endVersion) + default: + throw new Error(`Invalid history buffer level: ${historyBufferLevel}`) + } +} + +/** + * Queues a set of changes in redis as if they had been persisted, ignoring any errors. 
+ * @param {string} projectId + * @param {Change[]} changes + * @param {number} endVersion + * @param {Object} [options] + * @param {boolean} [options.onlyIfExists] - If true, only queue changes if the project + * already exists in Redis. + */ + +async function queueChangesFake(projectId, changes, endVersion, options = {}) { + try { + await queueChanges(projectId, changes, endVersion) + await fakePersistRedisChanges(projectId, changes, endVersion) + } catch (err) { + logger.error({ err }, 'Chunk buffer verification failed') + } +} + +/** + * Queues changes in Redis, simulating persistence, but only if the project already exists. + * @param {string} projectId - The ID of the project. + * @param {Change[]} changes - An array of changes to be queued. + * @param {number} endVersion - The expected version of the project before these changes are applied. + */ + +async function queueChangesFakeOnlyIfExists(projectId, changes, endVersion) { + await queueChangesFake(projectId, changes, endVersion, { + onlyIfExists: true, + }) +} + +/** + * Simulates the persistence of changes by verifying a given set of changes against + * what is currently stored as non-persisted in Redis, and then updates the + * persisted version number in Redis. + * + * @async + * @param {string} projectId - The ID of the project. + * @param {Change[]} changesToPersist - An array of changes that are expected to be + * persisted. These are used for verification + * against the changes currently in Redis. + * @param {number} baseVersion - The base version number from which to calculate + * the new persisted version. + * @returns {Promise} A promise that resolves when the persisted version + * in Redis has been updated. 
+ */ +async function fakePersistRedisChanges( + projectId, + changesToPersist, + baseVersion +) { + const nonPersistedChanges = await redisBackend.getNonPersistedChanges( + projectId, + baseVersion + ) + + if ( + serializeChanges(nonPersistedChanges) === serializeChanges(changesToPersist) + ) { + metrics.inc('persist_redis_changes_verification', 1, { status: 'match' }) + } else { + logger.warn({ projectId }, 'mismatch of non-persisted changes from Redis') + metrics.inc('persist_redis_changes_verification', 1, { + status: 'mismatch', + }) + } + + const persistedVersion = baseVersion + nonPersistedChanges.length + await redisBackend.setPersistedVersion(projectId, persistedVersion) +} + +/** + * @param {Change[]} changes + */ +function serializeChanges(changes) { + return JSON.stringify(changes.map(change => change.toRaw())) +} + +module.exports = commitChanges diff --git a/services/history-v1/storage/lib/persist_buffer.js b/services/history-v1/storage/lib/persist_buffer.js new file mode 100644 index 0000000000..d562388f87 --- /dev/null +++ b/services/history-v1/storage/lib/persist_buffer.js @@ -0,0 +1,206 @@ +// @ts-check +'use strict' + +const logger = require('@overleaf/logger') +const metrics = require('@overleaf/metrics') +const OError = require('@overleaf/o-error') +const assert = require('./assert') +const chunkStore = require('./chunk_store') +const { BlobStore } = require('./blob_store') +const BatchBlobStore = require('./batch_blob_store') +const persistChanges = require('./persist_changes') +const resyncProject = require('./resync_project') +const redisBackend = require('./chunk_store/redis') + +/** + * Persist the changes from Redis buffer to the main storage + * + * Algorithm Outline: + * 1. Get the latest chunk's endVersion from the database + * 2. Get non-persisted changes from Redis that are after this endVersion. + * 3. If no such changes, exit. + * 4. Load file blobs for these Redis changes. + * 5. 
Run the persistChanges() algorithm to store these changes into a new chunk(s) in GCS. + * - This must not decrease the endVersion. If changes were processed, it must advance. + * 6. Set the new persisted version (endVersion of the latest persisted chunk) in Redis. + * + * @param {string} projectId + * @param {Object} limits + * @throws {Error | OError} If a critical error occurs during persistence. + */ +async function persistBuffer(projectId, limits) { + assert.projectId(projectId) + logger.debug({ projectId }, 'starting persistBuffer operation') + + // 1. Get the latest chunk's endVersion from GCS/main store + let endVersion + const latestChunkMetadata = await chunkStore.getLatestChunkMetadata(projectId) + + if (latestChunkMetadata) { + endVersion = latestChunkMetadata.endVersion + } else { + endVersion = 0 // No chunks found, start from version 0 + logger.debug({ projectId }, 'no existing chunks found in main storage') + } + + logger.debug({ projectId, endVersion }, 'got latest persisted chunk') + + // 2. Get non-persisted changes from Redis + const changesToPersist = await redisBackend.getNonPersistedChanges( + projectId, + endVersion + ) + + if (changesToPersist.length === 0) { + logger.debug( + { projectId, endVersion }, + 'no new changes in Redis buffer to persist' + ) + metrics.inc('persist_buffer', 1, { status: 'no_changes' }) + // No changes to persist, update the persisted version in Redis + // to match the current endVersion. This shouldn't be needed + // unless a worker failed to update the persisted version. + await redisBackend.setPersistedVersion(projectId, endVersion) + const { chunk } = await chunkStore.loadByChunkRecord( + projectId, + latestChunkMetadata + ) + // Return the result in the same format as persistChanges + // so that the caller can handle it uniformly. 
+ return { + numberOfChangesPersisted: changesToPersist.length, + originalEndVersion: endVersion, + currentChunk: chunk, + } + } + + logger.debug( + { + projectId, + endVersion, + count: changesToPersist.length, + }, + 'found changes in Redis to persist' + ) + + // 4. Load file blobs for these Redis changes. Errors will propagate. + const blobStore = new BlobStore(projectId) + const batchBlobStore = new BatchBlobStore(blobStore) + + const blobHashes = new Set() + for (const change of changesToPersist) { + change.findBlobHashes(blobHashes) + } + if (blobHashes.size > 0) { + await batchBlobStore.preload(Array.from(blobHashes)) + } + for (const change of changesToPersist) { + await change.loadFiles('lazy', blobStore) + } + + // 5. Run the persistChanges() algorithm. Errors will propagate. + logger.debug( + { + projectId, + endVersion, + changeCount: changesToPersist.length, + }, + 'calling persistChanges' + ) + + const persistResult = await persistChanges( + projectId, + changesToPersist, + limits, + endVersion + ) + + if (!persistResult || !persistResult.currentChunk) { + metrics.inc('persist_buffer', 1, { status: 'no-chunk-error' }) + throw new OError( + 'persistChanges did not produce a new chunk for non-empty changes', + { + projectId, + endVersion, + changeCount: changesToPersist.length, + } + ) + } + + const newPersistedChunk = persistResult.currentChunk + const newEndVersion = newPersistedChunk.getEndVersion() + + if (newEndVersion <= endVersion) { + metrics.inc('persist_buffer', 1, { status: 'chunk-version-mismatch' }) + throw new OError( + 'persisted chunk endVersion must be greater than current persisted chunk end version for non-empty changes', + { + projectId, + newEndVersion, + endVersion, + changeCount: changesToPersist.length, + } + ) + } + + logger.debug( + { + projectId, + oldVersion: endVersion, + newVersion: newEndVersion, + }, + 'successfully persisted changes from Redis to main storage' + ) + + // 6. Set the persisted version in Redis. 
Errors will propagate. + const status = await redisBackend.setPersistedVersion( + projectId, + newEndVersion + ) + + if (status !== 'ok') { + metrics.inc('persist_buffer', 1, { status: 'error-on-persisted-version' }) + throw new OError('failed to update persisted version in Redis', { + projectId, + newEndVersion, + status, + }) + } + + logger.debug( + { projectId, newEndVersion }, + 'updated persisted version in Redis' + ) + + // 7. Resync the project if content hash validation failed + if (limits.autoResync && persistResult.resyncNeeded) { + if ( + changesToPersist.some( + change => change.getOrigin()?.getKind() === 'history-resync' + ) + ) { + // To avoid an infinite loop, do not resync if the current batch of + // changes contains a history resync. + logger.warn( + { projectId }, + 'content hash validation failed while persisting a history resync, skipping additional resync' + ) + } else { + const backend = chunkStore.getBackend(projectId) + const mongoProjectId = + await backend.resolveHistoryIdToMongoProjectId(projectId) + await resyncProject(mongoProjectId) + } + } + + logger.debug( + { projectId, finalPersistedVersion: newEndVersion }, + 'persistBuffer operation completed successfully' + ) + + metrics.inc('persist_buffer', 1, { status: 'persisted' }) + + return persistResult +} + +module.exports = persistBuffer diff --git a/services/history-v1/storage/lib/persist_changes.js b/services/history-v1/storage/lib/persist_changes.js index 5b80285eb0..d2ca00053f 100644 --- a/services/history-v1/storage/lib/persist_changes.js +++ b/services/history-v1/storage/lib/persist_changes.js @@ -4,7 +4,6 @@ const _ = require('lodash') const logger = require('@overleaf/logger') -const metrics = require('@overleaf/metrics') const core = require('overleaf-editor-core') const Chunk = core.Chunk @@ -15,7 +14,6 @@ const chunkStore = require('./chunk_store') const { BlobStore } = require('./blob_store') const { InvalidChangeError } = require('./errors') const { getContentHash } = 
require('./content_hash') -const redisBackend = require('./chunk_store/redis') function countChangeBytes(change) { // Note: This is not quite accurate, because the raw change may contain raw @@ -202,45 +200,6 @@ async function persistChanges(projectId, allChanges, limits, clientEndVersion) { currentSnapshot.applyAll(currentChunk.getChanges()) } - async function queueChangesInRedis() { - const hollowSnapshot = currentSnapshot.clone() - // We're transforming a lazy snapshot to a hollow snapshot, so loadFiles() - // doesn't really need a blobStore, but its signature still requires it. - const blobStore = new BlobStore(projectId) - await hollowSnapshot.loadFiles('hollow', blobStore) - hollowSnapshot.applyAll(changesToPersist, { strict: true }) - const baseVersion = currentChunk.getEndVersion() - await redisBackend.queueChanges( - projectId, - hollowSnapshot, - baseVersion, - changesToPersist - ) - } - - async function fakePersistRedisChanges() { - const baseVersion = currentChunk.getEndVersion() - const nonPersistedChanges = await redisBackend.getNonPersistedChanges( - projectId, - baseVersion - ) - - if ( - serializeChanges(nonPersistedChanges) === - serializeChanges(changesToPersist) - ) { - metrics.inc('persist_redis_changes_verification', 1, { status: 'match' }) - } else { - logger.warn({ projectId }, 'mismatch of non-persisted changes from Redis') - metrics.inc('persist_redis_changes_verification', 1, { - status: 'mismatch', - }) - } - - const persistedVersion = baseVersion + nonPersistedChanges.length - await redisBackend.setPersistedVersion(projectId, persistedVersion) - } - async function extendLastChunkIfPossible() { const timer = new Timer() const changesPushed = await fillChunk(currentChunk, changesToPersist) @@ -289,12 +248,6 @@ async function persistChanges(projectId, allChanges, limits, clientEndVersion) { const numberOfChangesToPersist = oldChanges.length await loadLatestChunk() - try { - await queueChangesInRedis() - await fakePersistRedisChanges() - } 
catch (err) { - logger.error({ err }, 'Chunk buffer verification failed') - } await extendLastChunkIfPossible() await createNewChunksAsNeeded() @@ -309,11 +262,4 @@ async function persistChanges(projectId, allChanges, limits, clientEndVersion) { } } -/** - * @param {core.Change[]} changes - */ -function serializeChanges(changes) { - return JSON.stringify(changes.map(change => change.toRaw())) -} - module.exports = persistChanges diff --git a/services/history-v1/storage/lib/queue_changes.js b/services/history-v1/storage/lib/queue_changes.js new file mode 100644 index 0000000000..6b8d4b22b4 --- /dev/null +++ b/services/history-v1/storage/lib/queue_changes.js @@ -0,0 +1,75 @@ +// @ts-check + +'use strict' + +const redisBackend = require('./chunk_store/redis') +const { BlobStore } = require('./blob_store') +const chunkStore = require('./chunk_store') +const core = require('overleaf-editor-core') +const Chunk = core.Chunk + +/** + * Queues an incoming set of changes after validating them against the current snapshot. + * + * @async + * @function queueChanges + * @param {string} projectId - The project to queue changes for. + * @param {Array} changesToQueue - An array of change objects to be applied and queued. + * @param {number} endVersion - The expected version of the project before these changes are applied. + * This is used for optimistic concurrency control. + * @param {Object} [opts] - Additional options for queuing changes. + * @throws {Chunk.ConflictingEndVersion} If the provided `endVersion` does not match the + * current version of the project. + * @returns {Promise} A promise that resolves with the status returned by the + * `redisBackend.queueChanges` operation. 
+ */ +async function queueChanges(projectId, changesToQueue, endVersion, opts) { + const result = await redisBackend.getHeadSnapshot(projectId) + let currentSnapshot = null + let currentVersion = null + if (result) { + // If we have a snapshot in redis, we can use it to check the current state + // of the project and apply changes to it. + currentSnapshot = result.snapshot + currentVersion = result.version + } else { + // Otherwise, load the latest chunk from the chunk store. + const latestChunk = await chunkStore.loadLatest(projectId, { + persistedOnly: true, + }) + // Throw an error if no latest chunk is found, indicating the project has not been initialised. + if (!latestChunk) { + throw new Chunk.NotFoundError(projectId) + } + currentSnapshot = latestChunk.getSnapshot() + currentSnapshot.applyAll(latestChunk.getChanges()) + currentVersion = latestChunk.getEndVersion() + } + + // Ensure the endVersion matches the current version of the project. + if (endVersion !== currentVersion) { + throw new Chunk.ConflictingEndVersion(endVersion, currentVersion) + } + + // Compute the new hollow snapshot to be saved to redis. + const hollowSnapshot = currentSnapshot + const blobStore = new BlobStore(projectId) + await hollowSnapshot.loadFiles('hollow', blobStore) + // Clone the changes to avoid modifying the original ones when computing the hollow snapshot. 
+ const hollowChanges = changesToQueue.map(change => change.clone()) + for (const change of hollowChanges) { + await change.loadFiles('hollow', blobStore) + } + hollowSnapshot.applyAll(hollowChanges, { strict: true }) + const baseVersion = currentVersion + const status = await redisBackend.queueChanges( + projectId, + hollowSnapshot, + baseVersion, + changesToQueue, + opts + ) + return status +} + +module.exports = queueChanges diff --git a/services/history-v1/storage/lib/resync_project.js b/services/history-v1/storage/lib/resync_project.js new file mode 100644 index 0000000000..3ec680bb5b --- /dev/null +++ b/services/history-v1/storage/lib/resync_project.js @@ -0,0 +1,14 @@ +// @ts-check + +const config = require('config') +const { fetchNothing } = require('@overleaf/fetch-utils') + +const PROJECT_HISTORY_URL = `http://${config.projectHistory.host}:${config.projectHistory.port}` + +async function resyncProject(projectId) { + await fetchNothing(`${PROJECT_HISTORY_URL}/project/${projectId}/resync`, { + method: 'POST', + }) +} + +module.exports = resyncProject diff --git a/services/history-v1/storage/lib/scan.js b/services/history-v1/storage/lib/scan.js index 1f2a335254..d55f5362c1 100644 --- a/services/history-v1/storage/lib/scan.js +++ b/services/history-v1/storage/lib/scan.js @@ -1,3 +1,7 @@ +// @ts-check + +'use strict' + const logger = require('@overleaf/logger') const { JobNotFoundError, JobNotReadyError } = require('./chunk_store/errors') const BATCH_SIZE = 1000 // Default batch size for SCAN diff --git a/services/history-v1/storage/scripts/expire_redis_chunks.js b/services/history-v1/storage/scripts/expire_redis_chunks.js index af2be097b6..60ce4c66f6 100644 --- a/services/history-v1/storage/scripts/expire_redis_chunks.js +++ b/services/history-v1/storage/scripts/expire_redis_chunks.js @@ -14,12 +14,9 @@ logger.initialize('expire-redis-chunks') async function expireProjectAction(projectId) { const job = await claimExpireJob(projectId) - try { - await 
expireProject(projectId) - } finally { - if (job && job.close) { - await job.close() - } + await expireProject(projectId) + if (job && job.close) { + await job.close() } } diff --git a/services/history-v1/storage/scripts/persist_and_expire_queues.sh b/services/history-v1/storage/scripts/persist_and_expire_queues.sh new file mode 100644 index 0000000000..d5789541da --- /dev/null +++ b/services/history-v1/storage/scripts/persist_and_expire_queues.sh @@ -0,0 +1,3 @@ +#!/bin/sh +node storage/scripts/persist_redis_chunks.mjs --queue --max-time 270 +node storage/scripts/expire_redis_chunks.js diff --git a/services/history-v1/storage/scripts/persist_redis_chunks.mjs b/services/history-v1/storage/scripts/persist_redis_chunks.mjs new file mode 100644 index 0000000000..dd7e9f3a51 --- /dev/null +++ b/services/history-v1/storage/scripts/persist_redis_chunks.mjs @@ -0,0 +1,181 @@ +import config from 'config' +import PQueue from 'p-queue' +import { fetchNothing } from '@overleaf/fetch-utils' +import logger from '@overleaf/logger' +import commandLineArgs from 'command-line-args' +import * as redis from '../lib/redis.js' +import knex from '../lib/knex.js' +import knexReadOnly from '../lib/knex_read_only.js' +import { client } from '../lib/mongodb.js' +import { scanAndProcessDueItems } from '../lib/scan.js' +import persistBuffer from '../lib/persist_buffer.js' +import { claimPersistJob } from '../lib/chunk_store/redis.js' +import { loadGlobalBlobs } from '../lib/blob_store/index.js' +import { EventEmitter } from 'node:events' +import { fileURLToPath } from 'node:url' + +// Something is registering 11 listeners, over the limit of 10, which generates +// a lot of warning noise. 
+EventEmitter.defaultMaxListeners = 11 + +const rclient = redis.rclientHistory + +const optionDefinitions = [ + { name: 'dry-run', alias: 'd', type: Boolean }, + { name: 'queue', type: Boolean }, + { name: 'max-time', type: Number }, + { name: 'min-rate', type: Number, defaultValue: 1 }, +] +const options = commandLineArgs(optionDefinitions) +const DRY_RUN = options['dry-run'] || false +const USE_QUEUE = options.queue || false +const MAX_TIME = options['max-time'] || null +const MIN_RATE = options['min-rate'] +const HISTORY_V1_URL = `http://${process.env.HISTORY_V1_HOST || 'localhost'}:${process.env.PORT || 3100}` +let isShuttingDown = false + +logger.initialize('persist-redis-chunks') + +async function persistProjectAction(projectId) { + const job = await claimPersistJob(projectId) + // Set limits to force us to persist all of the changes. + const farFuture = new Date() + farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000) + const limits = { + maxChanges: 0, + minChangeTimestamp: farFuture, + maxChangeTimestamp: farFuture, + autoResync: true, + } + await persistBuffer(projectId, limits) + if (job && job.close) { + await job.close() + } +} + +async function requestProjectFlush(projectId) { + const job = await claimPersistJob(projectId) + logger.debug({ projectId }, 'sending project flush request') + const url = `${HISTORY_V1_URL}/api/projects/${projectId}/flush` + const credentials = Buffer.from( + `staging:${config.get('basicHttpAuth.password')}` + ).toString('base64') + await fetchNothing(url, { + method: 'POST', + headers: { + Authorization: `Basic ${credentials}`, + }, + }) + if (job && job.close) { + await job.close() + } +} + +async function persistQueuedProjects(queuedProjects) { + const totalCount = queuedProjects.size + // Compute the rate at which we need to dispatch requests + const targetRate = MAX_TIME > 0 ? Math.ceil(totalCount / MAX_TIME) : 0 + // Rate limit to spread the requests over the interval. 
+ const queue = new PQueue({ + intervalCap: Math.max(MIN_RATE, targetRate), + interval: 1000, // use a 1 second interval + }) + logger.info( + { totalCount, targetRate, minRate: MIN_RATE, maxTime: MAX_TIME }, + 'dispatching project flush requests' + ) + const startTime = Date.now() + let dispatchedCount = 0 + for (const projectId of queuedProjects) { + if (isShuttingDown) { + logger.info('Shutting down, stopping project flush requests') + queue.clear() + break + } + queue.add(async () => { + try { + await requestProjectFlush(projectId) + } catch (err) { + logger.error({ err, projectId }, 'error while flushing project') + } + }) + dispatchedCount++ + if (dispatchedCount % 1000 === 0) { + logger.info( + { count: dispatchedCount }, + 'dispatched project flush requests' + ) + } + await queue.onEmpty() + } + const elapsedTime = Math.floor((Date.now() - startTime) / 1000) + logger.info( + { count: totalCount, elapsedTime }, + 'dispatched project flush requests' + ) + await queue.onIdle() +} + +async function runPersistChunks() { + const queuedProjects = new Set() + + async function queueProjectAction(projectId) { + queuedProjects.add(projectId) + } + + await loadGlobalBlobs() + await scanAndProcessDueItems( + rclient, + 'persistChunks', + 'persist-time', + USE_QUEUE ? 
queueProjectAction : persistProjectAction, + DRY_RUN + ) + + if (USE_QUEUE) { + if (isShuttingDown) { + logger.info('Shutting down, skipping queued project persistence') + return + } + logger.info( + { count: queuedProjects.size }, + 'queued projects for persistence' + ) + await persistQueuedProjects(queuedProjects) + } +} + +async function main() { + try { + await runPersistChunks() + } catch (err) { + logger.fatal( + { err, taskName: 'persistChunks' }, + 'Unhandled error in runPersistChunks' + ) + process.exit(1) + } finally { + await redis.disconnect() + await client.close() + await knex.destroy() + await knexReadOnly.destroy() + } +} + +function gracefulShutdown() { + if (isShuttingDown) { + return + } + isShuttingDown = true + logger.info({ isShuttingDown }, 'received shutdown signal, cleaning up...') +} + +// Check if the module is being run directly +const currentScriptPath = fileURLToPath(import.meta.url) +if (process.argv[1] === currentScriptPath) { + process.on('SIGINT', gracefulShutdown) + process.on('SIGTERM', gracefulShutdown) + main() +} + +export { runPersistChunks } diff --git a/services/history-v1/storage/scripts/show_buffer.js b/services/history-v1/storage/scripts/show_buffer.js new file mode 100644 index 0000000000..1d80ee227d --- /dev/null +++ b/services/history-v1/storage/scripts/show_buffer.js @@ -0,0 +1,117 @@ +#!/usr/bin/env node +// @ts-check + +const { rclientHistory: rclient } = require('../lib/redis') +const { keySchema } = require('../lib/chunk_store/redis') +const commandLineArgs = require('command-line-args') + +const optionDefinitions = [ + { name: 'historyId', type: String, defaultOption: true }, +] + +// Column width for key display alignment; can be overridden with COL_WIDTH env variable +const COLUMN_WIDTH = process.env.COL_WIDTH + ? 
parseInt(process.env.COL_WIDTH, 10) + : 45 + +let options +try { + options = commandLineArgs(optionDefinitions) +} catch (e) { + console.error( + 'Error parsing command line arguments:', + e instanceof Error ? e.message : String(e) + ) + console.error('Usage: ./show_buffer.js ') + process.exit(1) +} + +const { historyId } = options + +if (!historyId) { + console.error('Usage: ./show_buffer.js ') + process.exit(1) +} + +function format(str, indent = COLUMN_WIDTH + 2) { + const lines = str.split('\n') + for (let i = 1; i < lines.length; i++) { + lines[i] = ' '.repeat(indent) + lines[i] + } + return lines.join('\n') +} + +async function displayKeyValue( + rclient, + key, + { parseJson = false, formatDate = false } = {} +) { + const value = await rclient.get(key) + let displayValue = '(nil)' + if (value) { + if (parseJson) { + try { + displayValue = format(JSON.stringify(JSON.parse(value), null, 2)) + } catch (e) { + displayValue = ` Raw value: ${value}` + } + } else if (formatDate) { + const ts = parseInt(value, 10) + displayValue = `${new Date(ts).toISOString()} (${value})` + } else { + displayValue = value + } + } + console.log(`${key.padStart(COLUMN_WIDTH)}: ${displayValue}`) +} + +async function displayBuffer(projectId) { + console.log(`Buffer for history ID: ${projectId}`) + console.log('--------------------------------------------------') + + try { + const headKey = keySchema.head({ projectId }) + const headVersionKey = keySchema.headVersion({ projectId }) + const persistedVersionKey = keySchema.persistedVersion({ projectId }) + const expireTimeKey = keySchema.expireTime({ projectId }) + const persistTimeKey = keySchema.persistTime({ projectId }) + const changesKey = keySchema.changes({ projectId }) + + await displayKeyValue(rclient, headKey, { parseJson: true }) + await displayKeyValue(rclient, headVersionKey) + await displayKeyValue(rclient, persistedVersionKey) + await displayKeyValue(rclient, expireTimeKey, { formatDate: true }) + await 
displayKeyValue(rclient, persistTimeKey, { formatDate: true }) + + const changesList = await rclient.lrange(changesKey, 0, -1) + + // 6. changes + let changesListDisplay = '(nil)' + if (changesList) { + changesListDisplay = changesList.length + ? format( + changesList + .map((change, index) => `[${index}]: ${change}`) + .join('\n') + ) + : '(empty list)' + } + console.log(`${changesKey.padStart(COLUMN_WIDTH)}: ${changesListDisplay}`) + } catch (error) { + console.error('Error fetching data from Redis:', error) + throw error + } +} + +;(async () => { + let errorOccurred = false + try { + await displayBuffer(historyId) + } catch (error) { + errorOccurred = true + } finally { + rclient.quit(() => { + process.exit(errorOccurred ? 1 : 0) + }) + } +})() diff --git a/services/history-v1/test/acceptance/js/api/project_flush.test.js b/services/history-v1/test/acceptance/js/api/project_flush.test.js new file mode 100644 index 0000000000..f8d0b23d8e --- /dev/null +++ b/services/history-v1/test/acceptance/js/api/project_flush.test.js @@ -0,0 +1,66 @@ +'use strict' + +const BPromise = require('bluebird') +const { expect } = require('chai') +const HTTPStatus = require('http-status') +const fetch = require('node-fetch') +const fs = BPromise.promisifyAll(require('node:fs')) + +const cleanup = require('../storage/support/cleanup') +const fixtures = require('../storage/support/fixtures') +const testFiles = require('../storage/support/test_files') +const testProjects = require('./support/test_projects') +const testServer = require('./support/test_server') + +const { Change, File, Operation } = require('overleaf-editor-core') +const queueChanges = require('../../../../storage/lib/queue_changes') +const { getState } = require('../../../../storage/lib/chunk_store/redis') + +describe('project flush', function () { + beforeEach(cleanup.everything) + beforeEach(fixtures.create) + + it('persists queued changes to the chunk store', async function () { + const basicAuthClient = 
testServer.basicAuthClient + const projectId = await testProjects.createEmptyProject() + + // upload an empty file + const response = await fetch( + testServer.url( + `/api/projects/${projectId}/blobs/${File.EMPTY_FILE_HASH}`, + { qs: { pathname: 'main.tex' } } + ), + { + method: 'PUT', + body: fs.createReadStream(testFiles.path('empty.tex')), + headers: { + Authorization: testServer.basicAuthHeader, + }, + } + ) + expect(response.ok).to.be.true + + const testFile = File.fromHash(File.EMPTY_FILE_HASH) + const testChange = new Change( + [Operation.addFile('main.tex', testFile)], + new Date() + ) + await queueChanges(projectId, [testChange], 0) + + // Verify that the changes are queued and not yet persisted + const initialState = await getState(projectId) + expect(initialState.persistedVersion).to.be.null + expect(initialState.changes).to.have.lengthOf(1) + + const importResponse = + await basicAuthClient.apis.ProjectImport.flushChanges({ + project_id: projectId, + }) + + expect(importResponse.status).to.equal(HTTPStatus.OK) + + // Verify that the changes were persisted to the chunk store + const finalState = await getState(projectId) + expect(finalState.persistedVersion).to.equal(1) + }) +}) diff --git a/services/history-v1/test/acceptance/js/api/rollout.test.js b/services/history-v1/test/acceptance/js/api/rollout.test.js new file mode 100644 index 0000000000..f1a65e5aff --- /dev/null +++ b/services/history-v1/test/acceptance/js/api/rollout.test.js @@ -0,0 +1,115 @@ +const config = require('config') +const sinon = require('sinon') +const { expect } = require('chai') + +const cleanup = require('../storage/support/cleanup') +const Rollout = require('../../../../api/app/rollout') + +describe('rollout', function () { + beforeEach(cleanup.everything) + beforeEach('Set up stubs', function () { + sinon.stub(config, 'has').callThrough() + sinon.stub(config, 'get').callThrough() + }) + afterEach(sinon.restore) + + it('should return a valid history buffer level', function () 
{ + setMockConfig('historyBufferLevel', '2') + setMockConfig('forcePersistBuffer', 'false') + + const rollout = new Rollout(config) + const { historyBufferLevel, forcePersistBuffer } = + rollout.getHistoryBufferLevelOptions('test-project-id') + expect(historyBufferLevel).to.equal(2) + expect(forcePersistBuffer).to.be.false + }) + + it('should return a valid history buffer level and force persist buffer options', function () { + setMockConfig('historyBufferLevel', '1') + setMockConfig('forcePersistBuffer', 'true') + const rollout = new Rollout(config) + const { historyBufferLevel, forcePersistBuffer } = + rollout.getHistoryBufferLevelOptions('test-project-id') + expect(historyBufferLevel).to.equal(1) + expect(forcePersistBuffer).to.be.true + }) + + describe('with a higher next history buffer level rollout', function () { + beforeEach(function () { + setMockConfig('historyBufferLevel', '2') + setMockConfig('forcePersistBuffer', 'false') + setMockConfig('nextHistoryBufferLevel', '3') + }) + it('should return the expected history buffer level when the rollout percentage is zero', function () { + setMockConfig('nextHistoryBufferLevelRolloutPercentage', '0') + const rollout = new Rollout(config) + for (let i = 0; i < 1000; i++) { + const { historyBufferLevel, forcePersistBuffer } = + rollout.getHistoryBufferLevelOptions(`test-project-id-${i}`) + expect(historyBufferLevel).to.equal(2) + expect(forcePersistBuffer).to.be.false + } + }) + + it('should return the expected distribution of levels when the rollout percentage is 10%', function () { + setMockConfig('nextHistoryBufferLevelRolloutPercentage', '10') + const rollout = new Rollout(config) + let currentLevel = 0 + let nextLevel = 0 + for (let i = 0; i < 1000; i++) { + const { historyBufferLevel } = rollout.getHistoryBufferLevelOptions( + `test-project-id-${i}` + ) + switch (historyBufferLevel) { + case 2: + currentLevel++ + break + case 3: + nextLevel++ + break + default: + expect.fail( + `Unexpected history buffer 
level: ${historyBufferLevel}` + ) + } + } + const twoPercentage = (currentLevel / 1000) * 100 + const threePercentage = (nextLevel / 1000) * 100 + expect(twoPercentage).to.be.closeTo(90, 5) // 90% for level 2 + expect(threePercentage).to.be.closeTo(10, 5) // 10% for level 3 + }) + }) + describe('with a next history buffer level lower than the current level', function () { + beforeEach(function () { + setMockConfig('historyBufferLevel', '3') + setMockConfig('forcePersistBuffer', 'false') + setMockConfig('nextHistoryBufferLevel', '2') + }) + it('should always return the current level when the rollout percentage is zero', function () { + setMockConfig('nextHistoryBufferLevelRolloutPercentage', '0') + const rollout = new Rollout(config) + for (let i = 0; i < 1000; i++) { + const { historyBufferLevel, forcePersistBuffer } = + rollout.getHistoryBufferLevelOptions(`test-project-id-${i}`) + expect(historyBufferLevel).to.equal(3) + expect(forcePersistBuffer).to.be.false + } + }) + + it('should always return the current level regardless of the rollout percentage', function () { + setMockConfig('nextHistoryBufferLevelRolloutPercentage', '10') + const rollout = new Rollout(config) + for (let i = 0; i < 1000; i++) { + const { historyBufferLevel } = rollout.getHistoryBufferLevelOptions( + `test-project-id-${i}` + ) + expect(historyBufferLevel).to.equal(3) + } + }) + }) +}) + +function setMockConfig(path, value) { + config.has.withArgs(path).returns(true) + config.get.withArgs(path).returns(value) +} diff --git a/services/history-v1/test/acceptance/js/storage/chunk_store.test.js b/services/history-v1/test/acceptance/js/storage/chunk_store.test.js index da70467934..8b06b8e412 100644 --- a/services/history-v1/test/acceptance/js/storage/chunk_store.test.js +++ b/services/history-v1/test/acceptance/js/storage/chunk_store.test.js @@ -470,6 +470,8 @@ describe('chunkStore', function () { describe('with changes queued in the Redis buffer', function () { let queuedChanges + const 
firstQueuedChangeTimestamp = new Date('2017-01-01T00:01:00') + const lastQueuedChangeTimestamp = new Date('2017-01-01T00:02:00') beforeEach(async function () { const snapshot = thirdChunk.getSnapshot() @@ -481,7 +483,15 @@ describe('chunkStore', function () { 'in-redis.tex', File.createLazyFromBlobs(blob) ), - new Date() + firstQueuedChangeTimestamp + ), + makeChange( + // Add a second change to make the buffer more interesting + Operation.editFile( + 'in-redis.tex', + TextOperation.fromJSON({ textOperation: ['hello'] }) + ), + lastQueuedChangeTimestamp ), ] await redisBackend.queueChanges( @@ -498,6 +508,15 @@ describe('chunkStore', function () { .getChanges() .concat(queuedChanges) expect(chunk.getChanges()).to.deep.equal(expectedChanges) + expect(chunk.getStartVersion()).to.equal( + thirdChunk.getStartVersion() + ) + expect(chunk.getEndVersion()).to.equal( + thirdChunk.getEndVersion() + queuedChanges.length + ) + expect(chunk.getEndTimestamp()).to.deep.equal( + lastQueuedChangeTimestamp + ) }) it('includes the queued changes when getting the latest chunk by timestamp', async function () { @@ -509,6 +528,12 @@ describe('chunkStore', function () { .getChanges() .concat(queuedChanges) expect(chunk.getChanges()).to.deep.equal(expectedChanges) + expect(chunk.getStartVersion()).to.equal( + thirdChunk.getStartVersion() + ) + expect(chunk.getEndVersion()).to.equal( + thirdChunk.getEndVersion() + queuedChanges.length + ) }) it("doesn't include the queued changes when getting another chunk by timestamp", async function () { @@ -518,6 +543,11 @@ describe('chunkStore', function () { ) const expectedChanges = secondChunk.getChanges() expect(chunk.getChanges()).to.deep.equal(expectedChanges) + expect(chunk.getStartVersion()).to.equal( + secondChunk.getStartVersion() + ) + expect(chunk.getEndVersion()).to.equal(secondChunk.getEndVersion()) + expect(chunk.getEndTimestamp()).to.deep.equal(secondChunkTimestamp) }) it('includes the queued changes when getting the latest chunk by 
version', async function () { @@ -529,6 +559,15 @@ describe('chunkStore', function () { .getChanges() .concat(queuedChanges) expect(chunk.getChanges()).to.deep.equal(expectedChanges) + expect(chunk.getStartVersion()).to.equal( + thirdChunk.getStartVersion() + ) + expect(chunk.getEndVersion()).to.equal( + thirdChunk.getEndVersion() + queuedChanges.length + ) + expect(chunk.getEndTimestamp()).to.deep.equal( + lastQueuedChangeTimestamp + ) }) it("doesn't include the queued changes when getting another chunk by version", async function () { @@ -538,6 +577,47 @@ describe('chunkStore', function () { ) const expectedChanges = secondChunk.getChanges() expect(chunk.getChanges()).to.deep.equal(expectedChanges) + expect(chunk.getStartVersion()).to.equal( + secondChunk.getStartVersion() + ) + expect(chunk.getEndVersion()).to.equal(secondChunk.getEndVersion()) + expect(chunk.getEndTimestamp()).to.deep.equal(secondChunkTimestamp) + }) + + it('loads a version that is only in the Redis buffer', async function () { + const versionInRedis = thirdChunk.getEndVersion() + 1 // the first change in Redis + const chunk = await chunkStore.loadAtVersion( + projectId, + versionInRedis + ) + // The chunk should contain changes from the thirdChunk and the queuedChanges + const expectedChanges = thirdChunk + .getChanges() + .concat(queuedChanges) + expect(chunk.getChanges()).to.deep.equal(expectedChanges) + expect(chunk.getStartVersion()).to.equal( + thirdChunk.getStartVersion() + ) + expect(chunk.getEndVersion()).to.equal( + thirdChunk.getEndVersion() + queuedChanges.length + ) + expect(chunk.getEndTimestamp()).to.deep.equal( + lastQueuedChangeTimestamp + ) + }) + + it('throws an error when loading a version beyond the Redis buffer', async function () { + const versionBeyondRedis = + thirdChunk.getEndVersion() + queuedChanges.length + 1 + await expect( + chunkStore.loadAtVersion(projectId, versionBeyondRedis) + ) + .to.be.rejectedWith(chunkStore.VersionOutOfBoundsError) + 
.and.eventually.satisfy(err => { + expect(err.info).to.have.property('projectId', projectId) + expect(err.info).to.have.property('version', versionBeyondRedis) + return true + }) }) }) diff --git a/services/history-v1/test/acceptance/js/storage/chunk_store_redis_backend.test.js b/services/history-v1/test/acceptance/js/storage/chunk_store_redis_backend.test.js index 2b13343fc4..d34cd701d0 100644 --- a/services/history-v1/test/acceptance/js/storage/chunk_store_redis_backend.test.js +++ b/services/history-v1/test/acceptance/js/storage/chunk_store_redis_backend.test.js @@ -699,6 +699,8 @@ describe('chunk buffer Redis backend', function () { }) describe('setPersistedVersion', function () { + const persistTime = Date.now() + 60 * 1000 // 1 minute from now + it('should return not_found when project does not exist', async function () { const result = await redisBackend.setPersistedVersion(projectId, 5) expect(result).to.equal('not_found') @@ -709,15 +711,41 @@ describe('chunk buffer Redis backend', function () { await setupState(projectId, { headVersion: 5, persistedVersion: null, + persistTime, changes: 5, }) }) it('should set the persisted version', async function () { - await redisBackend.setPersistedVersion(projectId, 3) + const status = await redisBackend.setPersistedVersion(projectId, 3) + expect(status).to.equal('ok') const state = await redisBackend.getState(projectId) expect(state.persistedVersion).to.equal(3) }) + + it('should leave the persist time if the persisted version is not current', async function () { + const status = await redisBackend.setPersistedVersion(projectId, 3) + expect(status).to.equal('ok') + const state = await redisBackend.getState(projectId) + expect(state.persistTime).to.deep.equal(persistTime) // Persist time should remain unchanged + }) + + it('should refuse to set a persisted version greater than the head version', async function () { + await expect( + redisBackend.setPersistedVersion(projectId, 10) + 
).to.be.rejectedWith(VersionOutOfBoundsError) + // Ensure persisted version remains unchanged + const state = await redisBackend.getState(projectId) + expect(state.persistedVersion).to.be.null + }) + + it('should clear the persist time when the persisted version is current', async function () { + const status = await redisBackend.setPersistedVersion(projectId, 5) + expect(status).to.equal('ok') + const state = await redisBackend.getState(projectId) + expect(state.persistedVersion).to.equal(5) + expect(state.persistTime).to.be.null // Persist time should be cleared + }) }) describe('when the persisted version is set', function () { @@ -725,18 +753,46 @@ describe('chunk buffer Redis backend', function () { await setupState(projectId, { headVersion: 5, persistedVersion: 3, + persistTime, changes: 5, }) }) it('should set the persisted version', async function () { - await redisBackend.setPersistedVersion(projectId, 5) + const status = await redisBackend.setPersistedVersion(projectId, 5) + expect(status).to.equal('ok') const state = await redisBackend.getState(projectId) expect(state.persistedVersion).to.equal(5) }) + it('should clear the persist time when the persisted version is current', async function () { + const status = await redisBackend.setPersistedVersion(projectId, 5) + expect(status).to.equal('ok') + const state = await redisBackend.getState(projectId) + expect(state.persistedVersion).to.equal(5) + expect(state.persistTime).to.be.null // Persist time should be cleared + }) + + it('should leave the persist time if the persisted version is not current', async function () { + const status = await redisBackend.setPersistedVersion(projectId, 4) + expect(status).to.equal('ok') + const state = await redisBackend.getState(projectId) + expect(state.persistedVersion).to.equal(4) + expect(state.persistTime).to.deep.equal(persistTime) // Persist time should remain unchanged + }) + it('should not decrease the persisted version', async function () { - await 
redisBackend.setPersistedVersion(projectId, 2) + const status = await redisBackend.setPersistedVersion(projectId, 2) + expect(status).to.equal('too_low') + const state = await redisBackend.getState(projectId) + expect(state.persistedVersion).to.equal(3) + }) + + it('should refuse to set a persisted version greater than the head version', async function () { + await expect( + redisBackend.setPersistedVersion(projectId, 10) + ).to.be.rejectedWith(VersionOutOfBoundsError) + // Ensure persisted version remains unchanged const state = await redisBackend.getState(projectId) expect(state.persistedVersion).to.equal(3) }) @@ -1162,6 +1218,8 @@ function makeChange() { * @param {object} params * @param {number} params.headVersion * @param {number | null} params.persistedVersion + * @param {number | null} params.persistTime - time when the project should be persisted + * @param {number | null} params.expireTime - time when the project should expire * @param {number} params.changes - number of changes to create * @return {Promise} dummy changes that have been created */ @@ -1173,7 +1231,12 @@ async function setupState(projectId, params) { params.persistedVersion ) } - + if (params.persistTime) { + await rclient.set(keySchema.persistTime({ projectId }), params.persistTime) + } + if (params.expireTime) { + await rclient.set(keySchema.expireTime({ projectId }), params.expireTime) + } const changes = [] for (let i = 1; i <= params.changes; i++) { const change = new Change( diff --git a/services/history-v1/test/acceptance/js/storage/expire_redis_chunks.test.js b/services/history-v1/test/acceptance/js/storage/expire_redis_chunks.test.js index b657991dda..f8a5943c43 100644 --- a/services/history-v1/test/acceptance/js/storage/expire_redis_chunks.test.js +++ b/services/history-v1/test/acceptance/js/storage/expire_redis_chunks.test.js @@ -1,91 +1,13 @@ 'use strict' const { expect } = require('chai') -const { promisify } = require('node:util') -const { execFile } = 
require('node:child_process') -const { Snapshot, Author, Change } = require('overleaf-editor-core') +const { Author, Change } = require('overleaf-editor-core') const cleanup = require('./support/cleanup') -const redisBackend = require('../../../../storage/lib/chunk_store/redis') -const redis = require('../../../../storage/lib/redis') -const rclient = redis.rclientHistory -const keySchema = redisBackend.keySchema +const { setupProjectState, rclient, keySchema } = require('./support/redis') +const { runScript } = require('./support/runscript') const SCRIPT_PATH = 'storage/scripts/expire_redis_chunks.js' -async function runExpireScript() { - const TIMEOUT = 10 * 1000 // 10 seconds - let result - try { - result = await promisify(execFile)('node', [SCRIPT_PATH], { - encoding: 'utf-8', - timeout: TIMEOUT, - env: { - ...process.env, - LOG_LEVEL: 'debug', // Override LOG_LEVEL for script output - }, - }) - result.status = 0 - } catch (err) { - const { stdout, stderr, code } = err - if (typeof code !== 'number') { - console.error('Error running expire script:', err) - throw err - } - result = { stdout, stderr, status: code } - } - // The script might exit with status 1 if it finds no keys to process, which is ok - if (result.status !== 0 && result.status !== 1) { - console.error('Expire script failed:', result.stderr) - throw new Error(`expire script failed with status ${result.status}`) - } - return result -} - -// Helper to set up a basic project state in Redis -async function setupProjectState( - projectId, - { - headVersion = 0, - persistedVersion = null, - expireTime = null, - persistTime = null, - changes = [], - } -) { - const headSnapshot = new Snapshot() - await rclient.set( - keySchema.head({ projectId }), - JSON.stringify(headSnapshot.toRaw()) - ) - await rclient.set( - keySchema.headVersion({ projectId }), - headVersion.toString() - ) - - if (persistedVersion !== null) { - await rclient.set( - keySchema.persistedVersion({ projectId }), - 
persistedVersion.toString() - ) - } - if (expireTime !== null) { - await rclient.set( - keySchema.expireTime({ projectId }), - expireTime.toString() - ) - } - if (persistTime !== null) { - await rclient.set( - keySchema.persistTime({ projectId }), - persistTime.toString() - ) - } - if (changes.length > 0) { - const rawChanges = changes.map(c => JSON.stringify(c.toRaw())) - await rclient.rpush(keySchema.changes({ projectId }), ...rawChanges) - } -} - function makeChange() { const timestamp = new Date() const author = new Author(123, 'test@example.com', 'Test User') @@ -150,7 +72,7 @@ describe('expire_redis_chunks script', function () { }) // Run the expire script once after all projects are set up - await runExpireScript() + await runScript(SCRIPT_PATH) }) async function checkProjectStatus(projectId) { diff --git a/services/history-v1/test/acceptance/js/storage/persist_buffer.test.mjs b/services/history-v1/test/acceptance/js/storage/persist_buffer.test.mjs new file mode 100644 index 0000000000..138a70e626 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/persist_buffer.test.mjs @@ -0,0 +1,519 @@ +'use strict' + +import fs from 'node:fs' +import { expect } from 'chai' +import { + Change, + Snapshot, + File, + TextOperation, + AddFileOperation, + EditFileOperation, // Added EditFileOperation +} from 'overleaf-editor-core' +import persistBuffer from '../../../../storage/lib/persist_buffer.js' +import chunkStore from '../../../../storage/lib/chunk_store/index.js' +import redisBackend from '../../../../storage/lib/chunk_store/redis.js' +import persistChanges from '../../../../storage/lib/persist_changes.js' +import cleanup from './support/cleanup.js' +import fixtures from './support/fixtures.js' +import testFiles from './support/test_files.js' + +describe('persistBuffer', function () { + let projectId + const initialVersion = 0 + let limitsToPersistImmediately + + before(function () { + const farFuture = new Date() + farFuture.setTime(farFuture.getTime() 
+ 7 * 24 * 3600 * 1000) + limitsToPersistImmediately = { + minChangeTimestamp: farFuture, + maxChangeTimestamp: farFuture, + maxChunkChanges: 10, + } + }) + + beforeEach(cleanup.everything) + beforeEach(fixtures.create) + + beforeEach(async function () { + projectId = fixtures.docs.uninitializedProject.id + await chunkStore.initializeProject(projectId) + }) + + describe('with an empty initial chunk (new project)', function () { + it('should persist changes from Redis to a new chunk', async function () { + // create an initial snapshot and add the empty file `main.tex` + const HELLO_TXT = fs.readFileSync(testFiles.path('hello.txt')).toString() + + const createFile = new Change( + [new AddFileOperation('main.tex', File.fromString(HELLO_TXT))], + new Date(), + [] + ) + + await persistChanges( + projectId, + [createFile], + limitsToPersistImmediately, + 0 + ) + // Now queue some changes in Redis + const op1 = new TextOperation().insert('Hello').retain(HELLO_TXT.length) + const change1 = new Change( + [new EditFileOperation('main.tex', op1)], + new Date() + ) + + const op2 = new TextOperation() + .retain('Hello'.length) + .insert(' World') + .retain(HELLO_TXT.length) + const change2 = new Change( + [new EditFileOperation('main.tex', op2)], + new Date() + ) + + const changesToQueue = [change1, change2] + + const finalHeadVersion = initialVersion + 1 + changesToQueue.length + + const now = Date.now() + await redisBackend.queueChanges( + projectId, + new Snapshot(), // dummy snapshot + 1, + changesToQueue, + { + persistTime: now + redisBackend.MAX_PERSIST_DELAY_MS, + expireTime: now + redisBackend.PROJECT_TTL_MS, + } + ) + await redisBackend.setPersistedVersion(projectId, initialVersion) + + // Persist the changes from Redis to the chunk store + const persistResult = await persistBuffer( + projectId, + limitsToPersistImmediately + ) + + // Check the return value of persistBuffer + expect(persistResult).to.exist + 
expect(persistResult).to.have.property('numberOfChangesPersisted') + expect(persistResult).to.have.property('originalEndVersion') + expect(persistResult).to.have.property('currentChunk') + expect(persistResult).to.have.property('resyncNeeded') + expect(persistResult.numberOfChangesPersisted).to.equal( + changesToQueue.length + ) + expect(persistResult.originalEndVersion).to.equal(initialVersion + 1) + expect(persistResult.resyncNeeded).to.be.false + + const latestChunk = await chunkStore.loadLatest(projectId, { + persistedOnly: true, + }) + expect(latestChunk).to.exist + expect(latestChunk.getStartVersion()).to.equal(initialVersion) + expect(latestChunk.getEndVersion()).to.equal(finalHeadVersion) + expect(latestChunk.getChanges().length).to.equal( + changesToQueue.length + 1 + ) + // Check that chunk returned by persistBuffer matches the latest chunk + expect(latestChunk).to.deep.equal(persistResult.currentChunk) + + const chunkSnapshot = latestChunk.getSnapshot() + expect(Object.keys(chunkSnapshot.getFileMap()).length).to.equal(1) + + const persistedVersionInRedis = (await redisBackend.getState(projectId)) + .persistedVersion + expect(persistedVersionInRedis).to.equal(finalHeadVersion) + + const nonPersisted = await redisBackend.getNonPersistedChanges( + projectId, + finalHeadVersion + ) + expect(nonPersisted).to.be.an('array').that.is.empty + }) + }) + + describe('with an existing chunk and new changes in Redis', function () { + it('should persist new changes from Redis, appending to existing history', async function () { + const initialContent = 'Initial document content.\n' + + const addInitialFileChange = new Change( + [new AddFileOperation('main.tex', File.fromString(initialContent))], + new Date(), + [] + ) + + await persistChanges( + projectId, + [addInitialFileChange], + limitsToPersistImmediately, + initialVersion + ) + const versionAfterInitialSetup = initialVersion + 1 // Now version is 1 + + const opForChunk1 = new TextOperation() + 
.retain(initialContent.length) + .insert(' First addition.') + const changesForChunk1 = [ + new Change( + [new EditFileOperation('main.tex', opForChunk1)], + new Date(), + [] + ), + ] + + await persistChanges( + projectId, + changesForChunk1, + limitsToPersistImmediately, // Original limits for this step + versionAfterInitialSetup // Correct clientEndVersion + ) + // Update persistedChunkEndVersion: 1 (from setup) + 1 (from changesForChunk1) = 2 + const persistedChunkEndVersion = + versionAfterInitialSetup + changesForChunk1.length + const contentAfterChunk1 = initialContent + ' First addition.' + + const opVersion2 = new TextOperation() + .retain(contentAfterChunk1.length) + .insert(' Second addition.') + const changeVersion2 = new Change( + [new EditFileOperation('main.tex', opVersion2)], + new Date(), + [] + ) + + const contentAfterChange2 = contentAfterChunk1 + ' Second addition.' + const opVersion3 = new TextOperation() + .retain(contentAfterChange2.length) + .insert(' Third addition.') + const changeVersion3 = new Change( + [new EditFileOperation('main.tex', opVersion3)], + new Date(), + [] + ) + + const redisChangesToPush = [changeVersion2, changeVersion3] + const finalHeadVersionAfterRedisPush = + persistedChunkEndVersion + redisChangesToPush.length + const now = Date.now() + + await redisBackend.queueChanges( + projectId, + new Snapshot(), // Use new Snapshot() like in the first test + persistedChunkEndVersion, + redisChangesToPush, + { + persistTime: now + redisBackend.MAX_PERSIST_DELAY_MS, + expireTime: now + redisBackend.PROJECT_TTL_MS, + } + ) + await redisBackend.setPersistedVersion( + projectId, + persistedChunkEndVersion + ) + + const persistResult = await persistBuffer( + projectId, + limitsToPersistImmediately + ) + + // Check the return value of persistBuffer + expect(persistResult).to.exist + expect(persistResult).to.have.property('numberOfChangesPersisted') + expect(persistResult).to.have.property('originalEndVersion') + 
expect(persistResult).to.have.property('currentChunk') + expect(persistResult).to.have.property('resyncNeeded') + expect(persistResult.numberOfChangesPersisted).to.equal( + redisChangesToPush.length + ) + expect(persistResult.originalEndVersion).to.equal( + persistedChunkEndVersion + ) + expect(persistResult.resyncNeeded).to.be.false + + const latestChunk = await chunkStore.loadLatest(projectId, { + persistedOnly: true, + }) + expect(latestChunk).to.exist + expect(latestChunk.getStartVersion()).to.equal(0) + expect(latestChunk.getEndVersion()).to.equal( + finalHeadVersionAfterRedisPush + ) + expect(latestChunk.getChanges().length).to.equal( + persistedChunkEndVersion + redisChangesToPush.length + ) + + const persistedVersionInRedisAfter = ( + await redisBackend.getState(projectId) + ).persistedVersion + expect(persistedVersionInRedisAfter).to.equal( + finalHeadVersionAfterRedisPush + ) + + // Check that chunk returned by persistBuffer matches the latest chunk + expect(persistResult.currentChunk).to.deep.equal(latestChunk) + + const nonPersisted = await redisBackend.getNonPersistedChanges( + projectId, + finalHeadVersionAfterRedisPush + ) + expect(nonPersisted).to.be.an('array').that.is.empty + }) + }) + + describe('when Redis has no new changes', function () { + let persistedChunkEndVersion + let changesForChunk1 + + beforeEach(async function () { + const initialContent = 'Content.' + + const addInitialFileChange = new Change( + [new AddFileOperation('main.tex', File.fromString(initialContent))], + new Date(), + [] + ) + + // Replace chunkStore.create with persistChanges + // clientEndVersion is initialVersion (0). This advances version to 1. 
+ await persistChanges( + projectId, + [addInitialFileChange], + limitsToPersistImmediately, + initialVersion + ) + const versionAfterInitialSetup = initialVersion + 1 // Now version is 1 + + const opForChunk1 = new TextOperation() + .retain(initialContent.length) + .insert(' More.') + changesForChunk1 = [ + new Change( + [new EditFileOperation('main.tex', opForChunk1)], + new Date(), + [] + ), + ] + // Corrected persistChanges call: clientEndVersion is versionAfterInitialSetup (1) + await persistChanges( + projectId, + changesForChunk1, + limitsToPersistImmediately, // Original limits for this step + versionAfterInitialSetup // Correct clientEndVersion + ) + // Update persistedChunkEndVersion: 1 (from setup) + 1 (from changesForChunk1) = 2 + persistedChunkEndVersion = + versionAfterInitialSetup + changesForChunk1.length + }) + + it('should leave the persisted version and stored chunks unchanged', async function () { + const now = Date.now() + await redisBackend.queueChanges( + projectId, + new Snapshot(), + persistedChunkEndVersion - 1, + changesForChunk1, + { + persistTime: now + redisBackend.MAX_PERSIST_DELAY_MS, + expireTime: now + redisBackend.PROJECT_TTL_MS, + } + ) + await redisBackend.setPersistedVersion( + projectId, + persistedChunkEndVersion + ) + + const chunksBefore = await chunkStore.getProjectChunks(projectId) + + const persistResult = await persistBuffer( + projectId, + limitsToPersistImmediately + ) + + const currentChunk = await chunkStore.loadLatest(projectId, { + persistedOnly: true, + }) + expect(persistResult).to.deep.equal({ + numberOfChangesPersisted: 0, + originalEndVersion: persistedChunkEndVersion, + currentChunk, + }) + + const chunksAfter = await chunkStore.getProjectChunks(projectId) + expect(chunksAfter.length).to.equal(chunksBefore.length) + expect(chunksAfter).to.deep.equal(chunksBefore) + + const finalPersistedVersionInRedis = ( + await redisBackend.getState(projectId) + ).persistedVersion + 
expect(finalPersistedVersionInRedis).to.equal(persistedChunkEndVersion) + }) + + it('should update the persisted version if it is behind the chunk store end version', async function () { + const now = Date.now() + + await redisBackend.queueChanges( + projectId, + new Snapshot(), + persistedChunkEndVersion - 1, + changesForChunk1, + { + persistTime: now + redisBackend.MAX_PERSIST_DELAY_MS, + expireTime: now + redisBackend.PROJECT_TTL_MS, + } + ) + // Force the persisted version in Redis to lag behind the chunk store, + // simulating the situation where a worker has persisted changes to the + // chunk store but failed to update the version in redis. + await redisBackend.setPersistedVersion( + projectId, + persistedChunkEndVersion - 1 + ) + + const chunksBefore = await chunkStore.getProjectChunks(projectId) + + // Persist buffer (which should do nothing as there are no new changes) + const persistResult = await persistBuffer( + projectId, + limitsToPersistImmediately + ) + + // Check the return value + const currentChunk = await chunkStore.loadLatest(projectId, { + persistedOnly: true, + }) + expect(persistResult).to.deep.equal({ + numberOfChangesPersisted: 0, + originalEndVersion: persistedChunkEndVersion, + currentChunk, + }) + + const chunksAfter = await chunkStore.getProjectChunks(projectId) + expect(chunksAfter.length).to.equal(chunksBefore.length) + expect(chunksAfter).to.deep.equal(chunksBefore) + + const finalPersistedVersionInRedis = ( + await redisBackend.getState(projectId) + ).persistedVersion + expect(finalPersistedVersionInRedis).to.equal(persistedChunkEndVersion) + }) + }) + + describe('when limits restrict the number of changes to persist', function () { + it('should persist only a subset of changes and update persistedVersion accordingly', async function () { + const now = Date.now() + const oneDayAgo = now - 1000 * 60 * 60 * 24 + const oneHourAgo = now - 1000 * 60 * 60 + const twoHoursAgo = now - 1000 * 60 * 60 * 2 + const threeHoursAgo = now - 1000 
* 60 * 60 * 3 + + // Create an initial file with some content + const initialContent = 'Initial content.' + const addInitialFileChange = new Change( + [new AddFileOperation('main.tex', File.fromString(initialContent))], + new Date(oneDayAgo), + [] + ) + + await persistChanges( + projectId, + [addInitialFileChange], + limitsToPersistImmediately, + initialVersion + ) + const versionAfterInitialSetup = initialVersion + 1 // Version is 1 + + // Queue three additional changes in Redis + const op1 = new TextOperation() + .retain(initialContent.length) + .insert(' Change 1.') + const change1 = new Change( + [new EditFileOperation('main.tex', op1)], + new Date(threeHoursAgo) + ) + const contentAfterC1 = initialContent + ' Change 1.' + + const op2 = new TextOperation() + .retain(contentAfterC1.length) + .insert(' Change 2.') + const change2 = new Change( + [new EditFileOperation('main.tex', op2)], + new Date(twoHoursAgo) + ) + const contentAfterC2 = contentAfterC1 + ' Change 2.' + + const op3 = new TextOperation() + .retain(contentAfterC2.length) + .insert(' Change 3.') + const change3 = new Change( + [new EditFileOperation('main.tex', op3)], + new Date(oneHourAgo) + ) + + const changesToQueue = [change1, change2, change3] + await redisBackend.queueChanges( + projectId, + new Snapshot(), // dummy snapshot + versionAfterInitialSetup, // startVersion for queued changes + changesToQueue, + { + persistTime: now + redisBackend.MAX_PERSIST_DELAY_MS, + expireTime: now + redisBackend.PROJECT_TTL_MS, + } + ) + await redisBackend.setPersistedVersion( + projectId, + versionAfterInitialSetup + ) + + // Define limits to only persist 2 additional changes (on top of the initial file creation), + // which should leave the final change (change3) in the redis buffer. 
+ const restrictiveLimits = { + minChangeTimestamp: new Date(oneHourAgo), // only changes more than 1 hour old are considered + maxChangeTimestamp: new Date(twoHoursAgo), // they will be persisted if any change is older than 2 hours + } + + const persistResult = await persistBuffer(projectId, restrictiveLimits) + + // Check the return value of persistBuffer + expect(persistResult).to.exist + expect(persistResult).to.have.property('numberOfChangesPersisted') + expect(persistResult).to.have.property('originalEndVersion') + expect(persistResult).to.have.property('currentChunk') + expect(persistResult).to.have.property('resyncNeeded') + expect(persistResult.numberOfChangesPersisted).to.equal(2) // change1 + change2 + expect(persistResult.originalEndVersion).to.equal( + versionAfterInitialSetup + ) + expect(persistResult.resyncNeeded).to.be.false + + // Check the latest persisted chunk, it should only have the initial file and the first two changes + const latestChunk = await chunkStore.loadLatest(projectId, { + persistedOnly: true, + }) + expect(latestChunk).to.exist + expect(latestChunk.getChanges().length).to.equal(3) // addInitialFileChange + change1 + change2 + expect(latestChunk.getStartVersion()).to.equal(initialVersion) + const expectedEndVersion = versionAfterInitialSetup + 2 // Persisted two changes from the queue + expect(latestChunk.getEndVersion()).to.equal(expectedEndVersion) + + // Check that chunk returned by persistBuffer matches the latest chunk + expect(persistResult.currentChunk).to.deep.equal(latestChunk) + + // Check persisted version in Redis + const state = await redisBackend.getState(projectId) + expect(state.persistedVersion).to.equal(expectedEndVersion) + + // Check non-persisted changes in Redis + const nonPersisted = await redisBackend.getNonPersistedChanges( + projectId, + expectedEndVersion + ) + expect(nonPersisted).to.be.an('array').with.lengthOf(1) // change3 should remain + expect(nonPersisted).to.deep.equal([change3]) + }) + }) +}) 
diff --git a/services/history-v1/test/acceptance/js/storage/persist_redis_chunks.test.js b/services/history-v1/test/acceptance/js/storage/persist_redis_chunks.test.js new file mode 100644 index 0000000000..58261703bb --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/persist_redis_chunks.test.js @@ -0,0 +1,262 @@ +'use strict' + +const { expect } = require('chai') +const { + Change, + AddFileOperation, + EditFileOperation, + TextOperation, + File, +} = require('overleaf-editor-core') +const cleanup = require('./support/cleanup') +const fixtures = require('./support/fixtures') +const chunkStore = require('../../../../storage/lib/chunk_store') +const { getState } = require('../../../../storage/lib/chunk_store/redis') +const { setupProjectState } = require('./support/redis') +const { runScript } = require('./support/runscript') +const persistChanges = require('../../../../storage/lib/persist_changes') + +const SCRIPT_PATH = 'storage/scripts/persist_redis_chunks.mjs' + +describe('persist_redis_chunks script', function () { + before(cleanup.everything) + + let now, past, future + let projectIdsStore // To store the generated project IDs, keyed by scenario name + let limitsToPersistImmediately + + before(async function () { + const farFuture = new Date() + farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000) + limitsToPersistImmediately = { + minChangeTimestamp: farFuture, + maxChangeTimestamp: farFuture, + maxChunkChanges: 100, // Allow enough changes for setup + } + + await fixtures.create() + + now = Date.now() + past = now - 10000 // 10 seconds ago + future = now + 60000 // 1 minute in the future + + projectIdsStore = {} + + // Scenario 1: project_due_for_persistence + // Goal: Has initial persisted content (v1), Redis has new changes (v1->v2) due for persistence. + // Expected: Script persists Redis changes, persistedVersion becomes 2. 
+ { + const dueProjectId = await chunkStore.initializeProject() + projectIdsStore.project_due_for_persistence = dueProjectId + const initialContent = 'Initial content for due project.' + const initialChange = new Change( + [new AddFileOperation('main.tex', File.fromString(initialContent))], + new Date(now - 30000), // 30 seconds ago + [] + ) + await persistChanges( + dueProjectId, + [initialChange], + limitsToPersistImmediately, + 0 + ) + const secondChangeDue = new Change( + [ + new EditFileOperation( + 'main.tex', + new TextOperation() + .retain(initialContent.length) + .insert(' More content.') + ), + ], + new Date(now - 20000), // 20 seconds ago + [] + ) + await setupProjectState(dueProjectId, { + persistTime: past, + headVersion: 2, // After secondChangeDue + persistedVersion: 1, // Initial content is at v1 + changes: [secondChangeDue], // New changes in Redis (v1->v2) + expireTimeFuture: true, + }) + } + + // Scenario 2: project_not_due_for_persistence + // Goal: Has initial persisted content (v1), Redis has no new changes, not due. + // Expected: Script does nothing, persistedVersion remains 1. + { + const notDueProjectId = await chunkStore.initializeProject() + projectIdsStore.project_not_due_for_persistence = notDueProjectId + const initialContent = 'Initial content for not_due project.' + const initialChange = new Change( + [new AddFileOperation('main.tex', File.fromString(initialContent))], + new Date(now - 30000), // 30 seconds ago + [] + ) + await persistChanges( + notDueProjectId, + [initialChange], + limitsToPersistImmediately, + 0 + ) // Persisted: v0 -> v1 + await setupProjectState(notDueProjectId, { + persistTime: future, + headVersion: 1, // Matches persisted version + persistedVersion: 1, + changes: [], // No new changes in Redis + expireTimeFuture: true, + }) + } + + // Scenario 3: project_no_persist_time + // Goal: Has initial persisted content (v1), Redis has no new changes, no persistTime. 
+ // Expected: Script does nothing, persistedVersion remains 1. + { + const noPersistTimeProjectId = await chunkStore.initializeProject() + projectIdsStore.project_no_persist_time = noPersistTimeProjectId + const initialContent = 'Initial content for no_persist_time project.' + const initialChange = new Change( + [new AddFileOperation('main.tex', File.fromString(initialContent))], + new Date(now - 30000), // 30 seconds ago + [] + ) + await persistChanges( + noPersistTimeProjectId, + [initialChange], + limitsToPersistImmediately, + 0 + ) // Persisted: v0 -> v1 + await setupProjectState(noPersistTimeProjectId, { + persistTime: null, + headVersion: 1, // Matches persisted version + persistedVersion: 1, + changes: [], // No new changes in Redis + expireTimeFuture: true, + }) + } + + // Scenario 4: project_due_fully_persisted + // Goal: Has content persisted up to v2, Redis reflects this (head=2, persisted=2), due for check. + // Expected: Script clears persistTime, persistedVersion remains 2. + { + const dueFullyPersistedId = await chunkStore.initializeProject() + projectIdsStore.project_due_fully_persisted = dueFullyPersistedId + const initialContent = 'Content part 1 for fully persisted.' 
+ const change1 = new Change( + [new AddFileOperation('main.tex', File.fromString(initialContent))], + new Date(now - 40000), // 40 seconds ago + [] + ) + const change2 = new Change( + [ + new EditFileOperation( + 'main.tex', + new TextOperation() + .retain(initialContent.length) + .insert(' Content part 2.') + ), + ], + new Date(now - 30000), // 30 seconds ago + [] + ) + await persistChanges( + dueFullyPersistedId, + [change1, change2], + limitsToPersistImmediately, + 0 + ) + await setupProjectState(dueFullyPersistedId, { + persistTime: past, + headVersion: 2, + persistedVersion: 2, + changes: [], // No new unpersisted changes in Redis + expireTimeFuture: true, + }) + } + + // Scenario 5: project_fails_to_persist + // Goal: Has initial persisted content (v1), Redis has new changes (v1->v2) due for persistence, but these changes will cause an error. + // Expected: Script attempts to persist, fails, and persistTime is NOT cleared. + { + const failsToPersistProjectId = await chunkStore.initializeProject() + projectIdsStore.project_fails_to_persist = failsToPersistProjectId + const initialContent = 'Initial content for failure case.' + const initialChange = new Change( + [new AddFileOperation('main.tex', File.fromString(initialContent))], + new Date(now - 30000), // 30 seconds ago + [] + ) + await persistChanges( + failsToPersistProjectId, + [initialChange], + limitsToPersistImmediately, + 0 + ) + // This change will fail because it tries to insert at a non-existent offset + // assuming the initial content is shorter than 1000 characters. 
+ const conflictingChange = new Change( + [ + new EditFileOperation( + 'main.tex', + new TextOperation().retain(1000).insert('This will fail.') + ), + ], + new Date(now - 20000), // 20 seconds ago + [] + ) + await setupProjectState(failsToPersistProjectId, { + persistTime: past, // Due for persistence + headVersion: 2, // After conflictingChange + persistedVersion: 1, // Initial content is at v1 + changes: [conflictingChange], // New changes in Redis (v1->v2) + expireTimeFuture: true, + }) + } + + await runScript(SCRIPT_PATH) + }) + + describe('when the buffer has new changes', function () { + it('should update persisted-version when the persist-time is in the past', async function () { + const projectId = projectIdsStore.project_due_for_persistence + const state = await getState(projectId) + // console.log('State after running script (project_due_for_persistence):', state) + expect(state.persistTime).to.be.null + expect(state.persistedVersion).to.equal(2) + }) + + it('should not perform any operations when the persist-time is in the future', async function () { + const projectId = projectIdsStore.project_not_due_for_persistence + const state = await getState(projectId) + expect(state.persistTime).to.equal(future) + expect(state.persistedVersion).to.equal(1) + }) + }) + + describe('when the changes in the buffer are already persisted', function () { + it('should delete persist-time for a project when the persist-time is in the past', async function () { + const projectId = projectIdsStore.project_due_fully_persisted + const state = await getState(projectId) + expect(state.persistTime).to.be.null + expect(state.persistedVersion).to.equal(2) + }) + }) + + describe('when there is no persist-time set', function () { + it('should not change redis when there is no persist-time set initially', async function () { + const projectId = projectIdsStore.project_no_persist_time + const state = await getState(projectId) + expect(state.persistTime).to.be.null + 
expect(state.persistedVersion).to.equal(1) + }) + }) + + describe('when persistence fails due to conflicting changes', function () { + it('should not clear persist-time and not update persisted-version', async function () { + const projectId = projectIdsStore.project_fails_to_persist + const state = await getState(projectId) + expect(state.persistTime).to.be.greaterThan(now) // persistTime should be pushed to the future by RETRY_DELAY_MS + expect(state.persistedVersion).to.equal(1) // persistedVersion should not change + }) + }) +}) diff --git a/services/history-v1/test/acceptance/js/storage/queue_changes.test.js b/services/history-v1/test/acceptance/js/storage/queue_changes.test.js new file mode 100644 index 0000000000..dbfe8c7e56 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/queue_changes.test.js @@ -0,0 +1,416 @@ +'use strict' + +const { expect } = require('chai') +const sinon = require('sinon') + +const cleanup = require('./support/cleanup') +const fixtures = require('./support/fixtures') +const testFiles = require('./support/test_files.js') +const storage = require('../../../../storage') +const chunkStore = storage.chunkStore +const queueChanges = storage.queueChanges +const redisBackend = require('../../../../storage/lib/chunk_store/redis') + +const core = require('overleaf-editor-core') +const AddFileOperation = core.AddFileOperation +const EditFileOperation = core.EditFileOperation +const TextOperation = core.TextOperation +const Change = core.Change +const Chunk = core.Chunk +const File = core.File +const Snapshot = core.Snapshot +const BlobStore = storage.BlobStore +const persistChanges = storage.persistChanges + +describe('queueChanges', function () { + let limitsToPersistImmediately + before(function () { + // Used to provide a limit which forces us to persist all of the changes + const farFuture = new Date() + farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000) + limitsToPersistImmediately = { + minChangeTimestamp: 
farFuture, + maxChangeTimestamp: farFuture, + maxChanges: 10, + maxChunkChanges: 10, + } + }) + + beforeEach(cleanup.everything) + beforeEach(fixtures.create) + afterEach(function () { + sinon.restore() + }) + + it('queues changes when redis has no snapshot (falls back to chunkStore with an empty chunk)', async function () { + // Start with an empty chunk store for the project + const projectId = fixtures.docs.uninitializedProject.id + await chunkStore.initializeProject(projectId) + + // Ensure that the initial state in redis is empty + const initialRedisState = await redisBackend.getState(projectId) + expect(initialRedisState.headVersion).to.be.null + expect(initialRedisState.headSnapshot).to.be.null + expect(initialRedisState.changes).to.be.an('array').that.is.empty + + // Add a test file to the blob store + const blobStore = new BlobStore(projectId) + await blobStore.putFile(testFiles.path('hello.txt')) + + // Prepare an initial change to add a single file to an empty project + const change = new Change( + [ + new AddFileOperation( + 'test.tex', + File.fromHash(testFiles.HELLO_TXT_HASH) + ), + ], + new Date(), + [] + ) + const changesToQueue = [change] + const endVersion = 0 + + // Queue the changes to add the test file + const status = await queueChanges(projectId, changesToQueue, endVersion) + expect(status).to.equal('ok') + + // Verify that we now have some state in redis + const redisState = await redisBackend.getState(projectId) + expect(redisState).to.not.be.null + + // Compute the expected snapshot after applying the changes + const expectedSnapshot = new Snapshot() + await expectedSnapshot.loadFiles('hollow', blobStore) + for (const change of changesToQueue) { + const hollowChange = change.clone() + await hollowChange.loadFiles('hollow', blobStore) + hollowChange.applyTo(expectedSnapshot, { strict: true }) + } + + // Confirm that state in redis matches the expected snapshot and changes queue + const expectedVersionInRedis = endVersion + 
changesToQueue.length + expect(redisState.headVersion).to.equal(expectedVersionInRedis) + expect(redisState.headSnapshot).to.deep.equal(expectedSnapshot.toRaw()) + expect(redisState.changes).to.deep.equal(changesToQueue.map(c => c.toRaw())) + }) + + it('queues changes when redis has no snapshot (falls back to chunkStore with an existing chunk)', async function () { + const projectId = fixtures.docs.uninitializedProject.id + + // Initialise the project in the chunk store using the "Hello World" test file + await chunkStore.initializeProject(projectId) + const blobStore = new BlobStore(projectId) + await blobStore.putFile(testFiles.path('hello.txt')) + const change = new Change( + [ + new AddFileOperation( + 'hello.tex', + File.fromHash(testFiles.HELLO_TXT_HASH) + ), + ], + new Date(), + [] + ) + const initialChanges = [change] + const initialVersion = 0 + + const result = await persistChanges( + projectId, + initialChanges, + limitsToPersistImmediately, + initialVersion + ) + // Compute the state after the initial changes are persisted for later comparison + const endVersion = initialVersion + initialChanges.length + const { currentChunk } = result + const originalSnapshot = result.currentChunk.getSnapshot() + await originalSnapshot.loadFiles('hollow', blobStore) + originalSnapshot.applyAll(currentChunk.getChanges()) + + // Ensure that the initial state in redis is empty + const initialRedisState = await redisBackend.getState(projectId) + expect(initialRedisState.headVersion).to.be.null + expect(initialRedisState.headSnapshot).to.be.null + expect(initialRedisState.changes).to.be.an('array').that.is.empty + + // Prepare a change to edit the existing file + const editFileOp = new EditFileOperation( + 'hello.tex', + new TextOperation() + .insert('Hello') + .retain(testFiles.HELLO_TXT_UTF8_LENGTH) + ) + const editFileChange = new Change([editFileOp], new Date(), []) + const changesToQueue = [editFileChange] + + // Queue the changes to edit the existing file + const 
status = await queueChanges(projectId, changesToQueue, endVersion) + expect(status).to.equal('ok') + + // Verify that we now have some state in redis + const redisState = await redisBackend.getState(projectId) + expect(redisState).to.not.be.null + + // Compute the expected snapshot after applying the changes + const expectedSnapshot = originalSnapshot.clone() + await expectedSnapshot.loadFiles('hollow', blobStore) + expectedSnapshot.applyAll(changesToQueue) + + // Confirm that state in redis matches the expected snapshot and changes queue + const expectedVersionInRedis = endVersion + changesToQueue.length + expect(redisState.headVersion).to.equal(expectedVersionInRedis) + expect(redisState.headSnapshot).to.deep.equal(expectedSnapshot.toRaw()) + expect(redisState.changes).to.deep.equal(changesToQueue.map(c => c.toRaw())) + }) + + it('queues changes when redis has a snapshot with existing changes', async function () { + const projectId = fixtures.docs.uninitializedProject.id + + // Initialise the project in redis using the "Hello World" test file + await chunkStore.initializeProject(projectId) + const blobStore = new BlobStore(projectId) + await blobStore.putFile(testFiles.path('hello.txt')) + const initialChangeOp = new AddFileOperation( + 'existing.tex', + File.fromHash(testFiles.HELLO_TXT_HASH) + ) + const initialChange = new Change([initialChangeOp], new Date(), []) + const initialChangesToQueue = [initialChange] + const versionBeforeInitialQueue = 0 + + // Queue the initial changes + const status = await queueChanges( + projectId, + initialChangesToQueue, + versionBeforeInitialQueue + ) + // Confirm that the initial changes were queued successfully + expect(status).to.equal('ok') + const versionAfterInitialQueue = + versionBeforeInitialQueue + initialChangesToQueue.length + + // Compute the snapshot after the initial changes for later use + const initialSnapshot = new Snapshot() + await initialSnapshot.loadFiles('hollow', blobStore) + for (const change of 
initialChangesToQueue) { + const hollowChange = change.clone() + await hollowChange.loadFiles('hollow', blobStore) + hollowChange.applyTo(initialSnapshot, { strict: true }) + } + + // Now prepare some subsequent changes for the queue + await blobStore.putFile(testFiles.path('graph.png')) + const addFileOp = new AddFileOperation( + 'graph.png', + File.fromHash(testFiles.GRAPH_PNG_HASH) + ) + const addFileChange = new Change([addFileOp], new Date(), []) + const editFileOp = new EditFileOperation( + 'existing.tex', + new TextOperation() + .insert('Hello') + .retain(testFiles.HELLO_TXT_UTF8_LENGTH) + ) + const editFileChange = new Change([editFileOp], new Date(), []) + + const subsequentChangesToQueue = [addFileChange, editFileChange] + const versionBeforeSubsequentQueue = versionAfterInitialQueue + + // Queue the subsequent changes + const subsequentStatus = await queueChanges( + projectId, + subsequentChangesToQueue, + versionBeforeSubsequentQueue + ) + expect(subsequentStatus).to.equal('ok') + + // Compute the expected snapshot after applying all changes + const expectedSnapshot = initialSnapshot.clone() + await expectedSnapshot.loadFiles('hollow', blobStore) + for (const change of subsequentChangesToQueue) { + const hollowChange = change.clone() + await hollowChange.loadFiles('hollow', blobStore) + hollowChange.applyTo(expectedSnapshot, { strict: true }) + } + + // Confirm that state in redis matches the expected snapshot and changes queue + const finalRedisState = await redisBackend.getState(projectId) + expect(finalRedisState).to.not.be.null + const expectedFinalVersion = + versionBeforeSubsequentQueue + subsequentChangesToQueue.length + expect(finalRedisState.headVersion).to.equal(expectedFinalVersion) + expect(finalRedisState.headSnapshot).to.deep.equal(expectedSnapshot.toRaw()) + const allQueuedChangesRaw = initialChangesToQueue + .concat(subsequentChangesToQueue) + .map(c => c.toRaw()) + expect(finalRedisState.changes).to.deep.equal(allQueuedChangesRaw) + }) 
+ + it('skips queuing changes when there is no snapshot and the onlyIfExists flag is set', async function () { + // Start with an empty chunk store for the project + const projectId = fixtures.docs.uninitializedProject.id + await chunkStore.initializeProject(projectId) + + // Ensure that the initial state in redis is empty + const initialRedisState = await redisBackend.getState(projectId) + expect(initialRedisState.headVersion).to.be.null + expect(initialRedisState.headSnapshot).to.be.null + expect(initialRedisState.changes).to.be.an('array').that.is.empty + + // Add a test file to the blob store + const blobStore = new BlobStore(projectId) + await blobStore.putFile(testFiles.path('hello.txt')) + + // Prepare an initial change to add a single file to an empty project + const change = new Change( + [ + new AddFileOperation( + 'test.tex', + File.fromHash(testFiles.HELLO_TXT_HASH) + ), + ], + new Date(), + [] + ) + const changesToQueue = [change] + const endVersion = 0 + + // Queue the changes to add the test file + const status = await queueChanges(projectId, changesToQueue, endVersion, { + onlyIfExists: true, + }) + expect(status).to.equal('ignore') + + // Verify that the state in redis has not changed + const redisState = await redisBackend.getState(projectId) + expect(redisState).to.deep.equal(initialRedisState) + }) + + it('creates an initial hollow snapshot when redis has no snapshot (falls back to chunkStore with an empty chunk)', async function () { + // Start with an empty chunk store for the project + const projectId = fixtures.docs.uninitializedProject.id + await chunkStore.initializeProject(projectId) + const blobStore = new BlobStore(projectId) + await blobStore.putFile(testFiles.path('hello.txt')) + + // Prepare an initial change to add a single file to an empty project + const change = new Change( + [ + new AddFileOperation( + 'test.tex', + File.fromHash(testFiles.HELLO_TXT_HASH) + ), + ], + new Date(), + [] + ) + const changesToQueue = [change] + const 
endVersion = 0 + + // Queue the changes to add the test file + const status = await queueChanges(projectId, changesToQueue, endVersion) + expect(status).to.equal('ok') + + // Verify that we now have some state in redis + const redisState = await redisBackend.getState(projectId) + expect(redisState).to.not.be.null + expect(redisState.headSnapshot.files['test.tex']).to.deep.equal({ + stringLength: testFiles.HELLO_TXT_UTF8_LENGTH, + }) + }) + + it('throws ConflictingEndVersion if endVersion does not match current version (from chunkStore)', async function () { + const projectId = fixtures.docs.uninitializedProject.id + // Initialise an empty project in the chunk store + await chunkStore.initializeProject(projectId) + + // Ensure that the initial state in redis is empty + const initialRedisState = await redisBackend.getState(projectId) + expect(initialRedisState.headVersion).to.be.null + + // Prepare a change to add a file + const change = new Change( + [new AddFileOperation('test.tex', File.fromString(''))], + new Date(), + [] + ) + const changesToQueue = [change] + const incorrectEndVersion = 1 + + // Attempt to queue the changes with an incorrect endVersion (1 instead of 0) + await expect(queueChanges(projectId, changesToQueue, incorrectEndVersion)) + .to.be.rejectedWith(Chunk.ConflictingEndVersion) + .and.eventually.satisfies(err => { + expect(err.info).to.have.property( + 'clientEndVersion', + incorrectEndVersion + ) + expect(err.info).to.have.property('latestEndVersion', 0) + return true + }) + + // Verify that the state in redis has not changed + const redisStateAfterError = await redisBackend.getState(projectId) + expect(redisStateAfterError).to.deep.equal(initialRedisState) + }) + + it('throws ConflictingEndVersion if endVersion does not match current version (from redis snapshot)', async function () { + const projectId = fixtures.docs.uninitializedProject.id + + // Initialise the project in the redis with a test file + await 
chunkStore.initializeProject(projectId) + const initialChange = new Change( + [new AddFileOperation('initial.tex', File.fromString('content'))], + new Date(), + [] + ) + const initialChangesToQueue = [initialChange] + const versionBeforeInitialQueue = 0 + + // Queue the initial changes + await queueChanges( + projectId, + initialChangesToQueue, + versionBeforeInitialQueue + ) + const versionInRedisAfterSetup = + versionBeforeInitialQueue + initialChangesToQueue.length + + // Confirm that the initial changes were queued successfully + const initialRedisState = await redisBackend.getState(projectId) + expect(initialRedisState).to.not.be.null + expect(initialRedisState.headVersion).to.equal(versionInRedisAfterSetup) + + // Now prepare a subsequent change for the queue + const subsequentChange = new Change( + [new AddFileOperation('another.tex', File.fromString(''))], + new Date(), + [] + ) + const subsequentChangesToQueue = [subsequentChange] + const incorrectEndVersion = 0 + + // Attempt to queue the changes with an incorrect endVersion (0 instead of 1) + await expect( + queueChanges(projectId, subsequentChangesToQueue, incorrectEndVersion) + ) + .to.be.rejectedWith(Chunk.ConflictingEndVersion) + .and.eventually.satisfies(err => { + expect(err.info).to.have.property( + 'clientEndVersion', + incorrectEndVersion + ) + expect(err.info).to.have.property( + 'latestEndVersion', + versionInRedisAfterSetup + ) + return true + }) + + // Verify that the state in redis has not changed + const redisStateAfterError = await redisBackend.getState(projectId) + expect(redisStateAfterError).to.not.be.null + expect(redisStateAfterError).to.deep.equal(initialRedisState) + }) +}) diff --git a/services/history-v1/test/acceptance/js/storage/support/redis.js b/services/history-v1/test/acceptance/js/storage/support/redis.js new file mode 100644 index 0000000000..3f5b9cda27 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/support/redis.js @@ -0,0 +1,75 @@ +'use strict' + 
+const { Snapshot } = require('overleaf-editor-core') +const redis = require('../../../../../storage/lib/redis') +const redisBackend = require('../../../../../storage/lib/chunk_store/redis') +const rclient = redis.rclientHistory +const keySchema = redisBackend.keySchema + +// Helper to set up a basic project state in Redis +async function setupProjectState( + projectId, + { + headVersion = 0, + persistedVersion = null, + expireTime = null, + persistTime = null, + changes = [], + expireTimeFuture = false, // Default to not setting future expire time unless specified + } +) { + const headSnapshot = new Snapshot() + await rclient.set( + keySchema.head({ projectId }), + JSON.stringify(headSnapshot.toRaw()) + ) + await rclient.set( + keySchema.headVersion({ projectId }), + headVersion.toString() + ) + + if (persistedVersion !== null) { + await rclient.set( + keySchema.persistedVersion({ projectId }), + persistedVersion.toString() + ) + } else { + await rclient.del(keySchema.persistedVersion({ projectId })) + } + + if (expireTime !== null) { + await rclient.set( + keySchema.expireTime({ projectId }), + expireTime.toString() + ) + } else { + // If expireTimeFuture is true, set it to a future time, otherwise delete it if null + if (expireTimeFuture) { + const futureExpireTime = Date.now() + 5 * 60 * 1000 // 5 minutes in the future + await rclient.set( + keySchema.expireTime({ projectId }), + futureExpireTime.toString() + ) + } else { + await rclient.del(keySchema.expireTime({ projectId })) + } + } + + if (persistTime !== null) { + await rclient.set( + keySchema.persistTime({ projectId }), + persistTime.toString() + ) + } else { + await rclient.del(keySchema.persistTime({ projectId })) + } + + if (changes.length > 0) { + const rawChanges = changes.map(c => JSON.stringify(c.toRaw())) + await rclient.rpush(keySchema.changes({ projectId }), ...rawChanges) + } else { + await rclient.del(keySchema.changes({ projectId })) + } +} + +module.exports = { setupProjectState, rclient, 
keySchema } diff --git a/services/history-v1/test/acceptance/js/storage/support/runscript.js b/services/history-v1/test/acceptance/js/storage/support/runscript.js new file mode 100644 index 0000000000..7ff8355566 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/support/runscript.js @@ -0,0 +1,35 @@ +'use strict' + +const { promisify } = require('node:util') +const { execFile } = require('node:child_process') + +async function runScript(scriptPath, options = {}) { + const TIMEOUT = options.timeout || 10 * 1000 // 10 seconds default + let result + try { + result = await promisify(execFile)('node', [scriptPath], { + encoding: 'utf-8', + timeout: TIMEOUT, + env: { + ...process.env, + LOG_LEVEL: 'debug', // Override LOG_LEVEL for script output + }, + }) + result.status = 0 + } catch (err) { + const { stdout, stderr, code } = err + if (typeof code !== 'number') { + console.error(`Error running script ${scriptPath}:`, err) + throw err + } + result = { stdout, stderr, status: code } + } + // The script might exit with status 1 if it finds no keys to process, which is ok + if (result.status !== 0 && result.status !== 1) { + console.error(`Script ${scriptPath} failed:`, result.stderr) + throw new Error(`Script ${scriptPath} failed with status ${result.status}`) + } + return result +} + +module.exports = { runScript } diff --git a/services/notifications/docker-compose.ci.yml b/services/notifications/docker-compose.ci.yml index 8fd86c1fbb..24b57ab084 100644 --- a/services/notifications/docker-compose.ci.yml +++ b/services/notifications/docker-compose.ci.yml @@ -24,10 +24,13 @@ services: MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" + volumes: + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started user: node + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance diff --git 
a/services/notifications/docker-compose.yml b/services/notifications/docker-compose.yml index 090742ff6d..167e45fdb1 100644 --- a/services/notifications/docker-compose.yml +++ b/services/notifications/docker-compose.yml @@ -26,6 +26,7 @@ services: - .:/overleaf/services/notifications - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/notifications environment: ELASTIC_SEARCH_DSN: es:9200 @@ -39,6 +40,7 @@ services: depends_on: mongo: condition: service_started + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance mongo: diff --git a/services/project-history/app/js/HistoryStoreManager.js b/services/project-history/app/js/HistoryStoreManager.js index bb41dfb3c0..38658bdf5b 100644 --- a/services/project-history/app/js/HistoryStoreManager.js +++ b/services/project-history/app/js/HistoryStoreManager.js @@ -35,7 +35,10 @@ class StringStream extends stream.Readable { _mocks.getMostRecentChunk = (projectId, historyId, callback) => { const path = `projects/${historyId}/latest/history` logger.debug({ projectId, historyId }, 'getting chunk from history service') - _requestChunk({ path, json: true }, callback) + _requestChunk({ path, json: true }, (err, chunk) => { + if (err) return callback(OError.tag(err)) + callback(null, chunk) + }) } /** @@ -54,7 +57,10 @@ export function getChunkAtVersion(projectId, historyId, version, callback) { { projectId, historyId, version }, 'getting chunk from history service for version' ) - _requestChunk({ path, json: true }, callback) + _requestChunk({ path, json: true }, (err, chunk) => { + if (err) return callback(OError.tag(err)) + callback(null, chunk) + }) } export function getMostRecentVersion(projectId, historyId, callback) { @@ -68,8 +74,10 @@ export function getMostRecentVersion(projectId, historyId, callback) { 
_.sortBy(chunk.chunk.history.changes || [], x => x.timestamp) ) // find the latest project and doc versions in the chunk - _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) => + _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) => { + if (err1) err1 = OError.tag(err1) _getLatestV2DocVersions(projectId, chunk, (err2, v2DocVersions) => { + if (err2) err2 = OError.tag(err2) // return the project and doc versions const projectStructureAndDocVersions = { project: projectVersion, @@ -83,7 +91,7 @@ export function getMostRecentVersion(projectId, historyId, callback) { chunk ) }) - ) + }) }) } @@ -211,7 +219,10 @@ export function getProjectBlob(historyId, blobHash, callback) { logger.debug({ historyId, blobHash }, 'getting blob from history service') _requestHistoryService( { path: `projects/${historyId}/blobs/${blobHash}` }, - callback + (err, blob) => { + if (err) return callback(OError.tag(err)) + callback(null, blob) + } ) } @@ -277,7 +288,10 @@ function createBlobFromString(historyId, data, fileId, callback) { (fsPath, cb) => { _createBlob(historyId, fsPath, cb) }, - callback + (err, hash) => { + if (err) return callback(OError.tag(err)) + callback(null, hash) + } ) } @@ -330,7 +344,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { try { ranges = HistoryBlobTranslator.createRangeBlobDataFromUpdate(update) } catch (error) { - return callback(error) + return callback(OError.tag(error)) } createBlobFromString( historyId, @@ -338,7 +352,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { `project-${projectId}-doc-${update.doc}`, (err, fileHash) => { if (err) { - return callback(err) + return callback(OError.tag(err)) } if (ranges) { createBlobFromString( @@ -347,7 +361,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { `project-${projectId}-doc-${update.doc}-ranges`, (err, rangesHash) => { if (err) { - return callback(err) + return 
callback(OError.tag(err)) } logger.debug( { fileHash, rangesHash }, @@ -415,7 +429,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { }, (err, fileHash) => { if (err) { - return callback(err) + return callback(OError.tag(err)) } if (update.hash && update.hash !== fileHash) { logger.warn( @@ -447,7 +461,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { }, (err, fileHash) => { if (err) { - return callback(err) + return callback(OError.tag(err)) } logger.debug({ fileHash }, 'created empty blob for file') callback(null, { file: fileHash }) @@ -520,7 +534,10 @@ export function initializeProject(historyId, callback) { export function deleteProject(projectId, callback) { _requestHistoryService( { method: 'DELETE', path: `projects/${projectId}` }, - callback + err => { + if (err) return callback(OError.tag(err)) + callback(null) + } ) } diff --git a/services/project-history/app/js/SyncManager.js b/services/project-history/app/js/SyncManager.js index ef8caf69eb..43cb61be9f 100644 --- a/services/project-history/app/js/SyncManager.js +++ b/services/project-history/app/js/SyncManager.js @@ -23,6 +23,7 @@ import { isInsert, isDelete } from './Utils.js' /** * @import { Comment as HistoryComment, TrackedChange as HistoryTrackedChange } from 'overleaf-editor-core' + * @import { CommentRawData, TrackedChangeRawData } from 'overleaf-editor-core/lib/types' * @import { Comment, Entity, ResyncDocContentUpdate, RetainOp, TrackedChange } from './types' * @import { TrackedChangeTransition, TrackingDirective, TrackingType, Update } from './types' * @import { ProjectStructureUpdate } from './types' @@ -764,11 +765,19 @@ class SyncUpdateExpander { } const persistedComments = file.getComments().toArray() - await this.queueUpdatesForOutOfSyncComments( - update, - pathname, - persistedComments - ) + if (update.resyncDocContent.historyOTRanges) { + this.queueUpdatesForOutOfSyncCommentsHistoryOT( + update, + pathname, + 
file.getComments().toRaw() + ) + } else { + await this.queueUpdatesForOutOfSyncComments( + update, + pathname, + persistedComments + ) + } const persistedChanges = file.getTrackedChanges().asSorted() await this.queueUpdatesForOutOfSyncTrackedChanges( @@ -825,6 +834,91 @@ class SyncUpdateExpander { return expandedUpdate } + /** + * Queue updates for out of sync comments + * + * @param {ResyncDocContentUpdate} update + * @param {string} pathname + * @param {CommentRawData[]} persistedComments + */ + queueUpdatesForOutOfSyncCommentsHistoryOT( + update, + pathname, + persistedComments + ) { + const expectedComments = + update.resyncDocContent.historyOTRanges?.comments ?? [] + const expectedCommentsById = new Map( + expectedComments.map(comment => [comment.id, comment]) + ) + const persistedCommentsById = new Map( + persistedComments.map(comment => [comment.id, comment]) + ) + + // Delete any persisted comment that is not in the expected comment list. + for (const persistedComment of persistedComments) { + if (!expectedCommentsById.has(persistedComment.id)) { + this.expandedUpdates.push({ + doc: update.doc, + op: [{ deleteComment: persistedComment.id }], + meta: { + pathname, + resync: true, + origin: this.origin, + ts: update.meta.ts, + }, + }) + } + } + + for (const expectedComment of expectedComments) { + const persistedComment = persistedCommentsById.get(expectedComment.id) + if ( + persistedComment && + commentRangesAreInSyncHistoryOT(persistedComment, expectedComment) + ) { + if (expectedComment.resolved === persistedComment.resolved) { + // Both comments are identical; do nothing + } else { + // Only the resolved state differs + this.expandedUpdates.push({ + doc: update.doc, + op: [ + { + commentId: expectedComment.id, + resolved: expectedComment.resolved, + }, + ], + meta: { + pathname, + resync: true, + origin: this.origin, + ts: update.meta.ts, + }, + }) + } + } else { + // New comment or ranges differ + this.expandedUpdates.push({ + doc: update.doc, + op: [ + 
{ + commentId: expectedComment.id, + ranges: expectedComment.ranges, + resolved: expectedComment.resolved, + }, + ], + meta: { + pathname, + resync: true, + origin: this.origin, + ts: update.meta.ts, + }, + }) + } + } + } + /** * Queue updates for out of sync comments * @@ -951,6 +1045,7 @@ class SyncUpdateExpander { for (const transition of getTrackedChangesTransitions( persistedChanges, expectedChanges, + update.resyncDocContent.historyOTRanges?.trackedChanges || [], expectedContent.length )) { if (transition.pos > cursor) { @@ -1018,6 +1113,25 @@ class SyncUpdateExpander { } } +/** + * Compares the ranges in the persisted and expected comments + * + * @param {CommentRawData} persistedComment + * @param {CommentRawData} expectedComment + */ +function commentRangesAreInSyncHistoryOT(persistedComment, expectedComment) { + if (persistedComment.ranges.length !== expectedComment.ranges.length) { + return false + } + for (let i = 0; i < persistedComment.ranges.length; i++) { + const persistedRange = persistedComment.ranges[i] + const expectedRange = expectedComment.ranges[i] + if (persistedRange.pos !== expectedRange.pos) return false + if (persistedRange.length !== expectedRange.length) return false + } + return true +} + /** * Compares the ranges in the persisted and expected comments * @@ -1049,11 +1163,13 @@ function commentRangesAreInSync(persistedComment, expectedComment) { * * @param {readonly HistoryTrackedChange[]} persistedChanges * @param {TrackedChange[]} expectedChanges + * @param {TrackedChangeRawData[]} persistedChangesHistoryOT * @param {number} docLength */ function getTrackedChangesTransitions( persistedChanges, expectedChanges, + persistedChangesHistoryOT, docLength ) { /** @type {TrackedChangeTransition[]} */ @@ -1076,6 +1192,19 @@ function getTrackedChangesTransitions( }) } + for (const change of persistedChangesHistoryOT) { + transitions.push({ + stage: 'expected', + pos: change.range.pos, + tracking: change.tracking, + }) + transitions.push({ + 
stage: 'expected', + pos: change.range.pos + change.range.length, + tracking: { type: 'none' }, + }) + } + for (const change of expectedChanges) { const op = change.op const pos = op.hpos ?? op.p diff --git a/services/project-history/app/js/UpdateCompressor.js b/services/project-history/app/js/UpdateCompressor.js index 471fc791ab..5ae7591a7f 100644 --- a/services/project-history/app/js/UpdateCompressor.js +++ b/services/project-history/app/js/UpdateCompressor.js @@ -1,8 +1,15 @@ // @ts-check +import Metrics from '@overleaf/metrics' import OError from '@overleaf/o-error' import DMP from 'diff-match-patch' import { EditOperationBuilder } from 'overleaf-editor-core' +import zlib from 'node:zlib' +import { ReadableString, WritableBuffer } from '@overleaf/stream-utils' +import Stream from 'node:stream' +import logger from '@overleaf/logger' +import { callbackify } from '@overleaf/promise-utils' +import Settings from '@overleaf/settings' /** * @import { DeleteOp, InsertOp, Op, Update } from './types' @@ -162,7 +169,9 @@ export function concatUpdatesWithSameVersion(updates) { lastUpdate.op != null && lastUpdate.v === update.v && lastUpdate.doc === update.doc && - lastUpdate.pathname === update.pathname + lastUpdate.pathname === update.pathname && + EditOperationBuilder.isValid(update.op[0]) === + EditOperationBuilder.isValid(lastUpdate.op[0]) ) { lastUpdate.op = lastUpdate.op.concat(update.op) if (update.meta.doc_hash == null) { @@ -180,6 +189,66 @@ export function concatUpdatesWithSameVersion(updates) { return concattedUpdates } +async function estimateStorage(updates) { + const blob = JSON.stringify(updates) + const bytes = Buffer.from(blob).byteLength + const read = new ReadableString(blob) + const compress = zlib.createGzip() + const write = new WritableBuffer() + await Stream.promises.pipeline(read, compress, write) + const bytesGz = write.size() + return { bytes, bytesGz, nUpdates: updates.length } +} + +/** + * @param {Update[]} rawUpdates + * @param {string} 
projectId + * @param {import("./Profiler").Profiler} profile + * @return {Promise} + */ +async function compressRawUpdatesWithMetrics(rawUpdates, projectId, profile) { + if (100 * Math.random() > Settings.estimateCompressionSample) { + return compressRawUpdatesWithProfile(rawUpdates, projectId, profile) + } + const before = await estimateStorage(rawUpdates) + profile.log('estimateRawUpdatesSize') + const updates = compressRawUpdatesWithProfile(rawUpdates, projectId, profile) + const after = await estimateStorage(updates) + for (const [path, values] of Object.entries({ before, after })) { + for (const [method, v] of Object.entries(values)) { + Metrics.summary('updates_compression_estimate', v, { path, method }) + } + } + for (const method of Object.keys(before)) { + const percentage = Math.ceil(100 * (after[method] / before[method])) + Metrics.summary('updates_compression_percentage', percentage, { method }) + } + profile.log('estimateCompressedUpdatesSize') + return updates +} + +export const compressRawUpdatesWithMetricsCb = callbackify( + compressRawUpdatesWithMetrics +) + +/** + * @param {Update[]} rawUpdates + * @param {string} projectId + * @param {import("./Profiler").Profiler} profile + * @return {Update[]} + */ +function compressRawUpdatesWithProfile(rawUpdates, projectId, profile) { + const updates = compressRawUpdates(rawUpdates) + const timeTaken = profile.log('compressRawUpdates').getTimeDelta() + if (timeTaken >= 1000) { + logger.debug( + { projectId, updates: rawUpdates, timeTaken }, + 'slow compression of raw updates' + ) + } + return updates +} + export function compressRawUpdates(rawUpdates) { let updates = convertToSingleOpUpdates(rawUpdates) updates = compressUpdates(updates) diff --git a/services/project-history/app/js/UpdatesProcessor.js b/services/project-history/app/js/UpdatesProcessor.js index a76241d7ca..b4895c012d 100644 --- a/services/project-history/app/js/UpdatesProcessor.js +++ b/services/project-history/app/js/UpdatesProcessor.js @@ 
-546,7 +546,10 @@ export function _processUpdates( } if (filteredUpdates.length === 0) { // return early if there are no updates to apply - return SyncManager.setResyncState(projectId, newSyncState, callback) + return SyncManager.setResyncState(projectId, newSyncState, err => { + if (err) return callback(err) + callback(null, { resyncNeeded: false }) + }) } // only make request to history service if we have actual updates to process _getMostRecentVersionWithDebug( @@ -593,17 +596,17 @@ export function _processUpdates( return cb(err) } profile.log('skipAlreadyAppliedUpdates') - const compressedUpdates = - UpdateCompressor.compressRawUpdates(unappliedUpdates) - const timeTaken = profile - .log('compressRawUpdates') - .getTimeDelta() - if (timeTaken >= 1000) { - logger.debug( - { projectId, updates: unappliedUpdates, timeTaken }, - 'slow compression of raw updates' - ) - } + cb(null, unappliedUpdates) + }, + (unappliedUpdates, cb) => { + UpdateCompressor.compressRawUpdatesWithMetricsCb( + unappliedUpdates, + projectId, + profile, + cb + ) + }, + (compressedUpdates, cb) => { cb = profile.wrap('createBlobs', cb) BlobManager.createBlobsForUpdates( projectId, diff --git a/services/project-history/app/js/types.ts b/services/project-history/app/js/types.ts index 96701e587f..c11b7741e3 100644 --- a/services/project-history/app/js/types.ts +++ b/services/project-history/app/js/types.ts @@ -3,6 +3,8 @@ import { LinkedFileData, RawEditOperation, RawOrigin, + CommentRawData, + TrackedChangeRawData, } from 'overleaf-editor-core/lib/types' export type Update = @@ -118,6 +120,10 @@ export type ResyncDocContentUpdate = { content: string version: number ranges?: Ranges + historyOTRanges?: { + comments: CommentRawData[] + trackedChanges: TrackedChangeRawData[] + } resolvedCommentIds?: string[] } projectHistoryId: string diff --git a/services/project-history/config/settings.defaults.cjs b/services/project-history/config/settings.defaults.cjs index d259d070b9..d767cddd96 100644 --- 
a/services/project-history/config/settings.defaults.cjs +++ b/services/project-history/config/settings.defaults.cjs @@ -110,4 +110,8 @@ module.exports = { shortHistoryQueues: (process.env.SHORT_HISTORY_QUEUES || '') .split(',') .filter(s => !!s), + estimateCompressionSample: parseInt( + process.env.ESTIMATE_COMPRESSION_SAMPLE || '0', + 10 + ), } diff --git a/services/project-history/docker-compose.ci.yml b/services/project-history/docker-compose.ci.yml index 2fe97bd9b3..ca15f35fef 100644 --- a/services/project-history/docker-compose.ci.yml +++ b/services/project-history/docker-compose.ci.yml @@ -28,12 +28,15 @@ services: MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" + volumes: + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started redis: condition: service_healthy user: node + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance @@ -45,7 +48,7 @@ services: command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . 
user: root redis: - image: redis + image: redis:7.4.3 healthcheck: test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/project-history/docker-compose.yml b/services/project-history/docker-compose.yml index 68360baf44..95a36b5fcb 100644 --- a/services/project-history/docker-compose.yml +++ b/services/project-history/docker-compose.yml @@ -26,6 +26,7 @@ services: - .:/overleaf/services/project-history - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/project-history environment: ELASTIC_SEARCH_DSN: es:9200 @@ -45,10 +46,11 @@ services: condition: service_started redis: condition: service_healthy + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance redis: - image: redis + image: redis:7.4.3 healthcheck: test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/project-history/package.json b/services/project-history/package.json index 2a54a807d3..4160f36f6f 100644 --- a/services/project-history/package.json +++ b/services/project-history/package.json @@ -9,8 +9,8 @@ "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", "start": "node app.js", "nodemon": "node --watch app.js", - "test:acceptance:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", - "test:unit:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec $@ test/unit/js", + "test:acceptance:_run": "mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:unit:_run": "mocha --loader=esmock --recursive --reporter spec $@ test/unit/js", "lint": "eslint --max-warnings 0 --format unix .", "format": "prettier --list-different $PWD/'**/*.*js'", "format:fix": "prettier --write $PWD/'**/*.*js'", @@ -25,6 +25,7 @@ 
"@overleaf/promise-utils": "*", "@overleaf/redis-wrapper": "*", "@overleaf/settings": "*", + "@overleaf/stream-utils": "*", "async": "^3.2.5", "aws-sdk": "^2.650.0", "body-parser": "^1.20.3", diff --git a/services/project-history/scripts/retry_failures.js b/services/project-history/scripts/retry_failures.js new file mode 100755 index 0000000000..85ee21faf4 --- /dev/null +++ b/services/project-history/scripts/retry_failures.js @@ -0,0 +1,26 @@ +import * as RetryManager from '../app/js/RetryManager.js' +import minimist from 'minimist' + +const args = minimist(process.argv.slice(2), { + string: ['failureType', 'timeout', 'limit'], + default: { + failureType: 'soft', + timeout: (60 * 60 * 1000).toString(), + limit: (100_000).toString(), + }, +}) + +const failureType = args.failureType +const timeout = parseInt(args.timeout, 10) +const limit = parseInt(args.limit, 10) + +RetryManager.retryFailures({ failureType, timeout, limit }, (err, result) => { + if (err) { + console.error(err) + process.exit(1) + } else { + console.log(JSON.stringify(result)) + console.log('Done.') + } + process.exit(0) +}) diff --git a/services/project-history/test/acceptance/js/SyncTests.js b/services/project-history/test/acceptance/js/SyncTests.js index 89e002d4dd..f7420e6cdb 100644 --- a/services/project-history/test/acceptance/js/SyncTests.js +++ b/services/project-history/test/acceptance/js/SyncTests.js @@ -1225,7 +1225,7 @@ describe('Syncing with web and doc-updater', function () { ) }) - it('should fix comments in the history store', function (done) { + it('should add comments in the history store', function (done) { const commentId = 'comment-id' const addComment = MockHistoryStore() .post(`/api/projects/${historyId}/legacy_changes`, body => { @@ -1315,6 +1315,1195 @@ describe('Syncing with web and doc-updater', function () { } ) }) + + it('should add comments in the history store (history-ot)', function (done) { + const commentId = 'comment-id' + const addComment = MockHistoryStore() + 
.post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + commentId, + ranges: [{ pos: 1, length: 10 }], + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + historyOTRanges: { + comments: [ + { + id: commentId, + ranges: [ + { + pos: 1, + length: 10, + }, + ], + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + addComment.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should add tracked changes in the history store', function (done) { + const fixTrackedChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + textOperation: [ + { + r: 1, + tracking: { + ts: this.timestamp.toJSON(), + type: 'delete', + userId: 'user-id', + }, + }, + { + r: 1, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + 1, + ], + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + 
}) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + ranges: { + changes: [ + { + id: 'id1', + op: { + d: 'a', + p: 0, + }, + metadata: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + { + id: 'id2', + op: { + i: '\n', + p: 0, + hpos: 1, + }, + metadata: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + fixTrackedChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should add tracked changes in the history store (history-ot)', function (done) { + const fixTrackedChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + textOperation: [ + { + r: 1, + tracking: { + ts: this.timestamp.toJSON(), + type: 'delete', + userId: 'user-id', + }, + }, + { + r: 1, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + 1, + ], + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + 
const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + historyOTRanges: { + trackedChanges: [ + { + range: { pos: 0, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'delete', + userId: 'user-id', + }, + }, + { + range: { pos: 1, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + fixTrackedChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + }) + + describe("when a doc's ranges are out of sync", function () { + const commentId = 'comment-id' + beforeEach(function () { + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + 'main.tex': { + hash: '0a207c060e61f3b88eaee0a8cd0696f46fb155eb', + rangesHash: '0a207c060e61f3b88eaee0a8cd0696f46fb155ec', + stringLength: 3, + }, + }, + }, + changes: [], + }, + startVersion: 0, + }, + }) + + MockHistoryStore() + .get( + `/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb` + ) + .reply(200, 'a\nb') + + MockHistoryStore() + .get( + `/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155ec` + ) + .reply( + 200, + JSON.stringify({ + comments: [{ id: commentId, ranges: [{ pos: 0, length: 3 }] }], + trackedChanges: [ + { + range: { pos: 0, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + 
type: 'delete', + userId: 'user-id', + }, + }, + { + range: { pos: 2, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + ], + }) + ) + }) + + it('should fix comments in the history store', function (done) { + const addComment = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + commentId, + ranges: [{ pos: 1, length: 2 }], + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + ranges: { + comments: [ + { + id: commentId, + op: { + c: 'a', + p: 0, + hpos: 1, + hlen: 2, + t: commentId, + }, + meta: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + ], + changes: [ + { + id: 'id1', + op: { + d: 'a', + p: 0, + }, + metadata: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + { + id: 'id2', + op: { + i: '\n', + p: 1, + hpos: 2, + }, + metadata: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + addComment.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + 
it('should fix resolved state for comments in the history store', function (done) { + const addComment = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + commentId, + resolved: true, + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + resolvedCommentIds: [commentId], + ranges: { + comments: [ + { + id: commentId, + op: { + c: 'a', + p: 0, + hpos: 0, + hlen: 3, + t: commentId, + }, + meta: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + ], + changes: [ + { + id: 'id1', + op: { + d: 'a', + p: 0, + }, + metadata: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + { + id: 'id2', + op: { + i: '\n', + p: 1, + hpos: 2, + }, + metadata: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + addComment.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should fix comments in the history store (history-ot)', function (done) { + const addComment = MockHistoryStore() + 
.post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + commentId, + ranges: [{ pos: 1, length: 2 }], + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + historyOTRanges: { + comments: [ + { + id: commentId, + ranges: [ + { + pos: 1, + length: 2, + }, + ], + }, + ], + trackedChanges: [ + { + range: { pos: 0, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'delete', + userId: 'user-id', + }, + }, + { + range: { pos: 2, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + addComment.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should fix resolved state for comments in the history store (history-ot)', function (done) { + const addComment = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + 
commentId, + resolved: true, + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + historyOTRanges: { + comments: [ + { + id: commentId, + ranges: [ + { + pos: 0, + length: 3, + }, + ], + resolved: true, + }, + ], + trackedChanges: [ + { + range: { pos: 0, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'delete', + userId: 'user-id', + }, + }, + { + range: { pos: 2, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + addComment.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should fix tracked changes in the history store', function (done) { + const fixTrackedChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + textOperation: [ + 1, + { + r: 1, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + { + r: 1, + tracking: { + type: 'none', + }, + }, + ], + }, + ], + origin: { kind: 'test-origin' }, + }, + 
]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + ranges: { + comments: [ + { + id: commentId, + op: { + c: 'a', + p: 0, + hpos: 0, + hlen: 3, + t: commentId, + }, + meta: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + ], + changes: [ + { + id: 'id1', + op: { + d: 'a', + p: 0, + }, + metadata: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + { + id: 'id2', + op: { + i: '\n', + p: 0, + hpos: 1, + }, + metadata: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + fixTrackedChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should fix tracked changes in the history store (history-ot)', function (done) { + const fixTrackedChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + textOperation: [ + 1, + { + r: 1, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + { + r: 1, + tracking: { + type: 'none', + }, + }, + ], + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ 
end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + historyOTRanges: { + comments: [ + { + id: commentId, + ranges: [ + { + pos: 0, + length: 3, + }, + ], + }, + ], + trackedChanges: [ + { + range: { pos: 0, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'delete', + userId: 'user-id', + }, + }, + { + range: { pos: 1, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + fixTrackedChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should fix both comments and tracked changes in the history store (history-ot)', function (done) { + const fixTrackedChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + // not merged due to comment operation using history-ot and tracked-changes operation using sharejs ot + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + commentId, + ranges: [{ pos: 1, length: 2 }], + }, + ], + origin: { kind: 'test-origin' }, + }, + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + 
textOperation: [ + 1, + { + r: 1, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + { + r: 1, + tracking: { + type: 'none', + }, + }, + ], + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + historyOTRanges: { + comments: [ + { + id: commentId, + ranges: [ + { + pos: 1, + length: 2, + }, + ], + }, + ], + trackedChanges: [ + { + range: { pos: 0, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'delete', + userId: 'user-id', + }, + }, + { + range: { pos: 1, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + fixTrackedChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) }) describe('resyncProjectStructureOnly', function () { diff --git a/services/project-history/test/unit/js/UpdatesManager/UpdatesProcessorTests.js b/services/project-history/test/unit/js/UpdatesManager/UpdatesProcessorTests.js index 6f148e5a8d..fcc0918e11 100644 --- a/services/project-history/test/unit/js/UpdatesManager/UpdatesProcessorTests.js +++ 
b/services/project-history/test/unit/js/UpdatesManager/UpdatesProcessorTests.js @@ -6,14 +6,14 @@ import * as Errors from '../../../../app/js/Errors.js' const MODULE_PATH = '../../../../app/js/UpdatesProcessor.js' describe('UpdatesProcessor', function () { - before(async function () { + beforeEach(async function () { this.extendLock = sinon.stub() this.BlobManager = { createBlobsForUpdates: sinon.stub(), } this.HistoryStoreManager = { getMostRecentVersion: sinon.stub(), - sendChanges: sinon.stub().yields(null, {}), + sendChanges: sinon.stub().yields(null, { resyncNeeded: true }), } this.LockManager = { runWithLock: sinon.spy((key, runner, callback) => @@ -22,7 +22,7 @@ describe('UpdatesProcessor', function () { } this.RedisManager = {} this.UpdateCompressor = { - compressRawUpdates: sinon.stub(), + compressRawUpdatesWithMetricsCb: sinon.stub(), } this.UpdateTranslator = { convertToChanges: sinon.stub(), @@ -299,7 +299,10 @@ describe('UpdatesProcessor', function () { null, this.expandedUpdates ) - this.UpdateCompressor.compressRawUpdates.returns(this.compressedUpdates) + this.UpdateCompressor.compressRawUpdatesWithMetricsCb.yields( + null, + this.compressedUpdates + ) this.BlobManager.createBlobsForUpdates.callsArgWith( 4, null, @@ -315,8 +318,8 @@ describe('UpdatesProcessor', function () { this.ol_project_id, this.rawUpdates, this.extendLock, - err => { - this.callback(err) + (err, flushResponse) => { + this.callback(err, flushResponse) done() } ) @@ -347,7 +350,7 @@ describe('UpdatesProcessor', function () { }) it('should compress updates', function () { - this.UpdateCompressor.compressRawUpdates.should.have.been.calledWith( + this.UpdateCompressor.compressRawUpdatesWithMetricsCb.should.have.been.calledWith( this.expandedUpdates ) }) @@ -382,8 +385,74 @@ describe('UpdatesProcessor', function () { ) }) - it('should call the callback with no error', function () { - this.callback.should.have.been.called + it('should call the callback with no error and flush 
response', function () { + this.callback.should.have.been.calledWith(null, { resyncNeeded: true }) + }) + }) + + describe('no updates', function () { + beforeEach(function (done) { + this.SyncManager.skipUpdatesDuringSync.yields( + null, + [], + this.newSyncState + ) + this.UpdatesProcessor._processUpdates( + this.project_id, + this.ol_project_id, + this.rawUpdates, + this.extendLock, + (err, flushResponse) => { + this.callback(err, flushResponse) + done() + } + ) + }) + + it('should not get the latest version id', function () { + this.HistoryStoreManager.getMostRecentVersion.should.not.have.been.calledWith( + this.project_id, + this.ol_project_id + ) + }) + + it('should skip updates when resyncing', function () { + this.SyncManager.skipUpdatesDuringSync.should.have.been.calledWith( + this.project_id, + this.rawUpdates + ) + }) + + it('should not expand sync updates', function () { + this.SyncManager.expandSyncUpdates.should.not.have.been.called + }) + + it('should not compress updates', function () { + this.UpdateCompressor.compressRawUpdatesWithMetricsCb.should.not.have + .been.called + }) + + it('should not create any blobs for the updates', function () { + this.BlobManager.createBlobsForUpdates.should.not.have.been.called + }) + + it('should not convert the updates into a change requests', function () { + this.UpdateTranslator.convertToChanges.should.not.have.been.called + }) + + it('should not send the change request to the history store', function () { + this.HistoryStoreManager.sendChanges.should.not.have.been.called + }) + + it('should set the sync state', function () { + this.SyncManager.setResyncState.should.have.been.calledWith( + this.project_id, + this.newSyncState + ) + }) + + it('should call the callback with fake flush response', function () { + this.callback.should.have.been.calledWith(null, { resyncNeeded: false }) }) }) @@ -412,7 +481,7 @@ describe('UpdatesProcessor', function () { }) describe('_skipAlreadyAppliedUpdates', function () { - 
before(function () { + beforeEach(function () { this.UpdateTranslator.isProjectStructureUpdate.callsFake( update => update.version != null ) @@ -420,7 +489,7 @@ describe('UpdatesProcessor', function () { }) describe('with all doc ops in order', function () { - before(function () { + beforeEach(function () { this.updates = [ { doc: 'id', v: 1 }, { doc: 'id', v: 2 }, @@ -440,7 +509,7 @@ describe('UpdatesProcessor', function () { }) describe('with all project ops in order', function () { - before(function () { + beforeEach(function () { this.updates = [ { version: 1 }, { version: 2 }, @@ -460,7 +529,7 @@ describe('UpdatesProcessor', function () { }) describe('with all multiple doc and ops in order', function () { - before(function () { + beforeEach(function () { this.updates = [ { doc: 'id1', v: 1 }, { doc: 'id1', v: 2 }, @@ -488,64 +557,47 @@ describe('UpdatesProcessor', function () { }) describe('with doc ops out of order', function () { - before(function () { + beforeEach(function () { this.updates = [ { doc: 'id', v: 1 }, { doc: 'id', v: 2 }, { doc: 'id', v: 4 }, { doc: 'id', v: 3 }, ] - this.skipFn = sinon.spy( - this.UpdatesProcessor._mocks, - '_skipAlreadyAppliedUpdates' - ) - try { - this.updatesToApply = - this.UpdatesProcessor._skipAlreadyAppliedUpdates( - this.project_id, - this.updates, - { docs: {} } - ) - } catch (error) {} - }) - - after(function () { - this.skipFn.restore() }) it('should throw an exception', function () { - this.skipFn.threw('OpsOutOfOrderError').should.equal(true) + expect(() => { + this.UpdatesProcessor._skipAlreadyAppliedUpdates( + this.project_id, + this.updates, + { docs: {} } + ) + }).to.throw(Errors.OpsOutOfOrderError) }) }) describe('with project ops out of order', function () { - before(function () { + beforeEach(function () { + this.UpdateTranslator.isProjectStructureUpdate.callsFake( + update => update.version != null + ) this.updates = [ { version: 1 }, { version: 2 }, { version: 4 }, { version: 3 }, ] - this.skipFn = 
sinon.spy( - this.UpdatesProcessor._mocks, - '_skipAlreadyAppliedUpdates' - ) - try { - this.updatesToApply = - this.UpdatesProcessor._skipAlreadyAppliedUpdates( - this.project_id, - this.updates, - { docs: {} } - ) - } catch (error) {} - }) - - after(function () { - this.skipFn.restore() }) it('should throw an exception', function () { - this.skipFn.threw('OpsOutOfOrderError').should.equal(true) + expect(() => { + this.UpdatesProcessor._skipAlreadyAppliedUpdates( + this.project_id, + this.updates, + { docs: {} } + ) + }).to.throw(Errors.OpsOutOfOrderError) }) }) }) diff --git a/services/real-time/docker-compose.ci.yml b/services/real-time/docker-compose.ci.yml index 9011627c06..a5a2292e72 100644 --- a/services/real-time/docker-compose.ci.yml +++ b/services/real-time/docker-compose.ci.yml @@ -43,7 +43,7 @@ services: command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . user: root redis: - image: redis + image: redis:7.4.3 healthcheck: test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/real-time/docker-compose.yml b/services/real-time/docker-compose.yml index 9333271dcf..f1041164bc 100644 --- a/services/real-time/docker-compose.yml +++ b/services/real-time/docker-compose.yml @@ -46,7 +46,7 @@ services: command: npm run --silent test:acceptance redis: - image: redis + image: redis:7.4.3 healthcheck: test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/web/.eslintrc.js b/services/web/.eslintrc.js index 2fa9e8f547..ef3cf11de5 100644 --- a/services/web/.eslintrc.js +++ b/services/web/.eslintrc.js @@ -383,6 +383,18 @@ module.exports = { 'Modify location via customLocalStorage instead of calling window.localStorage methods directly', }, ], + 'no-unused-vars': 'off', + '@typescript-eslint/no-unused-vars': [ + 'error', + { + args: 'after-used', + argsIgnorePattern: '^_', + ignoreRestSiblings: false, + caughtErrors: 'none', + vars: 'all', + varsIgnorePattern: '^_', + }, + 
], }, }, { diff --git a/services/web/.prettierignore b/services/web/.prettierignore index f4be187b87..94ab5579c2 100644 --- a/services/web/.prettierignore +++ b/services/web/.prettierignore @@ -6,6 +6,7 @@ frontend/js/vendor modules/**/frontend/js/vendor public/js public/minjs +frontend/stylesheets/bootstrap-5/modules/metrics/nvd3.scss frontend/stylesheets/components/nvd3.less frontend/js/features/source-editor/lezer-latex/latex.mjs frontend/js/features/source-editor/lezer-latex/latex.terms.mjs diff --git a/services/web/.storybook/preview.tsx b/services/web/.storybook/preview.tsx index e3838a6f97..320caac144 100644 --- a/services/web/.storybook/preview.tsx +++ b/services/web/.storybook/preview.tsx @@ -122,6 +122,12 @@ const preview: Preview = { // render stories in iframes, to isolate modals inlineStories: false, }, + options: { + storySort: { + method: 'alphabetical', + order: ['Shared'], + }, + }, }, globalTypes: { theme: { diff --git a/services/web/Makefile b/services/web/Makefile index 58323058b8..6ebbc357c6 100644 --- a/services/web/Makefile +++ b/services/web/Makefile @@ -83,6 +83,11 @@ test_unit_app: $(DOCKER_COMPOSE) run --name unit_test_$(BUILD_DIR_NAME) --rm test_unit $(DOCKER_COMPOSE) down -v -t 0 +test_unit_mocha: export COMPOSE_PROJECT_NAME=unit_test_mocha_$(BUILD_DIR_NAME) +test_unit_mocha: + $(DOCKER_COMPOSE) run --rm test_unit npm run test:unit:mocha + $(DOCKER_COMPOSE) down -v -t 0 + test_unit_esm: export COMPOSE_PROJECT_NAME=unit_test_esm_$(BUILD_DIR_NAME) test_unit_esm: $(DOCKER_COMPOSE) run --rm test_unit npm run test:unit:esm diff --git a/services/web/app/src/Features/Authentication/AuthenticationController.js b/services/web/app/src/Features/Authentication/AuthenticationController.js index 7a97d2ac9c..baba8aacee 100644 --- a/services/web/app/src/Features/Authentication/AuthenticationController.js +++ b/services/web/app/src/Features/Authentication/AuthenticationController.js @@ -82,6 +82,7 @@ const AuthenticationController = { analyticsId: 
user.analyticsId || user._id, alphaProgram: user.alphaProgram || undefined, // only store if set betaProgram: user.betaProgram || undefined, // only store if set + externalAuth: user.externalAuth || false, } if (user.isAdmin) { lightUser.isAdmin = true diff --git a/services/web/app/src/Features/Authorization/AuthorizationManager.js b/services/web/app/src/Features/Authorization/AuthorizationManager.js index 2f339de83d..22d92ea9d9 100644 --- a/services/web/app/src/Features/Authorization/AuthorizationManager.js +++ b/services/web/app/src/Features/Authorization/AuthorizationManager.js @@ -88,9 +88,54 @@ async function getPrivilegeLevelForProject( opts = {} ) { if (userId) { - return getPrivilegeLevelForProjectWithUser(userId, projectId, opts) + return await getPrivilegeLevelForProjectWithUser( + userId, + projectId, + null, + opts + ) } else { - return getPrivilegeLevelForProjectWithoutUser(projectId, token, opts) + return await getPrivilegeLevelForProjectWithoutUser(projectId, token, opts) + } +} + +/** + * Get the privilege level that the user has for the project. + * + * @param userId - The id of the user that wants to access the project. + * @param projectId - The id of the project to be accessed. + * @param {string} token + * @param {ProjectAccess} projectAccess + * @param {Object} opts + * @param {boolean} opts.ignoreSiteAdmin - Do not consider whether the user is + * a site admin. + * @param {boolean} opts.ignorePublicAccess - Do not consider the project is + * publicly accessible. + * + * @returns {string|boolean} The privilege level. One of "owner", + * "readAndWrite", "readOnly" or false. 
+ */ +async function getPrivilegeLevelForProjectWithProjectAccess( + userId, + projectId, + token, + projectAccess, + opts = {} +) { + if (userId) { + return await getPrivilegeLevelForProjectWithUser( + userId, + projectId, + projectAccess, + opts + ) + } else { + return await _getPrivilegeLevelForProjectWithoutUserWithPublicAccessLevel( + projectId, + token, + projectAccess.publicAccessLevel(), + opts + ) } } @@ -98,6 +143,7 @@ async function getPrivilegeLevelForProject( async function getPrivilegeLevelForProjectWithUser( userId, projectId, + projectAccess, opts = {} ) { if (!opts.ignoreSiteAdmin) { @@ -106,11 +152,11 @@ async function getPrivilegeLevelForProjectWithUser( } } - const privilegeLevel = - await CollaboratorsGetter.promises.getMemberIdPrivilegeLevel( - userId, - projectId - ) + projectAccess = + projectAccess || + (await CollaboratorsGetter.promises.getProjectAccess(projectId)) + + const privilegeLevel = projectAccess.privilegeLevelForUser(userId) if (privilegeLevel && privilegeLevel !== PrivilegeLevels.NONE) { // The user has direct access return privilegeLevel @@ -119,7 +165,7 @@ async function getPrivilegeLevelForProjectWithUser( if (!opts.ignorePublicAccess) { // Legacy public-access system // User is present (not anonymous), but does not have direct access - const publicAccessLevel = await getPublicAccessLevel(projectId) + const publicAccessLevel = projectAccess.publicAccessLevel() if (publicAccessLevel === PublicAccessLevels.READ_ONLY) { return PrivilegeLevels.READ_ONLY } @@ -137,7 +183,21 @@ async function getPrivilegeLevelForProjectWithoutUser( token, opts = {} ) { - const publicAccessLevel = await getPublicAccessLevel(projectId) + return await _getPrivilegeLevelForProjectWithoutUserWithPublicAccessLevel( + projectId, + token, + await getPublicAccessLevel(projectId), + opts + ) +} + +// User is Anonymous, Try Token-based access +async function _getPrivilegeLevelForProjectWithoutUserWithPublicAccessLevel( + projectId, + token, + 
publicAccessLevel, + opts = {} +) { if (!opts.ignorePublicAccess) { if (publicAccessLevel === PublicAccessLevels.READ_ONLY) { // Legacy public read-only access for anonymous user @@ -149,7 +209,7 @@ async function getPrivilegeLevelForProjectWithoutUser( } } if (publicAccessLevel === PublicAccessLevels.TOKEN_BASED) { - return getPrivilegeLevelForProjectWithToken(projectId, token) + return await getPrivilegeLevelForProjectWithToken(projectId, token) } // Deny anonymous user access @@ -309,6 +369,7 @@ module.exports = { canUserRenameProject, canUserAdminProject, getPrivilegeLevelForProject, + getPrivilegeLevelForProjectWithProjectAccess, isRestrictedUserForProject, isUserSiteAdmin, }, diff --git a/services/web/app/src/Features/Chat/ChatManager.js b/services/web/app/src/Features/Chat/ChatManager.js index 9625881dd8..7eab6039d8 100644 --- a/services/web/app/src/Features/Chat/ChatManager.js +++ b/services/web/app/src/Features/Chat/ChatManager.js @@ -1,61 +1,46 @@ -const async = require('async') -const UserInfoManager = require('../User/UserInfoManager') const UserInfoController = require('../User/UserInfoController') -const { promisify } = require('@overleaf/promise-utils') +const UserGetter = require('../User/UserGetter') +const { callbackify } = require('@overleaf/promise-utils') -function injectUserInfoIntoThreads(threads, callback) { - // There will be a lot of repitition of user_ids, so first build a list - // of unique ones to perform db look ups on, then use these to populate the - // user fields - let message, thread, threadId, userId - if (callback == null) { - callback = function () {} - } - const userIds = {} - for (threadId in threads) { - thread = threads[threadId] +async function injectUserInfoIntoThreads(threads) { + const userIds = new Set() + for (const thread of Object.values(threads)) { if (thread.resolved) { - userIds[thread.resolved_by_user_id] = true + userIds.add(thread.resolved_by_user_id) } - for (message of Array.from(thread.messages)) { - 
userIds[message.user_id] = true + for (const message of thread.messages) { + userIds.add(message.user_id) } } - const jobs = [] - const users = {} - for (userId in userIds) { - ;(userId => - jobs.push(cb => - UserInfoManager.getPersonalInfo(userId, function (error, user) { - if (error != null) return cb(error) - user = UserInfoController.formatPersonalInfo(user) - users[userId] = user - cb() - }) - ))(userId) + const projection = { + _id: true, + first_name: true, + last_name: true, + email: true, } - - return async.series(jobs, function (error) { - if (error != null) { - return callback(error) + const users = await UserGetter.promises.getUsers(userIds, projection) + const usersById = new Map() + for (const user of users) { + usersById.set( + user._id.toString(), + UserInfoController.formatPersonalInfo(user) + ) + } + for (const thread of Object.values(threads)) { + if (thread.resolved) { + thread.resolved_by_user = usersById.get(thread.resolved_by_user_id) } - for (threadId in threads) { - thread = threads[threadId] - if (thread.resolved) { - thread.resolved_by_user = users[thread.resolved_by_user_id] - } - for (message of Array.from(thread.messages)) { - message.user = users[message.user_id] - } + for (const message of thread.messages) { + message.user = usersById.get(message.user_id) } - return callback(null, threads) - }) + } + return threads } module.exports = { - injectUserInfoIntoThreads, + injectUserInfoIntoThreads: callbackify(injectUserInfoIntoThreads), promises: { - injectUserInfoIntoThreads: promisify(injectUserInfoIntoThreads), + injectUserInfoIntoThreads, }, } diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsGetter.js b/services/web/app/src/Features/Collaborators/CollaboratorsGetter.js index caa6ef159d..a3543ae614 100644 --- a/services/web/app/src/Features/Collaborators/CollaboratorsGetter.js +++ b/services/web/app/src/Features/Collaborators/CollaboratorsGetter.js @@ -1,3 +1,4 @@ +// @ts-check const { callbackify } = 
require('util') const pLimit = require('p-limit') const { ObjectId } = require('mongodb-legacy') @@ -15,9 +16,6 @@ module.exports = { getMemberIdsWithPrivilegeLevels: callbackify(getMemberIdsWithPrivilegeLevels), getMemberIds: callbackify(getMemberIds), getInvitedMemberIds: callbackify(getInvitedMemberIds), - getInvitedMembersWithPrivilegeLevels: callbackify( - getInvitedMembersWithPrivilegeLevels - ), getInvitedMembersWithPrivilegeLevelsFromFields: callbackify( getInvitedMembersWithPrivilegeLevelsFromFields ), @@ -31,10 +29,10 @@ module.exports = { userIsTokenMember: callbackify(userIsTokenMember), getAllInvitedMembers: callbackify(getAllInvitedMembers), promises: { + getProjectAccess, getMemberIdsWithPrivilegeLevels, getMemberIds, getInvitedMemberIds, - getInvitedMembersWithPrivilegeLevels, getInvitedMembersWithPrivilegeLevelsFromFields, getMemberIdPrivilegeLevel, getInvitedEditCollaboratorCount, @@ -50,7 +48,202 @@ module.exports = { }, } -async function getMemberIdsWithPrivilegeLevels(projectId) { +/** + * @typedef ProjectMember + * @property {string} id + * @property {typeof PrivilegeLevels[keyof PrivilegeLevels]} privilegeLevel + * @property {typeof Sources[keyof Sources]} source + * @property {boolean} [pendingEditor] + * @property {boolean} [pendingReviewer] + */ + +/** + * @typedef LoadedProjectMember + * @property {typeof PrivilegeLevels[keyof PrivilegeLevels]} privilegeLevel + * @property {{_id: ObjectId, email: string, features: any, first_name: string, last_name: string, signUpDate: Date}} user + * @property {boolean} [pendingEditor] + * @property {boolean} [pendingReviewer] + */ + +// Wrapper for determining multiple dimensions of project access. 
+class ProjectAccess { + /** @type {ProjectMember[]} */ + #members + + /** @type {typeof PublicAccessLevels[keyof PublicAccessLevels]} */ + #publicAccessLevel + + /** + * @param {{ owner_ref: ObjectId; collaberator_refs: ObjectId[]; readOnly_refs: ObjectId[]; tokenAccessReadAndWrite_refs: ObjectId[]; tokenAccessReadOnly_refs: ObjectId[]; publicAccesLevel: typeof PublicAccessLevels[keyof PublicAccessLevels]; pendingEditor_refs: ObjectId[]; reviewer_refs: ObjectId[]; pendingReviewer_refs: ObjectId[]; }} project + */ + constructor(project) { + this.#members = _getMemberIdsWithPrivilegeLevelsFromFields( + project.owner_ref, + project.collaberator_refs, + project.readOnly_refs, + project.tokenAccessReadAndWrite_refs, + project.tokenAccessReadOnly_refs, + project.publicAccesLevel, + project.pendingEditor_refs, + project.reviewer_refs, + project.pendingReviewer_refs + ) + this.#publicAccessLevel = project.publicAccesLevel + } + + /** + * @return {Promise<{ownerMember: LoadedProjectMember|undefined, members: LoadedProjectMember[]}>} + */ + async loadOwnerAndInvitedMembers() { + const all = await _loadMembers( + this.#members.filter(m => m.source !== Sources.TOKEN) + ) + return { + ownerMember: all.find(m => m.privilegeLevel === PrivilegeLevels.OWNER), + members: all.filter(m => m.privilegeLevel !== PrivilegeLevels.OWNER), + } + } + + /** + * @return {Promise} + */ + async loadInvitedMembers() { + return _loadMembers( + this.#members.filter( + m => + m.source !== Sources.TOKEN && + m.privilegeLevel !== PrivilegeLevels.OWNER + ) + ) + } + + /** + * @return {Promise} + */ + async loadOwner() { + const [owner] = await _loadMembers( + this.#members.filter(m => m.privilegeLevel === PrivilegeLevels.OWNER) + ) + return owner + } + + /** + * @return {ProjectMember[]} + */ + allMembers() { + return this.#members + } + + /** + * @return {typeof PublicAccessLevels[keyof PublicAccessLevels]} + */ + publicAccessLevel() { + return this.#publicAccessLevel + } + + /** + * @return 
{string[]} + */ + memberIds() { + return this.#members.map(m => m.id) + } + + /** + * @return {string[]} + */ + invitedMemberIds() { + return this.#members.filter(m => m.source !== Sources.TOKEN).map(m => m.id) + } + + /** + * @param {string | ObjectId} userId + * @return {typeof PrivilegeLevels[keyof PrivilegeLevels]} + */ + privilegeLevelForUser(userId) { + if (!userId) return PrivilegeLevels.NONE + for (const member of this.#members) { + if (member.id === userId.toString()) { + return member.privilegeLevel + } + } + return PrivilegeLevels.NONE + } + + /** + * @param {string | ObjectId} userId + * @return {boolean} + */ + isUserTokenMember(userId) { + if (!userId) return false + for (const member of this.#members) { + if (member.id === userId.toString() && member.source === Sources.TOKEN) { + return true + } + } + return false + } + + /** + * @param {string | ObjectId} userId + * @return {boolean} + */ + isUserInvitedMember(userId) { + if (!userId) return false + for (const member of this.#members) { + if (member.id === userId.toString() && member.source !== Sources.TOKEN) { + return true + } + } + return false + } + + /** + * @param {string | ObjectId} userId + * @return {boolean} + */ + isUserInvitedReadWriteMember(userId) { + for (const member of this.#members) { + if ( + member.id.toString() === userId.toString() && + member.source !== Sources.TOKEN && + member.privilegeLevel === PrivilegeLevels.READ_AND_WRITE + ) { + return true + } + } + return false + } + + /** + * Counts invited members with editor or reviewer roles + * @return {number} + */ + countInvitedEditCollaborators() { + return this.#members.filter( + m => + m.source === Sources.INVITE && + (m.privilegeLevel === PrivilegeLevels.READ_AND_WRITE || + m.privilegeLevel === PrivilegeLevels.REVIEW) + ).length + } + + /** + * Counts invited members that are readonly pending editors or pending reviewers + * @return {number} + */ + countInvitedPendingEditors() { + return this.#members.filter( + m => + 
m.source === Sources.INVITE && + m.privilegeLevel === PrivilegeLevels.READ_ONLY && + (m.pendingEditor || m.pendingReviewer) + ).length + } +} + +module.exports.ProjectAccess = ProjectAccess + +async function getProjectAccess(projectId) { const project = await ProjectGetter.promises.getProject(projectId, { owner_ref: 1, collaberator_refs: 1, @@ -65,34 +258,19 @@ async function getMemberIdsWithPrivilegeLevels(projectId) { if (!project) { throw new Errors.NotFoundError(`no project found with id ${projectId}`) } - const memberIds = _getMemberIdsWithPrivilegeLevelsFromFields( - project.owner_ref, - project.collaberator_refs, - project.readOnly_refs, - project.tokenAccessReadAndWrite_refs, - project.tokenAccessReadOnly_refs, - project.publicAccesLevel, - project.pendingEditor_refs, - project.reviewer_refs, - project.pendingReviewer_refs - ) - return memberIds + return new ProjectAccess(project) +} + +async function getMemberIdsWithPrivilegeLevels(projectId) { + return (await getProjectAccess(projectId)).allMembers() } async function getMemberIds(projectId) { - const members = await getMemberIdsWithPrivilegeLevels(projectId) - return members.map(m => m.id) + return (await getProjectAccess(projectId)).memberIds() } async function getInvitedMemberIds(projectId) { - const members = await getMemberIdsWithPrivilegeLevels(projectId) - return members.filter(m => m.source !== Sources.TOKEN).map(m => m.id) -} - -async function getInvitedMembersWithPrivilegeLevels(projectId) { - let members = await getMemberIdsWithPrivilegeLevels(projectId) - members = members.filter(m => m.source !== Sources.TOKEN) - return _loadMembers(members) + return (await getProjectAccess(projectId)).invitedMemberIds() } async function getInvitedMembersWithPrivilegeLevelsFromFields( @@ -107,7 +285,7 @@ async function getInvitedMembersWithPrivilegeLevelsFromFields( readOnlyIds, [], [], - null, + 'private', [], reviewerIds, [] @@ -121,69 +299,31 @@ async function getMemberIdPrivilegeLevel(userId, projectId) { 
if (userId == null) { return PrivilegeLevels.NONE } - const members = await getMemberIdsWithPrivilegeLevels(projectId) - for (const member of members) { - if (member.id === userId.toString()) { - return member.privilegeLevel - } - } - return PrivilegeLevels.NONE + return (await getProjectAccess(projectId)).privilegeLevelForUser(userId) } async function getInvitedEditCollaboratorCount(projectId) { - // Counts invited members with editor or reviewer roles - const members = await getMemberIdsWithPrivilegeLevels(projectId) - return members.filter( - m => - m.source === Sources.INVITE && - (m.privilegeLevel === PrivilegeLevels.READ_AND_WRITE || - m.privilegeLevel === PrivilegeLevels.REVIEW) - ).length + return (await getProjectAccess(projectId)).countInvitedEditCollaborators() } async function getInvitedPendingEditorCount(projectId) { - // Only counts invited members that are readonly pending editors or pending - // reviewers - const members = await getMemberIdsWithPrivilegeLevels(projectId) - return members.filter( - m => - m.source === Sources.INVITE && - m.privilegeLevel === PrivilegeLevels.READ_ONLY && - (m.pendingEditor || m.pendingReviewer) - ).length + return (await getProjectAccess(projectId)).countInvitedPendingEditors() } async function isUserInvitedMemberOfProject(userId, projectId) { if (!userId) { return false } - const members = await getMemberIdsWithPrivilegeLevels(projectId) - for (const member of members) { - if ( - member.id.toString() === userId.toString() && - member.source !== Sources.TOKEN - ) { - return true - } - } - return false + return (await getProjectAccess(projectId)).isUserInvitedMember(userId) } async function isUserInvitedReadWriteMemberOfProject(userId, projectId) { if (!userId) { return false } - const members = await getMemberIdsWithPrivilegeLevels(projectId) - for (const member of members) { - if ( - member.id.toString() === userId.toString() && - member.source !== Sources.TOKEN && - member.privilegeLevel === 
PrivilegeLevels.READ_AND_WRITE - ) { - return true - } - } - return false + return (await getProjectAccess(projectId)).isUserInvitedReadWriteMember( + userId + ) } async function getPublicShareTokens(userId, projectId) { @@ -209,10 +349,13 @@ async function getPublicShareTokens(userId, projectId) { return null } + // @ts-ignore if (memberInfo.isOwner) { return memberInfo.tokens + // @ts-ignore } else if (memberInfo.hasTokenReadOnlyAccess) { return { + // @ts-ignore readOnly: memberInfo.tokens.readOnly, } } else { @@ -224,6 +367,7 @@ async function getPublicShareTokens(userId, projectId) { // excluding projects where the user is listed in the token access fields when // token access has been disabled. async function getProjectsUserIsMemberOf(userId, fields) { + // @ts-ignore const limit = pLimit(2) const [readAndWrite, review, readOnly, tokenReadAndWrite, tokenReadOnly] = await Promise.all([ @@ -274,10 +418,9 @@ async function dangerouslyGetAllProjectsUserIsMemberOf(userId, fields) { async function getAllInvitedMembers(projectId) { try { - const rawMembers = await getInvitedMembersWithPrivilegeLevels(projectId) - const { members } = - ProjectEditorHandler.buildOwnerAndMembersViews(rawMembers) - return members + const projectAccess = await getProjectAccess(projectId) + const invitedMembers = await projectAccess.loadInvitedMembers() + return invitedMembers.map(ProjectEditorHandler.buildUserModelView) } catch (err) { throw OError.tag(err, 'error getting members for project', { projectId }) } @@ -316,6 +459,19 @@ async function userIsReadWriteTokenMember(userId, projectId) { return project != null } +/** + * @param {ObjectId} ownerId + * @param {ObjectId[]} collaboratorIds + * @param {ObjectId[]} readOnlyIds + * @param {ObjectId[]} tokenAccessIds + * @param {ObjectId[]} tokenAccessReadOnlyIds + * @param {typeof PublicAccessLevels[keyof PublicAccessLevels]} publicAccessLevel + * @param {ObjectId[]} pendingEditorIds + * @param {ObjectId[]} reviewerIds + * @param 
{ObjectId[]} pendingReviewerIds + * @return {ProjectMember[]} + * @private + */ function _getMemberIdsWithPrivilegeLevelsFromFields( ownerId, collaboratorIds, @@ -384,7 +540,13 @@ function _getMemberIdsWithPrivilegeLevelsFromFields( return members } +/** + * @param {ProjectMember[]} members + * @return {Promise} + * @private + */ async function _loadMembers(members) { + if (members.length === 0) return [] const userIds = Array.from(new Set(members.map(m => m.id))) const users = new Map() for (const user of await UserGetter.promises.getUsers(userIds, { diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js b/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js index 96b4cd6e37..8b5b1bc3c2 100644 --- a/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js +++ b/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js @@ -161,6 +161,7 @@ async function addUserIdToProject( }) let level let existingUsers = project.collaberator_refs || [] + existingUsers = existingUsers.concat(project.reviewer_refs || []) existingUsers = existingUsers.concat(project.readOnly_refs || []) existingUsers = existingUsers.map(u => u.toString()) if (existingUsers.includes(userId.toString())) { diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.mjs b/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.mjs index 4c2d911709..db853afac3 100644 --- a/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.mjs +++ b/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.mjs @@ -16,7 +16,6 @@ import ProjectAuditLogHandler from '../Project/ProjectAuditLogHandler.js' import Errors from '../Errors/Errors.js' import AuthenticationController from '../Authentication/AuthenticationController.js' import PrivilegeLevels from '../Authorization/PrivilegeLevels.js' -import SplitTestHandler from '../SplitTests/SplitTestHandler.js' // This rate limiter 
allows a different number of requests depending on the // number of callaborators a user is allowed. This is implemented by providing @@ -246,9 +245,6 @@ async function viewInvite(req, res) { const projectId = req.params.Project_id const { token } = req.params - // Read split test assignment so that it's available for Pug to read - await SplitTestHandler.promises.getAssignment(req, res, 'core-pug-bs5') - const _renderInvalidPage = function () { res.status(404) logger.debug({ projectId }, 'invite not valid, rendering not-valid page') diff --git a/services/web/app/src/Features/Docstore/DocstoreManager.js b/services/web/app/src/Features/Docstore/DocstoreManager.js index 5fe0f27dc9..4074b90605 100644 --- a/services/web/app/src/Features/Docstore/DocstoreManager.js +++ b/services/web/app/src/Features/Docstore/DocstoreManager.js @@ -1,10 +1,11 @@ const { promisify } = require('util') -const { promisifyMultiResult } = require('@overleaf/promise-utils') +const { promisifyMultiResult, callbackify } = require('@overleaf/promise-utils') const request = require('request').defaults({ jar: false }) const OError = require('@overleaf/o-error') const logger = require('@overleaf/logger') const settings = require('@overleaf/settings') const Errors = require('../Errors/Errors') +const { fetchJson } = require('@overleaf/fetch-utils') const TIMEOUT = 30 * 1000 // request timeout @@ -86,6 +87,22 @@ function getAllDeletedDocs(projectId, callback) { }) } +/** + * @param {string} projectId + */ +async function getCommentThreadIds(projectId) { + const url = `${settings.apis.docstore.url}/project/${projectId}/comment-thread-ids` + return fetchJson(url, { signal: AbortSignal.timeout(TIMEOUT) }) +} + +/** + * @param {string} projectId + */ +async function getTrackedChangesUserIds(projectId) { + const url = `${settings.apis.docstore.url}/project/${projectId}/tracked-changes-user-ids` + return fetchJson(url, { signal: AbortSignal.timeout(TIMEOUT) }) +} + /** * @param {string} projectId * @param 
{Callback} callback @@ -292,6 +309,8 @@ module.exports = { getAllDeletedDocs, getAllRanges, getDoc, + getCommentThreadIds: callbackify(getCommentThreadIds), + getTrackedChangesUserIds: callbackify(getTrackedChangesUserIds), isDocDeleted, updateDoc, projectHasRanges, @@ -304,6 +323,8 @@ module.exports = { getAllDeletedDocs: promisify(getAllDeletedDocs), getAllRanges: promisify(getAllRanges), getDoc: promisifyMultiResult(getDoc, ['lines', 'rev', 'version', 'ranges']), + getCommentThreadIds, + getTrackedChangesUserIds, isDocDeleted: promisify(isDocDeleted), updateDoc: promisifyMultiResult(updateDoc, ['modified', 'rev']), projectHasRanges: promisify(projectHasRanges), diff --git a/services/web/app/src/Features/Editor/EditorHttpController.js b/services/web/app/src/Features/Editor/EditorHttpController.js index 8128a95b26..f44b57f069 100644 --- a/services/web/app/src/Features/Editor/EditorHttpController.js +++ b/services/web/app/src/Features/Editor/EditorHttpController.js @@ -4,14 +4,13 @@ const ProjectGetter = require('../Project/ProjectGetter') const AuthorizationManager = require('../Authorization/AuthorizationManager') const ProjectEditorHandler = require('../Project/ProjectEditorHandler') const Metrics = require('@overleaf/metrics') -const CollaboratorsGetter = require('../Collaborators/CollaboratorsGetter') const CollaboratorsInviteGetter = require('../Collaborators/CollaboratorsInviteGetter') -const CollaboratorsHandler = require('../Collaborators/CollaboratorsHandler') const PrivilegeLevels = require('../Authorization/PrivilegeLevels') const SessionManager = require('../Authentication/SessionManager') const Errors = require('../Errors/Errors') const { expressify } = require('@overleaf/promise-utils') const Settings = require('@overleaf/settings') +const { ProjectAccess } = require('../Collaborators/CollaboratorsGetter') module.exports = { joinProject: expressify(joinProject), @@ -43,12 +42,6 @@ async function joinProject(req, res, next) { if (!project) { return 
res.sendStatus(403) } - // Hide sensitive data if the user is restricted - if (isRestrictedUser) { - project.owner = { _id: project.owner._id } - project.members = [] - project.invites = [] - } // Only show the 'renamed or deleted' message once if (project.deletedByExternalDataSource) { await ProjectDeleter.promises.unmarkAsDeletedByExternalSource(projectId) @@ -75,42 +68,43 @@ async function _buildJoinProjectView(req, projectId, userId) { if (project == null) { throw new Errors.NotFoundError('project not found') } - const members = - await CollaboratorsGetter.promises.getInvitedMembersWithPrivilegeLevels( - projectId - ) + const projectAccess = new ProjectAccess(project) const token = req.body.anonymousAccessToken const privilegeLevel = - await AuthorizationManager.promises.getPrivilegeLevelForProject( + await AuthorizationManager.promises.getPrivilegeLevelForProjectWithProjectAccess( userId, projectId, - token + token, + projectAccess ) if (privilegeLevel == null || privilegeLevel === PrivilegeLevels.NONE) { return { project: null, privilegeLevel: null, isRestrictedUser: false } } - const invites = - await CollaboratorsInviteGetter.promises.getAllInvites(projectId) - const isTokenMember = await CollaboratorsHandler.promises.userIsTokenMember( - userId, - projectId - ) - const isInvitedMember = - await CollaboratorsGetter.promises.isUserInvitedMemberOfProject( - userId, - projectId - ) + const isTokenMember = projectAccess.isUserTokenMember(userId) + const isInvitedMember = projectAccess.isUserInvitedMember(userId) const isRestrictedUser = AuthorizationManager.isRestrictedUser( userId, privilegeLevel, isTokenMember, isInvitedMember ) + let ownerMember + let members = [] + let invites = [] + if (isRestrictedUser) { + ownerMember = await projectAccess.loadOwner() + } else { + ;({ ownerMember, members } = + await projectAccess.loadOwnerAndInvitedMembers()) + invites = await CollaboratorsInviteGetter.promises.getAllInvites(projectId) + } return { project: 
ProjectEditorHandler.buildProjectModelView( project, + ownerMember, members, - invites + invites, + isRestrictedUser ), privilegeLevel, isTokenMember, diff --git a/services/web/app/src/Features/Email/EmailBuilder.js b/services/web/app/src/Features/Email/EmailBuilder.js index 01565201ac..4741838b15 100644 --- a/services/web/app/src/Features/Email/EmailBuilder.js +++ b/services/web/app/src/Features/Email/EmailBuilder.js @@ -949,6 +949,33 @@ templates.welcomeWithoutCTA = NoCTAEmailTemplate({ }, }) +templates.removeGroupMember = NoCTAEmailTemplate({ + subject(opts) { + return `Your ${settings.appName} account has been removed from ${opts.adminName}’s group` + }, + title(opts) { + return `Your ${settings.appName} account has been removed from ${opts.adminName}’s group` + }, + greeting() { + return '' + }, + message() { + const passwordResetUrl = `${settings.siteUrl}/user/password/reset` + + return [ + 'Don’t worry, your account and projects are still accessible. But there are a few changes to be aware of:', + '
<ul>' +
+ `<li>Your account will have reverted to a free ${settings.appName} plan.</li>`,
+ `<li>Any project collaborators have been set to read-only (you can invite one collaborator per project on the free plan).</li>`,
+ `<li>If you previously logged in via SSO, you’ll need to set a password to access your account.</li>` +
+ '</ul>
', + `If you think this has been done in error, please contact your group admin.`, + `Thanks!`, + `Team ${settings.appName}`, + ] + }, +}) + function _formatUserNameAndEmail(user, placeholder) { if (user.first_name && user.last_name) { const fullName = `${user.first_name} ${user.last_name}` diff --git a/services/web/app/src/Features/History/RestoreManager.js b/services/web/app/src/Features/History/RestoreManager.js index 8c73695eed..16ef2024f6 100644 --- a/services/web/app/src/Features/History/RestoreManager.js +++ b/services/web/app/src/Features/History/RestoreManager.js @@ -18,6 +18,12 @@ const OError = require('@overleaf/o-error') const ProjectGetter = require('../Project/ProjectGetter') const ProjectEntityHandler = require('../Project/ProjectEntityHandler') +async function getCommentThreadIds(projectId) { + await DocumentUpdaterHandler.promises.flushProjectToMongo(projectId) + const raw = await DocstoreManager.promises.getCommentThreadIds(projectId) + return new Map(Object.entries(raw).map(([doc, ids]) => [doc, new Set(ids)])) +} + const RestoreManager = { async restoreFileFromV2(userId, projectId, version, pathname) { const fsPath = await RestoreManager._writeFileVersionToDisk( @@ -52,6 +58,25 @@ const RestoreManager = { }, async revertFile(userId, projectId, version, pathname, options = {}) { + const threadIds = await getCommentThreadIds(projectId) + return await RestoreManager._revertSingleFile( + userId, + projectId, + version, + pathname, + threadIds, + options + ) + }, + + async _revertSingleFile( + userId, + projectId, + version, + pathname, + threadIds, + options = {} + ) { const project = await ProjectGetter.promises.getProject(projectId, { overleaf: true, }) @@ -115,6 +140,7 @@ const RestoreManager = { origin, userId ) + threadIds.delete(file.element._id.toString()) } const { metadata } = await RestoreManager._getMetadataFromHistory( @@ -154,22 +180,12 @@ const RestoreManager = { const documentCommentIds = new Set( ranges.comments?.map(({ op: { t } }) 
=> t) ) - - await DocumentUpdaterHandler.promises.flushProjectToMongo(projectId) - - const docsWithRanges = - await DocstoreManager.promises.getAllRanges(projectId) - - const nonOrphanedThreadIds = new Set() - for (const { ranges } of docsWithRanges) { - for (const comment of ranges.comments ?? []) { - nonOrphanedThreadIds.add(comment.op.t) + const commentIdsToDuplicate = Array.from(documentCommentIds).filter(id => { + for (const ids of threadIds.values()) { + if (ids.has(id)) return true } - } - - const commentIdsToDuplicate = Array.from(documentCommentIds).filter(id => - nonOrphanedThreadIds.has(id) - ) + return false + }) const newRanges = { changes: ranges.changes, comments: [] } @@ -191,6 +207,7 @@ const RestoreManager = { continue } // We have a new id for this comment thread + comment.id = result.duplicateId comment.op.t = result.duplicateId } newRanges.comments.push(comment) @@ -231,8 +248,6 @@ const RestoreManager = { delete threadData.resolved_by_user_id delete threadData.resolved_at } - // remove the resolved property from the comment range as the chat service is synced at this point - delete commentRange.op.resolved } await ChatManager.promises.injectUserInfoIntoThreads(newCommentThreadData) @@ -259,6 +274,11 @@ const RestoreManager = { origin, userId ) + // For revertProject: The next doc that gets reverted will need to duplicate all the threads seen here. 
+ threadIds.set( + _id.toString(), + new Set(newRanges.comments.map(({ op: { t } }) => t)) + ) return { _id, @@ -321,11 +341,17 @@ const RestoreManager = { version, timestamp: new Date(updateAtVersion.meta.end_ts).toISOString(), } + const threadIds = await getCommentThreadIds(projectId) for (const pathname of pathsAtPastVersion) { - await RestoreManager.revertFile(userId, projectId, version, pathname, { - origin, - }) + await RestoreManager._revertSingleFile( + userId, + projectId, + version, + pathname, + threadIds, + { origin } + ) } const entitiesAtLiveVersion = diff --git a/services/web/app/src/Features/Notifications/NotificationsController.mjs b/services/web/app/src/Features/Notifications/NotificationsController.mjs index ae1d9208f3..35b5f0a677 100644 --- a/services/web/app/src/Features/Notifications/NotificationsController.mjs +++ b/services/web/app/src/Features/Notifications/NotificationsController.mjs @@ -33,4 +33,26 @@ export default { res.sendStatus(200) ) }, + + getNotification(req, res, next) { + const userId = SessionManager.getLoggedInUserId(req.session) + const { notificationId } = req.params + NotificationsHandler.getUserNotifications( + userId, + function (err, unreadNotifications) { + if (err) { + return next(err) + } + const notification = unreadNotifications.find( + n => n._id === notificationId + ) + + if (!notification) { + return res.status(404).end() + } + + res.json(notification) + } + ) + }, } diff --git a/services/web/app/src/Features/PasswordReset/PasswordResetController.mjs b/services/web/app/src/Features/PasswordReset/PasswordResetController.mjs index b7fc2da9c8..771782c302 100644 --- a/services/web/app/src/Features/PasswordReset/PasswordResetController.mjs +++ b/services/web/app/src/Features/PasswordReset/PasswordResetController.mjs @@ -119,7 +119,11 @@ async function requestReset(req, res, next) { OError.tag(err, 'failed to generate and email password reset token', { email, }) - if (err.message === 'user does not have permission for 
change-password') { + + if ( + err.message === + 'user does not have one or more permissions within change-password' + ) { return res.status(403).json({ message: { key: 'no-password-allowed-due-to-sso', diff --git a/services/web/app/src/Features/PasswordReset/PasswordResetHandler.mjs b/services/web/app/src/Features/PasswordReset/PasswordResetHandler.mjs index 094f18b95f..2c1aefe6a6 100644 --- a/services/web/app/src/Features/PasswordReset/PasswordResetHandler.mjs +++ b/services/web/app/src/Features/PasswordReset/PasswordResetHandler.mjs @@ -72,6 +72,7 @@ async function getUserForPasswordResetToken(token) { 'overleaf.id': 1, email: 1, must_reconfirm: 1, + hashedPassword: 1, }) await assertUserPermissions(user, ['change-password']) diff --git a/services/web/app/src/Features/Project/ProjectController.js b/services/web/app/src/Features/Project/ProjectController.js index ec128ffd54..6744fb8d78 100644 --- a/services/web/app/src/Features/Project/ProjectController.js +++ b/services/web/app/src/Features/Project/ProjectController.js @@ -14,6 +14,7 @@ const ProjectHelper = require('./ProjectHelper') const metrics = require('@overleaf/metrics') const { User } = require('../../models/User') const SubscriptionLocator = require('../Subscription/SubscriptionLocator') +const { isPaidSubscription } = require('../Subscription/SubscriptionHelper') const LimitationsManager = require('../Subscription/LimitationsManager') const Settings = require('@overleaf/settings') const AuthorizationManager = require('../Authorization/AuthorizationManager') @@ -352,6 +353,7 @@ const _ProjectController = { 'overleaf-assist-bundle', 'word-count-client', 'editor-popup-ux-survey', + 'new-editor-error-logs-redesign', ].filter(Boolean) const getUserValues = async userId => @@ -654,17 +656,12 @@ const _ProjectController = { } } - const hasNonRecurlySubscription = - subscription && !subscription.recurlySubscription_id + const hasPaidSubscription = isPaidSubscription(subscription) const 
hasManuallyCollectedSubscription = subscription?.collectionMethod === 'manual' - const canPurchaseAddons = !( - hasNonRecurlySubscription || hasManuallyCollectedSubscription - ) const assistantDisabled = user.aiErrorAssistant?.enabled === false // the assistant has been manually disabled by the user const canUseErrorAssistant = - (user.features?.aiErrorAssistant || canPurchaseAddons) && - !assistantDisabled + !hasManuallyCollectedSubscription && !assistantDisabled let featureUsage = {} @@ -731,12 +728,11 @@ const _ProjectController = { ? 'project/ide-react-detached' : 'project/ide-react' - let chatEnabled - if (Features.hasFeature('saas')) { - chatEnabled = - Features.hasFeature('chat') && req.capabilitySet.has('chat') - } else { - chatEnabled = Features.hasFeature('chat') + const capabilities = [...req.capabilitySet] + + // make sure the capability is added to CE/SP when the feature is enabled + if (!Features.hasFeature('saas') && Features.hasFeature('chat')) { + capabilities.push('chat') } const isOverleafAssistBundleEnabled = @@ -768,6 +764,12 @@ const _ProjectController = { isOverleafAssistBundleEnabled && (await ProjectController._getAddonPrices(req, res)) + const reducedTimeoutWarning = + await SplitTestHandler.promises.getAssignmentForUser( + project.owner_ref, + '10s-timeout-warning' + ) + let planCode = subscription?.planCode if (!planCode && !userInNonIndividualSub) { planCode = 'personal' @@ -791,7 +793,7 @@ const _ProjectController = { referal_id: user.referal_id, signUpDate: user.signUpDate, allowedFreeTrial, - hasRecurlySubscription: subscription?.recurlySubscription_id != null, + hasPaidSubscription, featureSwitches: user.featureSwitches, features: fullFeatureSet, featureUsage, @@ -824,6 +826,7 @@ const _ProjectController = { lineHeight: user.ace.lineHeight || 'normal', overallTheme: user.ace.overallTheme, mathPreview: user.ace.mathPreview, + breadcrumbs: user.ace.breadcrumbs, referencesSearchMode: user.ace.referencesSearchMode, enableNewEditor: 
user.ace.enableNewEditor ?? true, }, @@ -837,7 +840,7 @@ const _ProjectController = { isTokenMember, isInvitedMember ), - chatEnabled, + capabilities, projectHistoryBlobsEnabled: Features.hasFeature( 'project-history-blobs' ), @@ -881,6 +884,10 @@ const _ProjectController = { paywallPlans, customerIoEnabled, addonPrices, + compileSettings: { + reducedTimeoutWarning: reducedTimeoutWarning?.variant, + compileTimeout: ownerFeatures?.compileTimeout, + }, }) timer.done() } catch (err) { diff --git a/services/web/app/src/Features/Project/ProjectDeleter.js b/services/web/app/src/Features/Project/ProjectDeleter.js index e5764bab86..b81281e319 100644 --- a/services/web/app/src/Features/Project/ProjectDeleter.js +++ b/services/web/app/src/Features/Project/ProjectDeleter.js @@ -106,8 +106,24 @@ async function expireDeletedProjectsAfterDuration() { deletedProject => deletedProject.deleterData.deletedProjectId ) ) - for (const projectId of projectIds) { - await expireDeletedProject(projectId) + logger.info( + { projectCount: projectIds.length }, + 'expiring batch of deleted projects' + ) + try { + for (const projectId of projectIds) { + await expireDeletedProject(projectId) + } + logger.info( + { projectCount: projectIds.length }, + 'batch of deleted projects expired successfully' + ) + } catch (error) { + logger.warn( + { error }, + 'something went wrong expiring batch of deleted projects' + ) + throw error } } @@ -276,12 +292,15 @@ async function deleteProject(projectId, options = {}) { ) await Project.deleteOne({ _id: projectId }).exec() + + logger.info( + { projectId, userId: project.owner_ref }, + 'successfully deleted project' + ) } catch (err) { logger.warn({ err }, 'problem deleting project') throw err } - - logger.debug({ projectId }, 'successfully deleted project') } async function undeleteProject(projectId, options = {}) { @@ -335,17 +354,22 @@ async function undeleteProject(projectId, options = {}) { async function expireDeletedProject(projectId) { try { + 
logger.info({ projectId }, 'expiring deleted project') const activeProject = await Project.findById(projectId).exec() if (activeProject) { // That project is active. The deleted project record might be there // because of an incomplete delete or undelete operation. Clean it up and // return. + logger.info( + { projectId }, + 'deleted project record found but project is active' + ) await DeletedProject.deleteOne({ 'deleterData.deletedProjectId': projectId, }) - await ProjectAuditLogEntry.deleteMany({ projectId }) return } + const deletedProject = await DeletedProject.findOne({ 'deleterData.deletedProjectId': projectId, }).exec() @@ -361,12 +385,14 @@ async function expireDeletedProject(projectId) { ) return } - + const userId = deletedProject.deletedProjectOwnerId const historyId = deletedProject.project.overleaf && deletedProject.project.overleaf.history && deletedProject.project.overleaf.history.id + logger.info({ projectId, userId }, 'destroying expired project data') + await Promise.all([ DocstoreManager.promises.destroyProject(deletedProject.project._id), HistoryManager.promises.deleteProject( @@ -379,6 +405,10 @@ async function expireDeletedProject(projectId) { Modules.promises.hooks.fire('projectExpired', deletedProject.project._id), ]) + logger.info( + { projectId, userId }, + 'redacting PII from the deleted project record' + ) await DeletedProject.updateOne( { _id: deletedProject._id, @@ -390,6 +420,7 @@ async function expireDeletedProject(projectId) { }, } ).exec() + logger.info({ projectId, userId }, 'expired deleted project successfully') } catch (error) { logger.warn({ projectId, error }, 'error expiring deleted project') throw error diff --git a/services/web/app/src/Features/Project/ProjectEditorHandler.js b/services/web/app/src/Features/Project/ProjectEditorHandler.js index 05e5beba09..3d3d300e66 100644 --- a/services/web/app/src/Features/Project/ProjectEditorHandler.js +++ b/services/web/app/src/Features/Project/ProjectEditorHandler.js @@ -6,8 +6,13 
@@ const Features = require('../../infrastructure/Features') module.exports = ProjectEditorHandler = { trackChangesAvailable: false, - buildProjectModelView(project, members, invites) { - let owner, ownerFeatures + buildProjectModelView( + project, + ownerMember, + members, + invites, + isRestrictedUser + ) { const result = { _id: project._id, name: project.name, @@ -20,20 +25,23 @@ module.exports = ProjectEditorHandler = { description: project.description, spellCheckLanguage: project.spellCheckLanguage, deletedByExternalDataSource: project.deletedByExternalDataSource || false, - members: [], - invites: this.buildInvitesView(invites), imageName: project.imageName != null ? Path.basename(project.imageName) : undefined, } - ;({ owner, ownerFeatures, members } = - this.buildOwnerAndMembersViews(members)) - result.owner = owner - result.members = members + if (isRestrictedUser) { + result.owner = { _id: project.owner_ref } + result.members = [] + result.invites = [] + } else { + result.owner = this.buildUserModelView(ownerMember) + result.members = members.map(this.buildUserModelView) + result.invites = this.buildInvitesView(invites) + } - result.features = _.defaults(ownerFeatures || {}, { + result.features = _.defaults(ownerMember?.user?.features || {}, { collaborators: -1, // Infinite versioning: false, dropbox: false, @@ -62,25 +70,6 @@ module.exports = ProjectEditorHandler = { return result }, - buildOwnerAndMembersViews(members) { - let owner = null - let ownerFeatures = null - const filteredMembers = [] - for (const member of members || []) { - if (member.privilegeLevel === 'owner') { - ownerFeatures = member.user.features - owner = this.buildUserModelView(member) - } else { - filteredMembers.push(this.buildUserModelView(member)) - } - } - return { - owner, - ownerFeatures, - members: filteredMembers, - } - }, - buildUserModelView(member) { const user = member.user return { diff --git a/services/web/app/src/Features/Project/ProjectListController.mjs 
b/services/web/app/src/Features/Project/ProjectListController.mjs index c62396e153..ab2b0e3082 100644 --- a/services/web/app/src/Features/Project/ProjectListController.mjs +++ b/services/web/app/src/Features/Project/ProjectListController.mjs @@ -26,6 +26,7 @@ import GeoIpLookup from '../../infrastructure/GeoIpLookup.js' import SplitTestHandler from '../SplitTests/SplitTestHandler.js' import SplitTestSessionHandler from '../SplitTests/SplitTestSessionHandler.js' import TutorialHandler from '../Tutorial/TutorialHandler.js' +import SubscriptionHelper from '../Subscription/SubscriptionHelper.js' /** * @import { GetProjectsRequest, GetProjectsResponse, AllUsersProjects, MongoProject } from "./types" @@ -388,13 +389,13 @@ async function projectListPage(req, res, next) { } } - let hasIndividualRecurlySubscription = false + let hasIndividualPaidSubscription = false try { - hasIndividualRecurlySubscription = - usersIndividualSubscription?.groupPlan === false && - usersIndividualSubscription?.recurlyStatus?.state !== 'canceled' && - usersIndividualSubscription?.recurlySubscription_id !== '' + hasIndividualPaidSubscription = + SubscriptionHelper.isIndividualActivePaidSubscription( + usersIndividualSubscription + ) } catch (error) { logger.error({ err: error }, 'Failed to get individual subscription') } @@ -408,6 +409,15 @@ async function projectListPage(req, res, next) { 'papers-notification-banner' ) + const customerIoEnabled = + await SplitTestHandler.promises.hasUserBeenAssignedToVariant( + req, + userId, + 'customer-io-trial-conversion', + 'enabled', + true + ) + res.render('project/list-react', { title: 'your_projects', usersBestSubscription, @@ -437,8 +447,9 @@ async function projectListPage(req, res, next) { groupId: subscription._id, groupName: subscription.teamName, })), - hasIndividualRecurlySubscription, + hasIndividualPaidSubscription, userRestrictions: Array.from(req.userRestrictions || []), + customerIoEnabled, }) } diff --git 
a/services/web/app/src/Features/Subscription/Errors.js b/services/web/app/src/Features/Subscription/Errors.js index cbcd0014f7..9ebb08c6db 100644 --- a/services/web/app/src/Features/Subscription/Errors.js +++ b/services/web/app/src/Features/Subscription/Errors.js @@ -26,10 +26,17 @@ class SubtotalLimitExceededError extends OError {} class HasPastDueInvoiceError extends OError {} +class PaymentActionRequiredError extends OError { + constructor(info) { + super('Payment action required', info) + } +} + module.exports = { RecurlyTransactionError, DuplicateAddOnError, AddOnNotPresentError, + PaymentActionRequiredError, MissingBillingInfoError, ManuallyCollectedError, PendingChangeError, diff --git a/services/web/app/src/Features/Subscription/FeaturesUpdater.js b/services/web/app/src/Features/Subscription/FeaturesUpdater.js index a8c27f705f..16413c501c 100644 --- a/services/web/app/src/Features/Subscription/FeaturesUpdater.js +++ b/services/web/app/src/Features/Subscription/FeaturesUpdater.js @@ -3,6 +3,7 @@ const { callbackify } = require('util') const { callbackifyMultiResult } = require('@overleaf/promise-utils') const PlansLocator = require('./PlansLocator') const SubscriptionLocator = require('./SubscriptionLocator') +const SubscriptionHelper = require('./SubscriptionHelper') const UserFeaturesUpdater = require('./UserFeaturesUpdater') const FeaturesHelper = require('./FeaturesHelper') const Settings = require('@overleaf/settings') @@ -117,7 +118,10 @@ async function computeFeatures(userId) { async function _getIndividualFeatures(userId) { const subscription = await SubscriptionLocator.promises.getUsersSubscription(userId) - if (subscription == null || subscription?.recurlyStatus?.state === 'paused') { + if ( + subscription == null || + SubscriptionHelper.getPaidSubscriptionState(subscription) === 'paused' + ) { return {} } diff --git a/services/web/app/src/Features/Subscription/PaymentProviderEntities.js 
b/services/web/app/src/Features/Subscription/PaymentProviderEntities.js index 6fe8638389..21bd504caf 100644 --- a/services/web/app/src/Features/Subscription/PaymentProviderEntities.js +++ b/services/web/app/src/Features/Subscription/PaymentProviderEntities.js @@ -8,11 +8,13 @@ const OError = require('@overleaf/o-error') const { DuplicateAddOnError, AddOnNotPresentError } = require('./Errors') const PlansLocator = require('./PlansLocator') -const SubscriptionHelper = require('./SubscriptionHelper') -const AI_ADD_ON_CODE = 'assistant' +let SubscriptionHelper = null // Work around circular import (loaded at the bottom of the file) + const MEMBERS_LIMIT_ADD_ON_CODE = 'additional-license' -const STANDALONE_AI_ADD_ON_CODES = ['assistant', 'assistant-annual'] +const AI_ASSIST_STANDALONE_MONTHLY_PLAN_CODE = 'assistant' +const AI_ASSIST_STANDALONE_ANNUAL_PLAN_CODE = 'assistant-annual' +const AI_ADD_ON_CODE = 'assistant' class PaymentProviderSubscription { /** @@ -132,9 +134,11 @@ class PaymentProviderSubscription { if (newPlan == null) { throw new OError('Unable to find plan in settings', { planCode }) } + const isInTrial = SubscriptionHelper.isInTrial(this.trialPeriodEnd) const shouldChangeAtTermEnd = SubscriptionHelper.shouldPlanChangeAtTermEnd( currentPlan, - newPlan + newPlan, + isInTrial ) const changeRequest = new PaymentProviderSubscriptionChangeRequest({ @@ -248,9 +252,10 @@ class PaymentProviderSubscription { const addOnUpdates = this.addOns .filter(addOn => addOn.code !== code) .map(addOn => addOn.toAddOnUpdate()) + const isInTrial = SubscriptionHelper.isInTrial(this.trialPeriodEnd) return new PaymentProviderSubscriptionChangeRequest({ subscription: this, - timeframe: 'term_end', + timeframe: isInTrial ? 
'now' : 'term_end', addOnUpdates, }) } @@ -587,7 +592,10 @@ class PaymentProviderAccount { * @param {string} planCode */ function isStandaloneAiAddOnPlanCode(planCode) { - return STANDALONE_AI_ADD_ON_CODES.includes(planCode) + return ( + planCode === AI_ASSIST_STANDALONE_MONTHLY_PLAN_CODE || + planCode === AI_ASSIST_STANDALONE_ANNUAL_PLAN_CODE + ) } /** @@ -618,7 +626,8 @@ function subscriptionChangeIsAiAssistUpgrade(subscriptionChange) { module.exports = { AI_ADD_ON_CODE, MEMBERS_LIMIT_ADD_ON_CODE, - STANDALONE_AI_ADD_ON_CODES, + AI_ASSIST_STANDALONE_MONTHLY_PLAN_CODE, + AI_ASSIST_STANDALONE_ANNUAL_PLAN_CODE, PaymentProviderSubscription, PaymentProviderSubscriptionAddOn, PaymentProviderSubscriptionChange, @@ -636,3 +645,5 @@ module.exports = { subscriptionChangeIsAiAssistUpgrade, PaymentProviderImmediateCharge, } + +SubscriptionHelper = require('./SubscriptionHelper') diff --git a/services/web/app/src/Features/Subscription/PlansLocator.js b/services/web/app/src/Features/Subscription/PlansLocator.js index 24343e1109..67d2f31c52 100644 --- a/services/web/app/src/Features/Subscription/PlansLocator.js +++ b/services/web/app/src/Features/Subscription/PlansLocator.js @@ -1,10 +1,15 @@ -// TODO: This file may be deleted when Stripe is fully implemented to all users, so, consider deleting it +// @ts-check + const Settings = require('@overleaf/settings') const logger = require('@overleaf/logger') /** * @typedef {import('../../../../types/subscription/plan').RecurlyPlanCode} RecurlyPlanCode * @typedef {import('../../../../types/subscription/plan').StripeLookupKey} StripeLookupKey + * @typedef {import('../../../../types/subscription/plan').StripeBaseLookupKey} StripeBaseLookupKey + * @typedef {import('../../../../types/subscription/plan').Plan} Plan + * @typedef {import('../../../../types/subscription/currency').StripeCurrencyCode} StripeCurrencyCode + * @typedef {import('stripe').Stripe.Price.Recurring.Interval} BillingCycleInterval */ function 
ensurePlansAreSetupCorrectly() { @@ -24,61 +29,105 @@ function ensurePlansAreSetupCorrectly() { }) } -const recurlyPlanCodeToStripeLookupKey = { - 'professional-annual': 'professional_annual', - professional: 'professional_monthly', - professional_free_trial_7_days: 'professional_monthly', - 'collaborator-annual': 'standard_annual', +/** + * @type {Record} + */ +const recurlyCodeToStripeBaseLookupKey = { collaborator: 'standard_monthly', + 'collaborator-annual': 'standard_annual', collaborator_free_trial_7_days: 'standard_monthly', - 'student-annual': 'student_annual', + + professional: 'professional_monthly', + 'professional-annual': 'professional_annual', + professional_free_trial_7_days: 'professional_monthly', + student: 'student_monthly', + 'student-annual': 'student_annual', student_free_trial_7_days: 'student_monthly', - group_professional: 'group_professional_enterprise', - group_professional_educational: 'group_professional_educational', + + // TODO: change all group plans' lookup_keys to match the UK account after they have been added group_collaborator: 'group_standard_enterprise', group_collaborator_educational: 'group_standard_educational', - assistant_annual: 'error_assist_annual', - assistant: 'error_assist_monthly', + group_professional: 'group_professional_enterprise', + group_professional_educational: 'group_professional_educational', + + assistant: 'assistant_monthly', + 'assistant-annual': 'assistant_annual', +} + +const LATEST_STRIPE_LOOKUP_KEY_VERSION = 'jun2025' + +/** + * Build the Stripe lookup key, will be in this format: + * `${productCode}_${billingInterval}_${latestVersion}_${currency}` + * (for example: 'assistant_annual_jun2025_clp') + * + * @param {RecurlyPlanCode} recurlyCode + * @param {StripeCurrencyCode} currency + * @param {BillingCycleInterval} [billingCycleInterval] -- needed for handling 'assistant' add-on + * @returns {StripeLookupKey|null} + */ +function buildStripeLookupKey(recurlyCode, currency, billingCycleInterval) { + 
let stripeBaseLookupKey = recurlyCodeToStripeBaseLookupKey[recurlyCode] + + // Recurly always uses 'assistant' as the code regardless of the subscription duration + if (recurlyCode === 'assistant' && billingCycleInterval) { + if (billingCycleInterval === 'month') { + stripeBaseLookupKey = 'assistant_monthly' + } + if (billingCycleInterval === 'year') { + stripeBaseLookupKey = 'assistant_annual' + } + } + + if (stripeBaseLookupKey == null) { + return null + } + + return `${stripeBaseLookupKey}_${LATEST_STRIPE_LOOKUP_KEY_VERSION}_${currency}` } /** - * - * @param {RecurlyPlanCode} recurlyPlanCode - * @returns {StripeLookupKey} + * @typedef {{ planType: 'individual' | 'group' | 'student' | null, period: 'annual' | 'monthly' }} PlanTypeAndPeriod + * @type {Record} */ -function mapRecurlyPlanCodeToStripeLookupKey(recurlyPlanCode) { - return recurlyPlanCodeToStripeLookupKey[recurlyPlanCode] -} - const recurlyPlanCodeToPlanTypeAndPeriod = { collaborator: { planType: 'individual', period: 'monthly' }, - collaborator_free_trial_7_days: { planType: 'individual', period: 'monthly' }, 'collaborator-annual': { planType: 'individual', period: 'annual' }, + collaborator_free_trial_7_days: { planType: 'individual', period: 'monthly' }, + professional: { planType: 'individual', period: 'monthly' }, + 'professional-annual': { planType: 'individual', period: 'annual' }, professional_free_trial_7_days: { planType: 'individual', period: 'monthly', }, - 'professional-annual': { planType: 'individual', period: 'annual' }, + student: { planType: 'student', period: 'monthly' }, - student_free_trial_7_days: { planType: 'student', period: 'monthly' }, 'student-annual': { planType: 'student', period: 'annual' }, - group_professional: { planType: 'group', period: 'annual' }, - group_professional_educational: { planType: 'group', period: 'annual' }, + student_free_trial_7_days: { planType: 'student', period: 'monthly' }, + group_collaborator: { planType: 'group', period: 'annual' }, 
group_collaborator_educational: { planType: 'group', period: 'annual' }, + group_professional: { planType: 'group', period: 'annual' }, + group_professional_educational: { planType: 'group', period: 'annual' }, + + assistant: { planType: null, period: 'monthly' }, + 'assistant-annual': { planType: null, period: 'annual' }, } /** - * * @param {RecurlyPlanCode} recurlyPlanCode - * @returns {{ planType: 'individual' | 'group' | 'student', period: 'annual' | 'monthly'}} + * @returns {PlanTypeAndPeriod} */ function getPlanTypeAndPeriodFromRecurlyPlanCode(recurlyPlanCode) { return recurlyPlanCodeToPlanTypeAndPeriod[recurlyPlanCode] } +/** + * @param {string|null} [planCode] + * @returns {Plan|null} + */ function findLocalPlanInSettings(planCode) { for (const plan of Settings.plans) { if (plan.planCode === planCode) { @@ -91,6 +140,6 @@ function findLocalPlanInSettings(planCode) { module.exports = { ensurePlansAreSetupCorrectly, findLocalPlanInSettings, - mapRecurlyPlanCodeToStripeLookupKey, + buildStripeLookupKey, getPlanTypeAndPeriodFromRecurlyPlanCode, } diff --git a/services/web/app/src/Features/Subscription/RecurlyClient.js b/services/web/app/src/Features/Subscription/RecurlyClient.js index 753d49ba0f..25332a9c34 100644 --- a/services/web/app/src/Features/Subscription/RecurlyClient.js +++ b/services/web/app/src/Features/Subscription/RecurlyClient.js @@ -22,6 +22,7 @@ const { MissingBillingInfoError, SubtotalLimitExceededError, } = require('./Errors') +const RecurlyMetrics = require('./RecurlyMetrics') /** * @import { PaymentProviderSubscriptionChangeRequest } from './PaymentProviderEntities' @@ -29,10 +30,28 @@ const { * @import { PaymentMethod } from './types' */ +class RecurlyClientWithErrorHandling extends recurly.Client { + /** + * @param {import('recurly/lib/recurly/Http').Response} response + * @return {Error | null} + * @private + */ + _errorFromResponse(response) { + RecurlyMetrics.recordMetrics( + response.status, + response.rateLimit, + 
response.rateLimitRemaining, + response.rateLimitReset.getTime() + ) + // @ts-ignore + return super._errorFromResponse(response) + } +} + const recurlySettings = Settings.apis.recurly const recurlyApiKey = recurlySettings ? recurlySettings.apiKey : undefined -const client = new recurly.Client(recurlyApiKey) +const client = new RecurlyClientWithErrorHandling(recurlyApiKey) /** * Get account for a given user @@ -717,6 +736,21 @@ async function failInvoice(invoiceId) { await client.markInvoiceFailed(invoiceId) } +async function terminateSubscriptionByUuid(subscriptionUuid) { + const subscription = await client.terminateSubscription( + 'uuid-' + subscriptionUuid, + { + body: { + refund: 'none', + }, + } + ) + + logger.debug({ subscriptionUuid }, 'subscription terminated') + + return subscription +} + module.exports = { errors: recurly.errors, @@ -740,6 +774,7 @@ module.exports = { resumeSubscriptionByUuid: callbackify(resumeSubscriptionByUuid), getPastDueInvoices: callbackify(getPastDueInvoices), failInvoice: callbackify(failInvoice), + terminateSubscriptionByUuid: callbackify(terminateSubscriptionByUuid), promises: { getSubscription, @@ -762,5 +797,6 @@ module.exports = { getPlan, getPastDueInvoices, failInvoice, + terminateSubscriptionByUuid, }, } diff --git a/services/web/app/src/Features/Subscription/RecurlyMetrics.js b/services/web/app/src/Features/Subscription/RecurlyMetrics.js new file mode 100644 index 0000000000..1b709d7dc4 --- /dev/null +++ b/services/web/app/src/Features/Subscription/RecurlyMetrics.js @@ -0,0 +1,38 @@ +const Metrics = require('@overleaf/metrics') + +/** + * @param {number} status + * @param {number} rateLimit + * @param {number} rateLimitRemaining + * @param {number} rateLimitReset + */ +function recordMetrics(status, rateLimit, rateLimitRemaining, rateLimitReset) { + Metrics.inc('recurly_request', 1, { status }) + const metrics = { rateLimit, rateLimitRemaining, rateLimitReset } + for (const [method, v] of Object.entries(metrics)) { + if 
(Number.isNaN(v)) continue + Metrics.gauge('recurly_request_rate_limiting', v, 1, { method }) + } +} + +/** + * @param {Response} response + */ +function recordMetricsFromResponse(response) { + const rateLimit = parseInt( + response.headers.get('X-RateLimit-Limit') || '', + 10 + ) + const rateLimitRemaining = parseInt( + response.headers.get('X-RateLimit-Remaining') || '', + 10 + ) + const rateLimitReset = + parseInt(response.headers.get('X-RateLimit-Reset') || '', 10) * 1000 + recordMetrics(response.status, rateLimit, rateLimitRemaining, rateLimitReset) +} + +module.exports = { + recordMetrics, + recordMetricsFromResponse, +} diff --git a/services/web/app/src/Features/Subscription/RecurlyWrapper.js b/services/web/app/src/Features/Subscription/RecurlyWrapper.js index 2227597737..243da6edce 100644 --- a/services/web/app/src/Features/Subscription/RecurlyWrapper.js +++ b/services/web/app/src/Features/Subscription/RecurlyWrapper.js @@ -9,24 +9,30 @@ const logger = require('@overleaf/logger') const Errors = require('../Errors/Errors') const SubscriptionErrors = require('./Errors') const { callbackify } = require('@overleaf/promise-utils') +const RecurlyMetrics = require('./RecurlyMetrics') /** - * @param accountId - * @param newEmail + * Updates the email address of a Recurly account + * + * @param userId + * @param newAccountEmail - the new email address to set for the Recurly account */ -async function updateAccountEmailAddress(accountId, newEmail) { +async function updateAccountEmailAddress(userId, newAccountEmail) { const data = { - email: newEmail, + email: newAccountEmail, } let requestBody try { requestBody = RecurlyWrapper._buildXml('account', data) } catch (error) { - throw OError.tag(error, 'error building xml', { accountId, newEmail }) + throw OError.tag(error, 'error building xml', { + accountId: userId, + newEmail: newAccountEmail, + }) } const { body } = await RecurlyWrapper.promises.apiRequest({ - url: `accounts/${accountId}`, + url: `accounts/${userId}`, 
method: 'PUT', body: requestBody, }) @@ -412,9 +418,15 @@ const promises = { } try { - return await fetchStringWithResponse(fetchUrl, fetchOptions) + const { body, response } = await fetchStringWithResponse( + fetchUrl, + fetchOptions + ) + RecurlyMetrics.recordMetricsFromResponse(response) + return { body, response } } catch (error) { if (error instanceof RequestFailedError) { + RecurlyMetrics.recordMetricsFromResponse(error.response) if (error.response.status === 404 && expect404) { return { response: error.response, body: null } } else if (error.response.status === 422 && expect422) { @@ -681,12 +693,15 @@ const promises = { } }, - async extendTrial(subscriptionId, daysUntilExpire) { + async extendTrial(subscriptionId, trialEndsAt, daysUntilExpire) { if (daysUntilExpire == null) { daysUntilExpire = 7 } + if (trialEndsAt == null) { + trialEndsAt = new Date() + } const nextRenewalDate = new Date() - nextRenewalDate.setDate(nextRenewalDate.getDate() + daysUntilExpire) + nextRenewalDate.setDate(trialEndsAt.getDate() + daysUntilExpire) logger.debug( { subscriptionId, daysUntilExpire }, 'Exending Free trial for user' diff --git a/services/web/app/src/Features/Subscription/SubscriptionController.js b/services/web/app/src/Features/Subscription/SubscriptionController.js index 7aa345e7a8..5856682166 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionController.js +++ b/services/web/app/src/Features/Subscription/SubscriptionController.js @@ -2,6 +2,7 @@ const SessionManager = require('../Authentication/SessionManager') const SubscriptionHandler = require('./SubscriptionHandler') +const SubscriptionHelper = require('./SubscriptionHelper') const SubscriptionViewModelBuilder = require('./SubscriptionViewModelBuilder') const LimitationsManager = require('./LimitationsManager') const RecurlyWrapper = require('./RecurlyWrapper') @@ -15,7 +16,11 @@ const AnalyticsManager = require('../Analytics/AnalyticsManager') const RecurlyEventHandler = 
require('./RecurlyEventHandler') const { expressify } = require('@overleaf/promise-utils') const OError = require('@overleaf/o-error') -const { DuplicateAddOnError, AddOnNotPresentError } = require('./Errors') +const { + DuplicateAddOnError, + AddOnNotPresentError, + PaymentActionRequiredError, +} = require('./Errors') const SplitTestHandler = require('../SplitTests/SplitTestHandler') const AuthorizationManager = require('../Authorization/AuthorizationManager') const Modules = require('../../infrastructure/Modules') @@ -27,6 +32,11 @@ const PlansLocator = require('./PlansLocator') const PaymentProviderEntities = require('./PaymentProviderEntities') const { User } = require('../../models/User') const UserGetter = require('../User/UserGetter') +const PermissionsManager = require('../Authorization/PermissionsManager') +const { + sanitizeSessionUserForFrontEnd, +} = require('../../infrastructure/FrontEndUser') +const { IndeterminateInvoiceError } = require('../Errors/Errors') /** * @import { SubscriptionChangeDescription } from '../../../../types/subscription/subscription-change-preview' @@ -78,9 +88,13 @@ async function userSubscriptionPage(req, res) { await Modules.promises.hooks.fire('userCanExtendTrial', user) )?.[0] const fromPlansPage = req.query.hasSubscription + const isInTrial = SubscriptionHelper.isInTrial( + personalSubscription?.payment?.trialEndsAt + ) const plansData = SubscriptionViewModelBuilder.buildPlansListForSubscriptionDash( - personalSubscription?.plan + personalSubscription?.plan, + isInTrial ) AnalyticsManager.recordEventForSession(req.session, 'subscription-page-view') @@ -258,7 +272,8 @@ async function pauseSubscription(req, res, next) { { pause_length: pauseCycles, plan_code: subscription?.planCode, - subscriptionId: subscription?.recurlySubscription_id, + subscriptionId: + SubscriptionHelper.getPaymentProviderSubscriptionId(subscription), } ) @@ -311,7 +326,9 @@ function cancelSubscription(req, res, next) { async function 
canceledSubscription(req, res, next) { return res.render('subscriptions/canceled-subscription-react', { title: 'subscription_canceled', - user: SessionManager.getSessionUser(req.session), + user: sanitizeSessionUserForFrontEnd( + SessionManager.getSessionUser(req.session) + ), }) } @@ -330,7 +347,8 @@ function cancelV1Subscription(req, res, next) { } async function previewAddonPurchase(req, res) { - const userId = SessionManager.getLoggedInUserId(req.session) + const user = SessionManager.getSessionUser(req.session) + const userId = user._id const addOnCode = req.params.addOnCode const purchaseReferrer = req.query.purchaseReferrer @@ -338,6 +356,16 @@ async function previewAddonPurchase(req, res) { return HttpErrorHandler.notFound(req, res, `Unknown add-on: ${addOnCode}`) } + const canUseAi = await PermissionsManager.promises.checkUserPermissions( + user, + ['use-ai'] + ) + if (!canUseAi) { + return res.redirect( + '/user/subscription?redirect-reason=ai-assist-unavailable' + ) + } + /** @type {PaymentMethod[]} */ const paymentMethod = await Modules.promises.hooks.fire( 'getPaymentMethod', @@ -410,8 +438,6 @@ async function purchaseAddon(req, res, next) { logger.debug({ userId: user._id, addOnCode }, 'purchasing add-ons') try { - // set a restore point in the case of a failed payment for the upgrade (Recurly only) - await SubscriptionHandler.promises.setSubscriptionRestorePoint(user._id) await SubscriptionHandler.promises.purchaseAddon( user._id, addOnCode, @@ -425,6 +451,11 @@ async function purchaseAddon(req, res, next) { 'Your subscription already includes this add-on', { addon: addOnCode } ) + } else if (err instanceof PaymentActionRequiredError) { + return res.status(402).json({ + message: 'Payment action required', + clientSecret: err.info.clientSecret, + }) } else { if (err instanceof Error) { OError.tag(err, 'something went wrong purchasing add-ons', { @@ -526,18 +557,18 @@ function cancelPendingSubscriptionChange(req, res, next) { }) } -function 
updateAccountEmailAddress(req, res, next) { +async function updateAccountEmailAddress(req, res, next) { const user = SessionManager.getSessionUser(req.session) - RecurlyWrapper.updateAccountEmailAddress( - user._id, - user.email, - function (error) { - if (error) { - return next(error) - } - res.sendStatus(200) - } - ) + try { + await Modules.promises.hooks.fire( + 'updateAccountEmailAddress', + user._id, + user.email + ) + return res.sendStatus(200) + } catch (error) { + return next(error) + } } function reactivateSubscription(req, res, next) { @@ -596,6 +627,13 @@ function recurlyCallback(req, res, next) { eventData.transaction.subscription_id, lastSubscription, function (err) { + if (err instanceof IndeterminateInvoiceError) { + logger.warn( + { recurlySubscriptionId: err.info.recurlySubscriptionId }, + 'could not determine invoice to fail for subscription' + ) + return res.sendStatus(200) + } if (err) { return next(err) } @@ -695,7 +733,7 @@ async function getRecommendedCurrency(req, res) { ip = req.query.ip } const currencyLookup = await GeoIpLookup.promises.getCurrencyCode(ip) - let countryCode = currencyLookup.countryCode + const countryCode = currencyLookup.countryCode const recommendedCurrency = currencyLookup.currencyCode let currency = null @@ -706,13 +744,6 @@ async function getRecommendedCurrency(req, res) { currency = recommendedCurrency } - const queryCountryCode = req.query.countryCode?.toUpperCase() - - // only enable countryCode testing flag on staging or dev environments - if (queryCountryCode && process.env.NODE_ENV !== 'production') { - countryCode = queryCountryCode - } - return { currency, recommendedCurrency, @@ -812,7 +843,7 @@ function makeChangePreview( paymentMethod: paymentMethod?.toString(), netTerms: subscription.netTerms, nextPlan: { - annual: nextPlan.annual ?? false, + annual: nextPlan?.annual ?? 
false, }, nextInvoice: { date: subscription.periodEnd.toISOString(), @@ -850,7 +881,7 @@ module.exports = { cancelV1Subscription, previewSubscription: expressify(previewSubscription), cancelPendingSubscriptionChange, - updateAccountEmailAddress, + updateAccountEmailAddress: expressify(updateAccountEmailAddress), reactivateSubscription, recurlyCallback, extendTrial: expressify(extendTrial), diff --git a/services/web/app/src/Features/Subscription/SubscriptionGroupHandler.js b/services/web/app/src/Features/Subscription/SubscriptionGroupHandler.js index c717b2eec6..ba862baa67 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionGroupHandler.js +++ b/services/web/app/src/Features/Subscription/SubscriptionGroupHandler.js @@ -4,6 +4,7 @@ const OError = require('@overleaf/o-error') const SubscriptionUpdater = require('./SubscriptionUpdater') const SubscriptionLocator = require('./SubscriptionLocator') const SubscriptionController = require('./SubscriptionController') +const SubscriptionHelper = require('./SubscriptionHelper') const { Subscription } = require('../../models/Subscription') const { User } = require('../../models/User') const RecurlyClient = require('./RecurlyClient') @@ -77,7 +78,7 @@ async function ensureFlexibleLicensingEnabled(plan) { } async function ensureSubscriptionIsActive(subscription) { - if (subscription?.recurlyStatus?.state !== 'active') { + if (SubscriptionHelper.getPaidSubscriptionState(subscription) !== 'active') { throw new InactiveError('The subscription is not active', { subscriptionId: subscription._id.toString(), }) diff --git a/services/web/app/src/Features/Subscription/SubscriptionHandler.js b/services/web/app/src/Features/Subscription/SubscriptionHandler.js index 1296a2a7de..104acd8783 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionHandler.js +++ b/services/web/app/src/Features/Subscription/SubscriptionHandler.js @@ -1,21 +1,21 @@ // @ts-check -const recurly = require('recurly') const RecurlyWrapper = 
require('./RecurlyWrapper') const RecurlyClient = require('./RecurlyClient') const { User } = require('../../models/User') const logger = require('@overleaf/logger') +const SubscriptionHelper = require('./SubscriptionHelper') const SubscriptionUpdater = require('./SubscriptionUpdater') const SubscriptionLocator = require('./SubscriptionLocator') const LimitationsManager = require('./LimitationsManager') const EmailHandler = require('../Email/EmailHandler') const { callbackify } = require('@overleaf/promise-utils') const UserUpdater = require('../User/UserUpdater') -const { NotFoundError, IndeterminateInvoiceError } = require('../Errors/Errors') +const { IndeterminateInvoiceError } = require('../Errors/Errors') const Modules = require('../../infrastructure/Modules') /** - * @import { PaymentProviderSubscription, PaymentProviderSubscriptionChange } from './PaymentProviderEntities' + * @import { PaymentProviderSubscriptionChange } from './PaymentProviderEntities' */ async function validateNoSubscriptionInRecurly(userId) { @@ -102,8 +102,7 @@ async function updateSubscription(user, planCode) { if ( !hasSubscription || subscription == null || - (subscription.recurlySubscription_id == null && - subscription.paymentProvider?.subscriptionId == null) + SubscriptionHelper.getPaymentProviderSubscriptionId(subscription) == null ) { return } @@ -247,11 +246,8 @@ async function attemptPaypalInvoiceCollection(recurlyAccountCode) { ) } -async function extendTrial(subscription, daysToExend) { - await RecurlyWrapper.promises.extendTrial( - subscription.recurlySubscription_id, - daysToExend - ) +async function extendTrial(subscription, daysToExtend) { + await Modules.promises.hooks.fire('extendTrial', subscription, daysToExtend) } /** @@ -278,24 +274,12 @@ async function previewAddonPurchase(userId, addOnCode) { * @param {number} quantity */ async function purchaseAddon(userId, addOnCode, quantity) { - const subscription = await getSubscriptionForUser(userId) - try { - await 
RecurlyClient.promises.getAddOn(subscription.planCode, addOnCode) - } catch (err) { - if (err instanceof recurly.errors.NotFoundError) { - throw new NotFoundError({ - message: 'Add-on not found', - info: { addOnCode }, - }) - } - throw err - } - const changeRequest = subscription.getRequestForAddOnPurchase( + await Modules.promises.hooks.fire( + 'purchaseAddOn', + userId, addOnCode, quantity ) - await RecurlyClient.promises.applySubscriptionChangeRequest(changeRequest) - await syncSubscription({ uuid: subscription.id }, userId) } /** @@ -305,51 +289,17 @@ async function purchaseAddon(userId, addOnCode, quantity) { * @param {string} addOnCode */ async function removeAddon(userId, addOnCode) { - const subscription = await getSubscriptionForUser(userId) - const changeRequest = subscription.getRequestForAddOnRemoval(addOnCode) - await RecurlyClient.promises.applySubscriptionChangeRequest(changeRequest) - await syncSubscription({ uuid: subscription.id }, userId) -} - -/** - * Returns the Recurly UUID for the given user - * - * Throws a NotFoundError if the subscription can't be found - * - * @param {string} userId - * @return {Promise} - */ -async function getSubscriptionForUser(userId) { - const subscription = - await SubscriptionLocator.promises.getUsersSubscription(userId) - const recurlyId = subscription?.recurlySubscription_id - if (recurlyId == null) { - throw new NotFoundError({ - message: 'Recurly subscription not found', - info: { userId }, - }) - } - - try { - const subscription = await RecurlyClient.promises.getSubscription(recurlyId) - return subscription - } catch (err) { - if (err instanceof recurly.errors.NotFoundError) { - throw new NotFoundError({ - message: 'Subscription not found', - info: { userId, recurlyId }, - }) - } else { - throw err - } - } + await Modules.promises.hooks.fire('removeAddOn', userId, addOnCode) } async function pauseSubscription(user, pauseCycles) { // only allow pausing on monthly plans not in a trial const { subscription } = 
await LimitationsManager.promises.userHasSubscription(user) - if (!subscription || !subscription.recurlyStatus) { + if ( + !subscription || + !SubscriptionHelper.getPaidSubscriptionState(subscription) + ) { throw new Error('No active subscription to pause') } @@ -360,10 +310,9 @@ async function pauseSubscription(user, pauseCycles) { ) { throw new Error('Can only pause monthly individual plans') } - if ( - subscription.recurlyStatus.trialEndsAt && - subscription.recurlyStatus.trialEndsAt > new Date() - ) { + const trialEndsAt = + SubscriptionHelper.getSubscriptionTrialEndsAt(subscription) + if (trialEndsAt && trialEndsAt > new Date()) { throw new Error('Cannot pause a subscription in a trial') } if (subscription.addOns?.length) { @@ -379,7 +328,10 @@ async function pauseSubscription(user, pauseCycles) { async function resumeSubscription(user) { const { subscription } = await LimitationsManager.promises.userHasSubscription(user) - if (!subscription || !subscription.recurlyStatus) { + if ( + !subscription || + !SubscriptionHelper.getPaidSubscriptionState(subscription) + ) { throw new Error('No active subscription to resume') } await RecurlyClient.promises.resumeSubscriptionByUuid( @@ -432,7 +384,7 @@ async function revertPlanChange( throw new IndeterminateInvoiceError( 'cant determine invoice to fail for plan revert', { - info: { recurlySubscriptionId }, + recurlySubscriptionId, } ) } diff --git a/services/web/app/src/Features/Subscription/SubscriptionHelper.js b/services/web/app/src/Features/Subscription/SubscriptionHelper.js index efb8895280..429432349d 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionHelper.js +++ b/services/web/app/src/Features/Subscription/SubscriptionHelper.js @@ -1,11 +1,25 @@ const { formatCurrency } = require('../../util/currency') const GroupPlansData = require('./GroupPlansData') +const { isStandaloneAiAddOnPlanCode } = require('./PaymentProviderEntities') /** * If the user changes to a less expensive plan, we shouldn't 
apply the change immediately. * This is to avoid unintended/artifical credits on users Recurly accounts. */ -function shouldPlanChangeAtTermEnd(oldPlan, newPlan) { +function shouldPlanChangeAtTermEnd(oldPlan, newPlan, isInTrial) { + if (isInTrial) { + // we should always upgrade or downgrade immediately if actively in trial + return false + } + + if ( + oldPlan.annual === newPlan.annual && + isStandaloneAiAddOnPlanCode(oldPlan.planCode) && + !isStandaloneAiAddOnPlanCode(newPlan.planCode) + ) { + // changing from a standalone AI add-on plan to a non-AI plan should not be considered a downgrade + return false + } return oldPlan.price_in_cents > newPlan.price_in_cents } @@ -86,7 +100,75 @@ function generateInitialLocalizedGroupPrice(recommendedCurrency, locale) { } } +function isPaidSubscription(subscription) { + const hasRecurlySubscription = + subscription?.recurlySubscription_id && + subscription?.recurlySubscription_id !== '' + const hasStripeSubscription = + subscription?.paymentProvider?.subscriptionId && + subscription?.paymentProvider?.subscriptionId !== '' + return !!(subscription && (hasRecurlySubscription || hasStripeSubscription)) +} + +function isIndividualActivePaidSubscription(subscription) { + return ( + isPaidSubscription(subscription) && + subscription?.groupPlan === false && + subscription?.recurlyStatus?.state !== 'canceled' && + subscription?.paymentProvider?.state !== 'canceled' + ) +} + +function getPaymentProviderSubscriptionId(subscription) { + if (subscription?.recurlySubscription_id) { + return subscription.recurlySubscription_id + } + if (subscription?.paymentProvider?.subscriptionId) { + return subscription.paymentProvider.subscriptionId + } + return null +} + +function getPaidSubscriptionState(subscription) { + if (subscription?.recurlyStatus?.state) { + return subscription.recurlyStatus.state + } + if (subscription?.paymentProvider?.state) { + return subscription.paymentProvider.state + } + return null +} + +function 
getSubscriptionTrialStartedAt(subscription) { + if (subscription?.recurlyStatus?.trialStartedAt) { + return subscription.recurlyStatus?.trialStartedAt + } + return subscription?.paymentProvider?.trialStartedAt +} + +function getSubscriptionTrialEndsAt(subscription) { + if (subscription?.recurlyStatus?.trialEndsAt) { + return subscription.recurlyStatus?.trialEndsAt + } + return subscription?.paymentProvider?.trialEndsAt +} + +function isInTrial(trialEndsAt) { + if (!trialEndsAt) { + return false + } + + return trialEndsAt.getTime() > Date.now() +} + module.exports = { shouldPlanChangeAtTermEnd, generateInitialLocalizedGroupPrice, + isPaidSubscription, + isIndividualActivePaidSubscription, + getPaymentProviderSubscriptionId, + getPaidSubscriptionState, + getSubscriptionTrialStartedAt, + getSubscriptionTrialEndsAt, + isInTrial, } diff --git a/services/web/app/src/Features/Subscription/SubscriptionLocator.js b/services/web/app/src/Features/Subscription/SubscriptionLocator.js index 978f4d41b7..c0c107eecf 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionLocator.js +++ b/services/web/app/src/Features/Subscription/SubscriptionLocator.js @@ -162,6 +162,45 @@ const SubscriptionLocator = { } : null }, + + async getUserSubscriptionStatus(userId) { + let usersSubscription = { personal: false, group: false } + + if (!userId) { + return usersSubscription + } + + const memberSubscriptions = + await SubscriptionLocator.getMemberSubscriptions(userId) + + const hasActiveGroupSubscription = memberSubscriptions.some( + subscription => + subscription.recurlyStatus?.state === 'active' && subscription.groupPlan + ) + if (hasActiveGroupSubscription) { + // Member of a group plan + usersSubscription = { ...usersSubscription, group: true } + } + + const personalSubscription = + await SubscriptionLocator.getUsersSubscription(userId) + + if (personalSubscription) { + const hasActivePersonalSubscription = + personalSubscription.recurlyStatus?.state === 'active' + if 
(hasActivePersonalSubscription) { + if (personalSubscription.groupPlan) { + // Owner of a group plan + usersSubscription = { ...usersSubscription, group: true } + } else { + // Owner of an individual plan + usersSubscription = { ...usersSubscription, personal: true } + } + } + } + + return usersSubscription + }, } module.exports = { diff --git a/services/web/app/src/Features/Subscription/SubscriptionUpdater.js b/services/web/app/src/Features/Subscription/SubscriptionUpdater.js index b0e24ce5ad..9de194f262 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionUpdater.js +++ b/services/web/app/src/Features/Subscription/SubscriptionUpdater.js @@ -10,6 +10,7 @@ const { DeletedSubscription } = require('../../models/DeletedSubscription') const logger = require('@overleaf/logger') const Features = require('../../infrastructure/Features') const UserAuditLogHandler = require('../User/UserAuditLogHandler') +const UserUpdater = require('../User/UserUpdater') const AccountMappingHelper = require('../Analytics/AccountMappingHelper') const { SSOConfig } = require('../../models/SSOConfig') const mongoose = require('../../infrastructure/Mongoose') @@ -145,6 +146,20 @@ async function removeUserFromGroup(subscriptionId, userId, auditLog) { { _id: subscriptionId }, { $pull: { member_ids: userId } } ).exec() + + const subscription = await Subscription.findById(subscriptionId) + if (subscription.managedUsersEnabled) { + await UserUpdater.promises.updateUser( + { _id: userId }, + { + $unset: { + 'enrollment.managedBy': 1, + 'enrollment.enrolledAt': 1, + }, + } + ) + } + await FeaturesUpdater.promises.refreshFeatures( userId, 'remove-user-from-group' @@ -318,38 +333,7 @@ async function updateSubscriptionFromRecurly( requesterData ) { if (recurlySubscription.state === 'expired') { - const hasManagedUsersFeature = - Features.hasFeature('saas') && subscription?.managedUsersEnabled - - // If a payment lapses and if the group is managed or has group SSO, as a temporary measure 
we need to - // make sure that the group continues as-is and no destructive actions are taken. - if (hasManagedUsersFeature) { - logger.warn( - { subscriptionId: subscription._id }, - 'expired subscription has managedUsers feature enabled, skipping deletion' - ) - } else { - let hasGroupSSOEnabled = false - if (subscription?.ssoConfig) { - const ssoConfig = await SSOConfig.findOne({ - _id: subscription.ssoConfig._id || subscription.ssoConfig, - }) - .lean() - .exec() - if (ssoConfig.enabled) { - hasGroupSSOEnabled = true - } - } - - if (hasGroupSSOEnabled) { - logger.warn( - { subscriptionId: subscription._id }, - 'expired subscription has groupSSO feature enabled, skipping deletion' - ) - } else { - await deleteSubscription(subscription, requesterData) - } - } + await handleExpiredSubscription(subscription, requesterData) return } const updatedPlanCode = recurlySubscription.plan.plan_code @@ -450,6 +434,41 @@ async function _sendUserGroupPlanCodeUserProperty(userId) { } } +async function handleExpiredSubscription(subscription, requesterData) { + const hasManagedUsersFeature = + Features.hasFeature('saas') && subscription?.managedUsersEnabled + + // If a payment lapses and if the group is managed or has group SSO, as a temporary measure we need to + // make sure that the group continues as-is and no destructive actions are taken. 
+ if (hasManagedUsersFeature) { + logger.warn( + { subscriptionId: subscription._id }, + 'expired subscription has managedUsers feature enabled, skipping deletion' + ) + } else { + let hasGroupSSOEnabled = false + if (subscription?.ssoConfig) { + const ssoConfig = await SSOConfig.findOne({ + _id: subscription.ssoConfig._id || subscription.ssoConfig, + }) + .lean() + .exec() + if (ssoConfig.enabled) { + hasGroupSSOEnabled = true + } + } + + if (hasGroupSSOEnabled) { + logger.warn( + { subscriptionId: subscription._id }, + 'expired subscription has groupSSO feature enabled, skipping deletion' + ) + } else { + await deleteSubscription(subscription, requesterData) + } + } +} + async function _sendSubscriptionEvent(userId, subscriptionId, event) { const subscription = await Subscription.findOne( { _id: subscriptionId }, @@ -503,7 +522,7 @@ async function setRestorePoint(subscriptionId, planCode, addOns, consumed) { } if (consumed) { - update.$inc = { revertedDueToFailedPayment: 1 } + update.$inc = { timesRevertedDueToFailedPayment: 1 } } await Subscription.updateOne({ _id: subscriptionId }, update).exec() @@ -568,5 +587,6 @@ module.exports = { setRestorePoint, setSubscriptionWasReverted, voidRestorePoint, + handleExpiredSubscription, }, } diff --git a/services/web/app/src/Features/Subscription/SubscriptionViewModelBuilder.js b/services/web/app/src/Features/Subscription/SubscriptionViewModelBuilder.js index 441d9c2c9b..3681975a38 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionViewModelBuilder.js +++ b/services/web/app/src/Features/Subscription/SubscriptionViewModelBuilder.js @@ -1,6 +1,5 @@ // ts-check const Settings = require('@overleaf/settings') -const RecurlyWrapper = require('./RecurlyWrapper') const PlansLocator = require('./PlansLocator') const { isStandaloneAiAddOnPlanCode, @@ -8,7 +7,6 @@ const { } = require('./PaymentProviderEntities') const SubscriptionFormatters = require('./SubscriptionFormatters') const SubscriptionLocator = 
require('./SubscriptionLocator') -const SubscriptionUpdater = require('./SubscriptionUpdater') const InstitutionsGetter = require('../Institutions/InstitutionsGetter') const InstitutionsManager = require('../Institutions/InstitutionsManager') const PublishersGetter = require('../Publishers/PublishersGetter') @@ -227,6 +225,7 @@ async function buildUsersSubscriptionViewModel(user, locale = 'en') { // don't return subscription payment information delete personalSubscription.paymentProvider delete personalSubscription.recurly + delete personalSubscription.recurlySubscription_id const tax = paymentRecord.subscription.taxAmount || 0 // Some plans allow adding more seats than the base plan provides. @@ -374,15 +373,6 @@ async function buildUsersSubscriptionViewModel(user, locale = 'en') { } } -/** - * @param {{_id: string}} user - * @returns {Promise} - */ -async function getBestSubscription(user) { - const { bestSubscription } = await getUsersSubscriptionDetails(user) - return bestSubscription -} - /** * @param {{_id: string}} user * @returns {Promise<{bestSubscription:Subscription,individualSubscription:DBSubscription|null,memberGroupSubscriptions:DBSubscription[]}>} @@ -400,15 +390,18 @@ async function getUsersSubscriptionDetails(user) { if ( individualSubscription && !individualSubscription.customAccount && - individualSubscription.recurlySubscription_id && - !individualSubscription.recurlyStatus?.state + SubscriptionHelper.getPaymentProviderSubscriptionId( + individualSubscription + ) && + !SubscriptionHelper.getPaidSubscriptionState(individualSubscription) ) { - const recurlySubscription = await RecurlyWrapper.promises.getSubscription( - individualSubscription.recurlySubscription_id, - { includeAccount: true } + const paymentResults = await Modules.promises.hooks.fire( + 'getPaymentFromRecordPromise', + individualSubscription ) - await SubscriptionUpdater.promises.updateSubscriptionFromRecurly( - recurlySubscription, + await Modules.promises.hooks.fire( + 
'syncSubscription', + paymentResults[0]?.subscription, individualSubscription ) individualSubscription = @@ -477,7 +470,7 @@ async function getUsersSubscriptionDetails(user) { return { bestSubscription, individualSubscription, memberGroupSubscriptions } } -function buildPlansList(currentPlan) { +function buildPlansList(currentPlan, isInTrial) { const { plans } = Settings const allPlans = {} @@ -491,7 +484,11 @@ function buildPlansList(currentPlan) { result.planCodesChangingAtTermEnd = _.map( _.filter(plans, plan => { if (!plan.hideFromUsers) { - return SubscriptionHelper.shouldPlanChangeAtTermEnd(currentPlan, plan) + return SubscriptionHelper.shouldPlanChangeAtTermEnd( + currentPlan, + plan, + isInTrial + ) } }), 'planCode' @@ -540,7 +537,8 @@ function _isPlanEqualOrBetter(planA, planB) { function _getRemainingTrialDays(subscription) { const now = new Date() - const trialEndDate = subscription.recurlyStatus?.trialEndsAt + const trialEndDate = + SubscriptionHelper.getSubscriptionTrialEndsAt(subscription) return trialEndDate && trialEndDate > now ? 
Math.ceil( (trialEndDate.getTime() - now.getTime()) / (24 * 60 * 60 * 1000) @@ -575,8 +573,8 @@ function buildGroupSubscriptionForView(groupSubscription) { } } -function buildPlansListForSubscriptionDash(currentPlan) { - const allPlansData = buildPlansList(currentPlan) +function buildPlansListForSubscriptionDash(currentPlan, isInTrial) { + const allPlansData = buildPlansList(currentPlan, isInTrial) const plans = [] // only list individual and visible plans for "change plans" UI if (allPlansData.studentAccounts) { @@ -605,10 +603,8 @@ module.exports = { buildUsersSubscriptionViewModel: callbackify(buildUsersSubscriptionViewModel), buildPlansList, buildPlansListForSubscriptionDash, - getBestSubscription: callbackify(getBestSubscription), promises: { buildUsersSubscriptionViewModel, - getBestSubscription, getUsersSubscriptionDetails, }, } diff --git a/services/web/app/src/Features/Subscription/TeamInvitesController.mjs b/services/web/app/src/Features/Subscription/TeamInvitesController.mjs index b2c9840de4..1eb9ac2907 100644 --- a/services/web/app/src/Features/Subscription/TeamInvitesController.mjs +++ b/services/web/app/src/Features/Subscription/TeamInvitesController.mjs @@ -4,6 +4,7 @@ import OError from '@overleaf/o-error' import TeamInvitesHandler from './TeamInvitesHandler.js' import SessionManager from '../Authentication/SessionManager.js' import SubscriptionLocator from './SubscriptionLocator.js' +import SubscriptionHelper from './SubscriptionHelper.js' import ErrorController from '../Errors/ErrorController.js' import EmailHelper from '../Helpers/EmailHelper.js' import UserGetter from '../User/UserGetter.js' @@ -14,6 +15,7 @@ import EmailHandler from '../Email/EmailHandler.js' import { RateLimiter } from '../../infrastructure/RateLimiter.js' import Modules from '../../infrastructure/Modules.js' import UserAuditLogHandler from '../User/UserAuditLogHandler.js' +import { sanitizeSessionUserForFrontEnd } from '../../infrastructure/FrontEndUser.js' const rateLimiters 
= { resendGroupInvite: new RateLimiter('resend-group-invite', { @@ -87,12 +89,10 @@ async function viewInvite(req, res, next) { const personalSubscription = await SubscriptionLocator.promises.getUsersSubscription(userId) - const hasIndividualRecurlySubscription = - personalSubscription && - personalSubscription.groupPlan === false && - personalSubscription.recurlyStatus?.state !== 'canceled' && - personalSubscription.recurlySubscription_id && - personalSubscription.recurlySubscription_id !== '' + const hasIndividualPaidSubscription = + SubscriptionHelper.isIndividualActivePaidSubscription( + personalSubscription + ) if (subscription?.managedUsersEnabled) { if (!subscription.populated('groupPolicy')) { @@ -133,6 +133,9 @@ async function viewInvite(req, res, next) { logger.error({ err }, 'error getting subscription admin email') } + const usersSubscription = + await SubscriptionLocator.promises.getUserSubscriptionStatus(userId) + return res.render('subscriptions/team/invite-managed', { inviterName: invite.inviterName, inviteToken: invite.token, @@ -141,7 +144,8 @@ async function viewInvite(req, res, next) { currentManagedUserAdminEmail, groupSSOActive, subscriptionId: subscription._id.toString(), - user: sessionUser, + user: sanitizeSessionUserForFrontEnd(sessionUser), + usersSubscription, }) } else { let currentManagedUserAdminEmail @@ -155,13 +159,13 @@ async function viewInvite(req, res, next) { return res.render('subscriptions/team/invite', { inviterName: invite.inviterName, inviteToken: invite.token, - hasIndividualRecurlySubscription, + hasIndividualPaidSubscription, expired: req.query.expired, userRestrictions: Array.from(req.userRestrictions || []), currentManagedUserAdminEmail, groupSSOActive, subscriptionId: subscription._id.toString(), - user: sessionUser, + user: sanitizeSessionUserForFrontEnd(sessionUser), }) } } else { @@ -203,7 +207,7 @@ async function acceptInvite(req, res, next) { const subscription = await TeamInvitesHandler.promises.acceptInvite( 
token, userId, - { initiatorId: userId, ipAddress: req.ip } + req.ip ) const groupSSOActive = ( await Modules.promises.hooks.fire('hasGroupSSOEnabled', subscription) diff --git a/services/web/app/src/Features/Subscription/TeamInvitesHandler.js b/services/web/app/src/Features/Subscription/TeamInvitesHandler.js index a89f0612f2..f7a4908355 100644 --- a/services/web/app/src/Features/Subscription/TeamInvitesHandler.js +++ b/services/web/app/src/Features/Subscription/TeamInvitesHandler.js @@ -22,6 +22,7 @@ const { callbackifyMultiResult, } = require('@overleaf/promise-utils') const NotificationsBuilder = require('../Notifications/NotificationsBuilder') +const RecurlyClient = require('./RecurlyClient') async function getInvite(token) { const subscription = await Subscription.findOne({ @@ -64,11 +65,50 @@ async function importInvite(subscription, inviterName, email, token, sentAt) { return subscription.save() } -async function acceptInvite(token, userId, auditLog) { +async function _deleteUserSubscription(userId, ipAddress) { + // Delete released user subscription to make it on a free plan + const subscription = + await SubscriptionLocator.promises.getUsersSubscription(userId) + + if (subscription) { + logger.debug( + { + subscriptionId: subscription._id, + }, + 'deleting user subscription' + ) + + const deleterData = { + id: userId, + ip: ipAddress, + } + await SubscriptionUpdater.promises.deleteSubscription( + subscription, + deleterData + ) + + // Terminate the subscription in Recurly + if (subscription.recurlySubscription_id) { + try { + await RecurlyClient.promises.terminateSubscriptionByUuid( + subscription.recurlySubscription_id + ) + } catch (err) { + logger.error( + { err, subscriptionId: subscription._id }, + 'terminating subscription failed' + ) + } + } + } +} + +async function acceptInvite(token, userId, ipAddress) { const { invite, subscription } = await getInvite(token) if (!invite) { throw new Errors.NotFoundError('invite not found') } + const auditLog = { 
initiatorId: userId, ipAddress } await SubscriptionUpdater.promises.addUserToGroup( subscription._id, @@ -77,6 +117,7 @@ async function acceptInvite(token, userId, auditLog) { ) if (subscription.managedUsersEnabled) { + await _deleteUserSubscription(userId, ipAddress) await Modules.promises.hooks.fire( 'enrollInManagedSubscription', userId, diff --git a/services/web/app/src/Features/Templates/TemplatesController.js b/services/web/app/src/Features/Templates/TemplatesController.js index a8730a61be..39c4d50ae0 100644 --- a/services/web/app/src/Features/Templates/TemplatesController.js +++ b/services/web/app/src/Features/Templates/TemplatesController.js @@ -4,13 +4,9 @@ const TemplatesManager = require('./TemplatesManager') const ProjectHelper = require('../Project/ProjectHelper') const logger = require('@overleaf/logger') const { expressify } = require('@overleaf/promise-utils') -const SplitTestHandler = require('../SplitTests/SplitTestHandler') const TemplatesController = { async getV1Template(req, res) { - // Read split test assignment so that it's available for Pug to read - await SplitTestHandler.promises.getAssignment(req, res, 'core-pug-bs5') - const templateVersionId = req.params.Template_version_id const templateId = req.query.id if (!/^[0-9]+$/.test(templateVersionId) || !/^[0-9]+$/.test(templateId)) { diff --git a/services/web/app/src/Features/Tutorial/TutorialController.mjs b/services/web/app/src/Features/Tutorial/TutorialController.mjs index e5fc940b34..b4ab3f6727 100644 --- a/services/web/app/src/Features/Tutorial/TutorialController.mjs +++ b/services/web/app/src/Features/Tutorial/TutorialController.mjs @@ -15,6 +15,7 @@ const VALID_KEYS = [ 'editor-popup-ux-survey', 'wf-features-moved', 'review-mode', + 'new-error-logs-promo', ] async function completeTutorial(req, res, next) { diff --git a/services/web/app/src/Features/User/SAMLIdentityManager.js b/services/web/app/src/Features/User/SAMLIdentityManager.js index dc790c59ca..0d3c382775 100644 --- 
a/services/web/app/src/Features/User/SAMLIdentityManager.js +++ b/services/web/app/src/Features/User/SAMLIdentityManager.js @@ -210,9 +210,13 @@ async function getUser(providerId, externalUserId, userIdAttribute) { ) } const user = await User.findOne({ - 'samlIdentifiers.externalUserId': externalUserId.toString(), - 'samlIdentifiers.providerId': providerId.toString(), - 'samlIdentifiers.userIdAttribute': userIdAttribute.toString(), + samlIdentifiers: { + $elemMatch: { + externalUserId: externalUserId.toString(), + providerId: providerId.toString(), + userIdAttribute: userIdAttribute.toString(), + }, + }, }).exec() return user diff --git a/services/web/app/src/Features/User/UserAuditLogHandler.js b/services/web/app/src/Features/User/UserAuditLogHandler.js index b1d404303e..87cd810161 100644 --- a/services/web/app/src/Features/User/UserAuditLogHandler.js +++ b/services/web/app/src/Features/User/UserAuditLogHandler.js @@ -8,6 +8,7 @@ function _canHaveNoIpAddressId(operation, info) { if (operation === 'must-reset-password-set') return true if (operation === 'remove-email' && info.script) return true if (operation === 'release-managed-user' && info.script) return true + if (operation === 'unlink-dropbox' && info.batch) return true return false } diff --git a/services/web/app/src/Features/User/UserController.js b/services/web/app/src/Features/User/UserController.js index e4186d39a8..cabab8c891 100644 --- a/services/web/app/src/Features/User/UserController.js +++ b/services/web/app/src/Features/User/UserController.js @@ -387,6 +387,9 @@ async function updateUserSettings(req, res, next) { if (req.body.mathPreview != null) { user.ace.mathPreview = req.body.mathPreview } + if (req.body.breadcrumbs != null) { + user.ace.breadcrumbs = Boolean(req.body.breadcrumbs) + } if (req.body.referencesSearchMode != null) { const mode = req.body.referencesSearchMode === 'simple' ? 
'simple' : 'advanced' @@ -515,4 +518,5 @@ module.exports = { expireDeletedUsersAfterDuration: expressify(expireDeletedUsersAfterDuration), ensureAffiliationMiddleware: expressify(ensureAffiliationMiddleware), ensureAffiliation, + doLogout, } diff --git a/services/web/app/src/Features/User/UserDeleter.js b/services/web/app/src/Features/User/UserDeleter.js index 662c51ca65..c8d9891bf9 100644 --- a/services/web/app/src/Features/User/UserDeleter.js +++ b/services/web/app/src/Features/User/UserDeleter.js @@ -87,17 +87,29 @@ async function deleteMongoUser(userId) { } async function expireDeletedUser(userId) { - await Modules.promises.hooks.fire('expireDeletedUser', userId) - const deletedUser = await DeletedUser.findOne({ - 'deleterData.deletedUserId': userId, - }).exec() - - await Feedback.deleteMany({ userId }).exec() - await OnboardingDataCollectionManager.deleteOnboardingDataCollection(userId) - - deletedUser.user = undefined - deletedUser.deleterData.deleterIpAddress = undefined - await deletedUser.save() + logger.info({ userId }, 'expiring deleted user') + try { + logger.info({ userId }, 'firing expireDeletedUser hook') + await Modules.promises.hooks.fire('expireDeletedUser', userId) + logger.info({ userId }, 'removing deleted user feedback records') + await Feedback.deleteMany({ userId }).exec() + logger.info({ userId }, 'removing deleted user onboarding data') + await OnboardingDataCollectionManager.deleteOnboardingDataCollection(userId) + logger.info({ userId }, 'redacting PII from the deleted user record') + const deletedUser = await DeletedUser.findOne({ + 'deleterData.deletedUserId': userId, + }).exec() + deletedUser.user = undefined + deletedUser.deleterData.deleterIpAddress = undefined + await deletedUser.save() + logger.info({ userId }, 'deleted user expiry complete') + } catch (error) { + logger.warn( + { error, userId }, + 'something went wrong expiring the deleted user' + ) + throw error + } } async function expireDeletedUsersAfterDuration() { @@ 
-112,11 +124,27 @@ async function expireDeletedUsersAfterDuration() { if (deletedUsers.length === 0) { return } - - for (let i = 0; i < deletedUsers.length; i++) { - const deletedUserId = deletedUsers[i].deleterData.deletedUserId - await expireDeletedUser(deletedUserId) - await UserAuditLogEntry.deleteMany({ userId: deletedUserId }).exec() + logger.info( + { deletedUsers: deletedUsers.length, retentionPeriodInDays: DURATION }, + 'expiring batch of deleted users older than retention period' + ) + try { + for (let i = 0; i < deletedUsers.length; i++) { + const deletedUserId = deletedUsers[i].deleterData.deletedUserId + await expireDeletedUser(deletedUserId) + logger.info({ deletedUserId }, 'removing deleted user audit log entries') + await UserAuditLogEntry.deleteMany({ userId: deletedUserId }).exec() + } + logger.info( + { deletedUsers: deletedUsers.length }, + 'batch of deleted users expired successfully' + ) + } catch (error) { + logger.warn( + { error }, + 'something went wrong expiring batch of deleted users' + ) + throw error } } diff --git a/services/web/app/src/Features/User/UserGetter.js b/services/web/app/src/Features/User/UserGetter.js index bce4568880..a5fbe42651 100644 --- a/services/web/app/src/Features/User/UserGetter.js +++ b/services/web/app/src/Features/User/UserGetter.js @@ -269,6 +269,7 @@ const UserGetter = { getUsers(query, projection, callback) { try { query = normalizeMultiQuery(query) + if (query?._id?.$in?.length === 0) return callback(null, []) // shortcut for getUsers([]) db.users.find(query, { projection }).toArray(callback) } catch (err) { callback(err) diff --git a/services/web/app/src/Features/User/UserPagesController.mjs b/services/web/app/src/Features/User/UserPagesController.mjs index 29fc505a7c..2f5d46d0d3 100644 --- a/services/web/app/src/Features/User/UserPagesController.mjs +++ b/services/web/app/src/Features/User/UserPagesController.mjs @@ -53,10 +53,8 @@ async function settingsPage(req, res) { const reconfirmedViaSAML = 
_.get(req.session, ['saml', 'reconfirmed']) delete req.session.saml let shouldAllowEditingDetails = true - if (Settings.ldap && Settings.ldap.updateUserDetailsOnLogin) { - shouldAllowEditingDetails = false - } - if (Settings.saml && Settings.saml.updateUserDetailsOnLogin) { + const externalAuth = req.user.externalAuth + if (externalAuth && Settings[externalAuth].updateUserDetailsOnLogin) { shouldAllowEditingDetails = false } const oauthProviders = Settings.oauthProviders || {} @@ -176,6 +174,7 @@ async function settingsPage(req, res) { gitBridgeEnabled: Settings.enableGitBridge, isSaas: Features.hasFeature('saas'), memberOfSSOEnabledGroups, + capabilities: [...req.capabilitySet], }) } diff --git a/services/web/app/src/Features/User/UserUpdater.js b/services/web/app/src/Features/User/UserUpdater.js index 627e73875d..f21ee9a1ed 100644 --- a/services/web/app/src/Features/User/UserUpdater.js +++ b/services/web/app/src/Features/User/UserUpdater.js @@ -11,7 +11,6 @@ const EmailHandler = require('../Email/EmailHandler') const EmailHelper = require('../Helpers/EmailHelper') const Errors = require('../Errors/Errors') const NewsletterManager = require('../Newsletter/NewsletterManager') -const RecurlyWrapper = require('../Subscription/RecurlyWrapper') const UserAuditLogHandler = require('./UserAuditLogHandler') const AnalyticsManager = require('../Analytics/AnalyticsManager') const SubscriptionLocator = require('../Subscription/SubscriptionLocator') @@ -252,7 +251,11 @@ async function setDefaultEmailAddress( } try { - await RecurlyWrapper.promises.updateAccountEmailAddress(user._id, email) + await Modules.promises.hooks.fire( + 'updateAccountEmailAddress', + user._id, + email + ) } catch (error) { // errors are ignored } diff --git a/services/web/app/src/Features/UserMembership/UserMembershipController.mjs b/services/web/app/src/Features/UserMembership/UserMembershipController.mjs index aaa8fa5812..4be1221255 100644 --- 
a/services/web/app/src/Features/UserMembership/UserMembershipController.mjs +++ b/services/web/app/src/Features/UserMembership/UserMembershipController.mjs @@ -31,8 +31,11 @@ async function manageGroupMembers(req, res, next) { ) const ssoConfig = await SSOConfig.findById(subscription.ssoConfig).exec() const plan = PlansLocator.findLocalPlanInSettings(subscription.planCode) - const userId = SessionManager.getLoggedInUserId(req.session) + const userId = SessionManager.getLoggedInUserId(req.session)?.toString() const isAdmin = subscription.admin_id.toString() === userId + const isUserGroupManager = + Boolean(subscription.manager_ids?.some(id => id.toString() === userId)) && + !isAdmin const recurlySubscription = subscription.recurlySubscription_id ? await RecurlyClient.promises.getSubscription( subscription.recurlySubscription_id @@ -51,6 +54,7 @@ async function manageGroupMembers(req, res, next) { users, groupSize: subscription.membersLimit, managedUsersActive: subscription.managedUsersEnabled, + isUserGroupManager, groupSSOActive: ssoConfig?.enabled, canUseFlexibleLicensing: plan?.canUseFlexibleLicensing, canUseAddSeatsFeature, diff --git a/services/web/app/src/infrastructure/ExpressLocals.js b/services/web/app/src/infrastructure/ExpressLocals.js index eae1b48219..836670233c 100644 --- a/services/web/app/src/infrastructure/ExpressLocals.js +++ b/services/web/app/src/infrastructure/ExpressLocals.js @@ -19,6 +19,7 @@ const { const { addOptionalCleanupHandlerAfterDrainingConnections, } = require('./GracefulShutdown') +const { sanitizeSessionUserForFrontEnd } = require('./FrontEndUser') const IEEE_BRAND_ID = Settings.ieeeBrandId @@ -106,9 +107,9 @@ module.exports = function (webRouter, privateApiRouter, publicApiRouter) { webRouter.use(function (req, res, next) { req.externalAuthenticationSystemUsed = - Features.externalAuthenticationSystemUsed + () => !!req?.user?.externalAuth res.locals.externalAuthenticationSystemUsed = - Features.externalAuthenticationSystemUsed + 
() => !!req?.user?.externalAuth req.hasFeature = res.locals.hasFeature = Features.hasFeature next() }) @@ -300,11 +301,7 @@ module.exports = function (webRouter, privateApiRouter, publicApiRouter) { webRouter.use(function (req, res, next) { const currentUser = SessionManager.getSessionUser(req.session) if (currentUser != null) { - res.locals.user = { - email: currentUser.email, - first_name: currentUser.first_name, - last_name: currentUser.last_name, - } + res.locals.user = sanitizeSessionUserForFrontEnd(currentUser) } next() }) diff --git a/services/web/app/src/infrastructure/Features.js b/services/web/app/src/infrastructure/Features.js index aaf51103b9..3264c323cd 100644 --- a/services/web/app/src/infrastructure/Features.js +++ b/services/web/app/src/infrastructure/Features.js @@ -56,7 +56,7 @@ const Features = { case 'registration-page': return ( !Features.externalAuthenticationSystemUsed() || - Boolean(Settings.overleaf) + Boolean(Settings.overleaf) || Settings.oidc?.disableJITAccountCreation ) case 'registration': return Boolean(Settings.overleaf) diff --git a/services/web/app/src/infrastructure/FrontEndUser.js b/services/web/app/src/infrastructure/FrontEndUser.js new file mode 100644 index 0000000000..5a4af9868c --- /dev/null +++ b/services/web/app/src/infrastructure/FrontEndUser.js @@ -0,0 +1,15 @@ +function sanitizeSessionUserForFrontEnd(sessionUser) { + if (sessionUser != null) { + return { + email: sessionUser.email, + first_name: sessionUser.first_name, + last_name: sessionUser.last_name, + } + } + + return null +} + +module.exports = { + sanitizeSessionUserForFrontEnd, +} diff --git a/services/web/app/src/infrastructure/mongodb.js b/services/web/app/src/infrastructure/mongodb.js index a3342c6575..24103b2d82 100644 --- a/services/web/app/src/infrastructure/mongodb.js +++ b/services/web/app/src/infrastructure/mongodb.js @@ -61,7 +61,6 @@ const db = { projectHistoryFailures: internalDb.collection('projectHistoryFailures'), projectHistoryGlobalBlobs: 
internalDb.collection('projectHistoryGlobalBlobs'), projectHistoryLabels: internalDb.collection('projectHistoryLabels'), - projectHistoryMetaData: internalDb.collection('projectHistoryMetaData'), projectHistorySyncState: internalDb.collection('projectHistorySyncState'), projectInvites: internalDb.collection('projectInvites'), projects: internalDb.collection('projects'), diff --git a/services/web/app/src/models/GroupPolicy.js b/services/web/app/src/models/GroupPolicy.js index e975834008..55728a2415 100644 --- a/services/web/app/src/models/GroupPolicy.js +++ b/services/web/app/src/models/GroupPolicy.js @@ -27,6 +27,9 @@ const GroupPolicySchema = new Schema( // User can't use the chat feature userCannotUseChat: Boolean, + + // User can't use the Dropbox feature + userCannotUseDropbox: Boolean, }, { minimize: false } ) diff --git a/services/web/app/src/models/SSOConfig.js b/services/web/app/src/models/SSOConfig.js index 5d50d51d02..6734b29f57 100644 --- a/services/web/app/src/models/SSOConfig.js +++ b/services/web/app/src/models/SSOConfig.js @@ -10,6 +10,7 @@ const SSOConfigSchema = new Schema( userLastNameAttribute: { type: String }, validated: { type: Boolean, default: false }, enabled: { type: Boolean, default: false }, + useSettingsUKAMF: { type: Boolean, default: false }, }, { diff --git a/services/web/app/src/models/User.js b/services/web/app/src/models/User.js index d228c46b82..c1701023c4 100644 --- a/services/web/app/src/models/User.js +++ b/services/web/app/src/models/User.js @@ -97,6 +97,7 @@ const UserSchema = new Schema( fontFamily: { type: String }, lineHeight: { type: String }, mathPreview: { type: Boolean, default: true }, + breadcrumbs: { type: Boolean, default: true }, referencesSearchMode: { type: String, default: 'advanced' }, // 'advanced' or 'simple' enableNewEditor: { type: Boolean }, }, diff --git a/services/web/app/src/router.mjs b/services/web/app/src/router.mjs index a7e8d5e05f..484c4e7960 100644 --- a/services/web/app/src/router.mjs +++ 
b/services/web/app/src/router.mjs @@ -217,6 +217,8 @@ async function initialize(webRouter, privateApiRouter, publicApiRouter) { CaptchaMiddleware.canSkipCaptcha ) + await Modules.applyRouter(webRouter, privateApiRouter, publicApiRouter) + webRouter.get('/login', UserPagesController.loginPage) AuthenticationController.addEndpointToLoginWhitelist('/login') @@ -285,8 +287,6 @@ async function initialize(webRouter, privateApiRouter, publicApiRouter) { TokenAccessRouter.apply(webRouter) HistoryRouter.apply(webRouter, privateApiRouter) - await Modules.applyRouter(webRouter, privateApiRouter, publicApiRouter) - if (Settings.enableSubscriptions) { webRouter.get( '/user/bonus', @@ -915,6 +915,12 @@ async function initialize(webRouter, privateApiRouter, publicApiRouter) { NotificationsController.markNotificationAsRead ) + webRouter.get( + '/user/notification/:notificationId', + AuthenticationController.requireLogin(), + NotificationsController.getNotification + ) + // Deprecated in favour of /internal/project/:project_id but still used by versioning privateApiRouter.get( '/project/:project_id/details', diff --git a/services/web/app/views/_customer_io.pug b/services/web/app/views/_customer_io.pug index 81d75f7d7f..781dfaab13 100644 --- a/services/web/app/views/_customer_io.pug +++ b/services/web/app/views/_customer_io.pug @@ -1,10 +1,12 @@ if(customerIoEnabled && ExposedSettings.cioWriteKey && ExposedSettings.cioSiteId) - script(type="text/javascript", id="cio-loader", nonce=scriptNonce, data-cio-write-key=ExposedSettings.cioWriteKey, data-cio-site-id=ExposedSettings.cioSiteId, data-session-analytics-id=getSessionAnalyticsId(), data-user-id=getLoggedInUserId()). 
+ script(type="text/javascript", id="cio-loader", nonce=scriptNonce, data-best-subscription=(usersBestSubscription && usersBestSubscription.type), data-cio-write-key=ExposedSettings.cioWriteKey, data-cio-site-id=ExposedSettings.cioSiteId, data-session-analytics-id=getSessionAnalyticsId(), data-user-id=getLoggedInUserId()). var cioSettings = document.querySelector('#cio-loader').dataset; var analyticsId = cioSettings.sessionAnalyticsId; var siteId = cioSettings.cioSiteId; var writeKey = cioSettings.cioWriteKey; var userId = cioSettings.userId; + var usersBestSubscription = cioSettings.bestSubscription + !function(){var i="cioanalytics", analytics=(window[i]=window[i]||[]);if(!analytics.initialize)if(analytics.invoked)window.console&&console.error&&console.error("Snippet included twice.");else{analytics.invoked=!0;analytics.methods=["trackSubmit","trackClick","trackLink","trackForm","pageview","identify","reset","group","track","ready","alias","debug","page","once","off","on","addSourceMiddleware","addIntegrationMiddleware","setAnonymousId","addDestinationMiddleware"];analytics.factory=function(e){return function(){var t=Array.prototype.slice.call(arguments);t.unshift(e);analytics.push(t);return analytics}};for(var e=0;e 0) - p.thanks The Overleaf Bonus Program has been discontinued, but you'll continue to have access to the features you already earned. - else - p.thanks The Overleaf Bonus Program has been discontinued. - p.thanks Please contact us if you have any questions. 
- - if (refered_user_count > 0) - .row.ab-bonus - .col-md-10.col-md-offset-1.bonus-banner(style="position: relative; height: 30px; margin-top: 20px;") - - for (var i = 0; i <= 10; i++) { - if (refered_user_count == i) - .number(style="left: "+i+"0%").active #{i} - else - .number(style="left: "+i+"0%") #{i} - - } - - .row.ab-bonus - .col-md-10.col-md-offset-1.bonus-banner - .progress - .progress-bar.progress-bar-info(style="width: "+refered_user_count+"0%") - - .row.ab-bonus - .col-md-10.col-md-offset-1.bonus-banner(style="position: relative; height: 110px;") - .perk(style="left: 10%;", class = refered_user_count >= 1 ? "active" : "") #{translate("one_free_collab")} - .perk(style="left: 30%;", class = refered_user_count >= 3 ? "active" : "") #{translate("three_free_collab")} - .perk(style="left: 60%;", class = refered_user_count >= 6 ? "active" : "") #{translate("free_dropbox_and_history")} + #{translate("three_free_collab")} - .perk(style="left: 90%;", class = refered_user_count >= 9 ? "active" : "") #{translate("free_dropbox_and_history")} + #{translate("unlimited_collabs")} - .row   - - .row.ab-bonus - .col-md-10.col-md-offset-1.bonus-banner.bonus-status - if (refered_user_count == 1) - p.thanks You’ve introduced 1 person to #{settings.appName}. + .card-body + .container-fluid + .row + .col-lg-10.offset-lg-1 + if (refered_user_count > 0) + p.thanks The Overleaf Bonus Program has been discontinued, but you'll continue to have access to the features you already earned. else - p.thanks You’ve introduced #{refered_user_count} people to #{settings.appName}. + p.thanks The Overleaf Bonus Program has been discontinued. + p.thanks Please contact us if you have any questions. 
+ + if (refered_user_count > 0) + .row.ab-bonus + .col-lg-10.offset-lg-1(style="position: relative; height: 30px; margin-top: 20px;") + - for (var i = 0; i <= 10; i++) { + if (refered_user_count == i) + .number(style="left: "+i+"0%").active #{i} + else + .number(style="left: "+i+"0%") #{i} + - } + + .row.ab-bonus + .col-lg-10.offset-lg-1 + .progress + .progress-bar.progress-bar-info(style="width: "+refered_user_count+"0%") + + .row.ab-bonus + .col-lg-10.offset-lg-1(style="position: relative; height: 110px;") + .perk(style="left: 10%;", class = refered_user_count >= 1 ? "active" : "") #{translate("one_free_collab")} + .perk(style="left: 30%;", class = refered_user_count >= 3 ? "active" : "") #{translate("three_free_collab")} + .perk(style="left: 60%;", class = refered_user_count >= 6 ? "active" : "") #{translate("free_dropbox_and_history")} + #{translate("three_free_collab")} + .perk(style="left: 90%;", class = refered_user_count >= 9 ? "active" : "") #{translate("free_dropbox_and_history")} + #{translate("unlimited_collabs")} + .row   + + .row.ab-bonus + .col-lg-10.offset-lg-1.bonus-status + if (refered_user_count == 1) + p.thanks You’ve introduced 1 person to #{settings.appName}. + else + p.thanks You’ve introduced #{refered_user_count} people to #{settings.appName}. 
diff --git a/services/web/app/views/subscriptions/dashboard-react.pug b/services/web/app/views/subscriptions/dashboard-react.pug index d6a1bff49c..2b6251f2a3 100644 --- a/services/web/app/views/subscriptions/dashboard-react.pug +++ b/services/web/app/views/subscriptions/dashboard-react.pug @@ -27,6 +27,7 @@ block append meta meta(name="ol-user" data-type="json" content=user) if (personalSubscription && personalSubscription.payment) meta(name="ol-recurlyApiKey" content=settings.apis.recurly.publicKey) + meta(name="ol-stripeUKApiKey" content=settings.apis.stripeUK.publishableKey) meta(name="ol-recommendedCurrency" content=personalSubscription.payment.currency) meta(name="ol-groupPlans" data-type="json" content=groupPlans) diff --git a/services/web/app/views/subscriptions/plans/_faq_new.pug b/services/web/app/views/subscriptions/plans/_faq_new.pug index baefb6ed3f..3c926fb22d 100644 --- a/services/web/app/views/subscriptions/plans/_faq_new.pug +++ b/services/web/app/views/subscriptions/plans/_faq_new.pug @@ -1,5 +1,6 @@ include ./_plans_faq_tabs include ../../_mixins/eyebrow +include ../../_mixins/material_symbol - var managingYourSubscription = 'managingYourSubscription' - var overleafIndividualPlans = 'overleafIndividualPlans' @@ -81,6 +82,10 @@ include ../../_mixins/eyebrow .row .col-xs-12.plans-faq-support span #{translate('still_have_questions')} - button(data-ol-open-contact-form-modal="general") + button( + data-ol-open-contact-form-modal="general" + data-bs-toggle=bootstrapVersion === 5 ? "modal" : undefined + data-bs-target=bootstrapVersion === 5 ? 
"#contactUsModal" : undefined + ) span(style="margin-right: 4px") #{translate('contact_support')} - i.icon-md.material-symbols.material-symbols-rounded.material-symbols-arrow-right(aria-hidden="true") arrow_right_alt + +material-symbol-rounded("arrow_right_alt", "icon-md") diff --git a/services/web/app/views/subscriptions/plans/_plans_faq_tabs.pug b/services/web/app/views/subscriptions/plans/_plans_faq_tabs.pug index f312ebeb46..a598f4774c 100644 --- a/services/web/app/views/subscriptions/plans/_plans_faq_tabs.pug +++ b/services/web/app/views/subscriptions/plans/_plans_faq_tabs.pug @@ -1,5 +1,6 @@ //- If the `plans-page-bs5` split test has been completed, remove the `data-toggle` and `data-target` because it is not needed anymore (bs5 uses `data-bs-toggle` and `data-bs-target`) - +include ../../_mixins/material_symbol + mixin managingYourSubscription() .ol-accordions-container .custom-accordion-item @@ -14,7 +15,7 @@ mixin managingYourSubscription() ) | Can I change plans or cancel later? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="managingYourSubscriptionQ1") .custom-accordion-body span Yes, you can do this at any time by going to @@ -32,7 +33,7 @@ mixin managingYourSubscription() ) | If I change or cancel my Overleaf plan, will I lose my projects? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="managingYourSubscriptionQ2") .custom-accordion-body | No. Changing or canceling your plan won’t affect your projects, the only change will be to the features available to you. You can see which features are available only on paid plans in the comparison table. @@ -48,7 +49,7 @@ mixin managingYourSubscription() ) | Can I pay by invoice or purchase order? 
span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="managingYourSubscriptionQ3") .custom-accordion-body | This is possible when you’re purchasing a group subscription for five or more people, or a site license. For individual subscriptions, we can only accept payment online via credit card, debit card, or PayPal. @@ -64,7 +65,7 @@ mixin managingYourSubscription() ) | How do I view/update the credit card being charged for my subscription? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="managingYourSubscriptionQ4") .custom-accordion-body | You can view and update the card on file by going to Account > @@ -96,7 +97,7 @@ mixin overleafIndividualPlans() ) | How does the free trial work? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafIndividualPlansQ1") .custom-accordion-body span You get full access to your chosen plan during your 7-day free trial, and there’s no obligation to continue beyond the trial. Your card will be charged at the end of your trial unless you cancel before then. To cancel, go to @@ -124,7 +125,7 @@ mixin overleafIndividualPlans() ) | What’s a collaborator on an Overleaf individual subscription? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafIndividualPlansQ2") .custom-accordion-body | A collaborator is someone you invite to work with you on a project. So, for example, on our Standard plan you can have up to 10 people collaborating with you on any given project. 
@@ -141,7 +142,7 @@ mixin overleafIndividualPlans() ) | The individual Standard plan has 10 project collaborators, does it mean that 10 people will be upgraded? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafIndividualPlansQ3") .custom-accordion-body span No. Only the subscriber’s account will be upgraded. An individual Standard subscription allows you to invite 10 people per project to edit the project with you. Your collaborators can access features such as the full document history and extended compile time, but @@ -159,7 +160,7 @@ mixin overleafIndividualPlans() ) | Do collaborators also have access to the editing and collaboration features I’ve paid for? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafIndividualPlansQ4") .custom-accordion-body span If you have an Overleaf subscription, then your project collaborators will have access to features like real-time track changes and document history, but @@ -177,7 +178,7 @@ mixin overleafIndividualPlans() ) | Can I purchase an individual plan on behalf of someone else? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafIndividualPlansQ5") .custom-accordion-body | Individual subscriptions must be purchased by the account that will be the end user. If you want to purchase a plan for someone else, you’ll need to provide them with relevant payment details to enable them to make the purchase. @@ -193,7 +194,7 @@ mixin overleafIndividualPlans() ) | Who is eligible for the Student plan? 
span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafIndividualPlansQ6") .custom-accordion-body | As the name suggests, the Student plan is only for students at educational institutions. This includes graduate students. @@ -209,7 +210,7 @@ mixin overleafIndividualPlans() ) | Can I transfer an individual subscription to someone else? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafIndividualPlansQ7") .custom-accordion-body | No. Individual plans can’t be transferred. @@ -232,7 +233,7 @@ mixin overleafGroupPlans() ) | What’s the difference between users and collaborators on an Overleaf group subscription? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafGroupPlansQ1") .custom-accordion-body div On any of our group plans, the number of users refers to the number of people you can invite to join your group. All of these people will have access to the plan’s paid-for features across all their projects, such as real-time track changes and document history. @@ -249,7 +250,7 @@ mixin overleafGroupPlans() ) | What is the benefit of purchasing an Overleaf Group plan? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafGroupPlansQ2") .custom-accordion-body | Our Group subscriptions allow you to purchase access to our premium features for multiple people. They’re easy to manage, help save on paperwork, and allow groups of 5 or more to purchase via purchase order (PO). 
We also offer discounts on purchases of Group subscriptions for more than 20 users; just get in touch with our @@ -275,7 +276,7 @@ mixin overleafGroupPlans() ) | Who is eligible for the educational discount? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafGroupPlansQ3") .custom-accordion-body | The educational discount for group subscriptions is for students or faculty who are using Overleaf primarily for teaching. @@ -291,7 +292,7 @@ mixin overleafGroupPlans() ) | How do I add more licenses to my group subscription, and what will it cost? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafGroupPlansQ4") .custom-accordion-body div @@ -340,7 +341,7 @@ mixin overleafGroupPlans() ) | How do I upgrade my plan from Group Standard to Group Professional? 
span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafGroupPlansQ5") .custom-accordion-body | You can upgrade your plan from Group Standard to Group Professional on the diff --git a/services/web/app/views/subscriptions/team/invite-managed.pug b/services/web/app/views/subscriptions/team/invite-managed.pug index f59b8b4937..d31f12656b 100644 --- a/services/web/app/views/subscriptions/team/invite-managed.pug +++ b/services/web/app/views/subscriptions/team/invite-managed.pug @@ -13,6 +13,7 @@ block append meta meta(name="ol-groupSSOActive" data-type="boolean" content=groupSSOActive) meta(name="ol-subscriptionId" data-type="string" content=subscriptionId) meta(name="ol-user" data-type="json" content=user) + meta(name="ol-usersSubscription" data-type="json" content=usersSubscription) block content main.content.content-alt.team-invite#invite-managed-root diff --git a/services/web/app/views/subscriptions/team/invite.pug b/services/web/app/views/subscriptions/team/invite.pug index dc1b509cbf..1b2ecb4646 100644 --- a/services/web/app/views/subscriptions/team/invite.pug +++ b/services/web/app/views/subscriptions/team/invite.pug @@ -4,7 +4,7 @@ block entrypointVar - entrypoint = 'pages/user/subscription/invite' block append meta - meta(name="ol-hasIndividualRecurlySubscription" data-type="boolean" content=hasIndividualRecurlySubscription) + meta(name="ol-hasIndividualPaidSubscription" data-type="boolean" content=hasIndividualPaidSubscription) meta(name="ol-inviterName" data-type="string" content=inviterName) meta(name="ol-inviteToken" data-type="string" content=inviteToken) meta(name="ol-currentManagedUserAdminEmail" data-type="string" content=currentManagedUserAdminEmail) diff --git a/services/web/app/views/subscriptions/team/invite_logged_out.pug b/services/web/app/views/subscriptions/team/invite_logged_out.pug index d07fa5368c..e5930aba4f 
100644 --- a/services/web/app/views/subscriptions/team/invite_logged_out.pug +++ b/services/web/app/views/subscriptions/team/invite_logged_out.pug @@ -1,4 +1,4 @@ -extends ../../layout-react +extends ../../layout-marketing block append meta meta(name="ol-user" data-type="json" content=user) @@ -14,7 +14,7 @@ block content .card-body .page-header // TODO: Remove `team-invite-name` once we fully migrated to Bootstrap 5 - h1.text-centered !{translate("invited_to_group", {inviterName: inviterName, appName: appName }, [{name: 'span', attrs: {class: 'team-invite-name'}}])} + h1.text-center !{translate("invited_to_group", {inviterName: inviterName, appName: appName }, [{name: 'span', attrs: {class: 'team-invite-name'}}])} if (accountExists) div diff --git a/services/web/app/views/user/accountSuspended.pug b/services/web/app/views/user/accountSuspended.pug index da57f4d9ff..7231713416 100644 --- a/services/web/app/views/user/accountSuspended.pug +++ b/services/web/app/views/user/accountSuspended.pug @@ -4,12 +4,12 @@ block vars - var suppressNavbar = true - var suppressFooter = true - metadata.robotsNoindexNofollow = true - - bootstrap5PageStatus = 'disabled' block content main.content.content-alt#main-content .container-custom-sm.mx-auto .card - h3 #{translate('your_account_is_suspended')} - p #{translate('sorry_this_account_has_been_suspended')} - p !{translate('please_contact_us_if_you_think_this_is_in_error', {}, [{name: 'a', attrs: {href: `mailto:${settings.adminEmail}`}}])} + .card-body + h3 #{translate('your_account_is_suspended')} + p #{translate('sorry_this_account_has_been_suspended')} + p !{translate('please_contact_us_if_you_think_this_is_in_error', {}, [{name: 'a', attrs: {href: `mailto:${settings.adminEmail}`}}])} diff --git a/services/web/app/views/user/confirm_email.pug b/services/web/app/views/user/confirm_email.pug index 37c04880b1..13e911f386 100644 --- a/services/web/app/views/user/confirm_email.pug +++ b/services/web/app/views/user/confirm_email.pug @@ 
-1,60 +1,57 @@ extends ../layout-marketing - -block vars - - bootstrap5PageStatus = 'disabled' +include ../_mixins/notification block content main.content.content-alt#main-content .container .row - .col-md-8.col-md-offset-2.col-lg-6.col-lg-offset-3 + .col-lg-8.offset-lg-2.col-xl-6.offset-xl-3 .card - .page-header(data-ol-hide-on-error-message="confirm-email-wrong-user") - h1 #{translate("confirm_email")} - form( - method="POST" - action="/logout" - id="logoutForm" - ) - input(type="hidden", name="_csrf", value=csrfToken) - input(type="hidden", name="redirect", value=currentUrlWithQueryParams) - form( - data-ol-async-form, - data-ol-auto-submit, - name="confirmEmailForm" - action="/user/emails/confirm", - method="POST", - id="confirmEmailForm", - ) - input(type="hidden", name="_csrf", value=csrfToken) - input(type="hidden", name="token", value=token) + .card-body + .page-header(data-ol-hide-on-error-message="confirm-email-wrong-user") + h1 #{translate("confirm_email")} + form( + method="POST" + action="/logout" + id="logoutForm" + ) + input(type="hidden", name="_csrf", value=csrfToken) + input(type="hidden", name="redirect", value=currentUrlWithQueryParams) + form( + data-ol-async-form, + data-ol-auto-submit, + name="confirmEmailForm" + action="/user/emails/confirm", + method="POST", + id="confirmEmailForm", + ) + input(type="hidden", name="_csrf", value=csrfToken) + input(type="hidden", name="token", value=token) + + div(data-ol-not-sent) + +formMessages() + div(data-ol-custom-form-message="confirm-email-wrong-user" hidden) + h1.h3 #{translate("we_cant_confirm_this_email")} + p !{translate("to_confirm_email_address_you_must_be_logged_in_with_the_requesting_account")} + p !{translate("you_are_currently_logged_in_as", {email: getUserEmail()})} + .actions + button.btn-primary.btn.w-100( + form="logoutForm" + ) #{translate('log_in_with_a_different_account')} - div(data-ol-not-sent) - +formMessages() - div(data-ol-custom-form-message="confirm-email-wrong-user" hidden) - 
h1.h3 #{translate("we_cant_confirm_this_email")} - p !{translate("to_confirm_email_address_you_must_be_logged_in_with_the_requesting_account")} - p !{translate("you_are_currently_logged_in_as", {email: getUserEmail()})} .actions - button.btn-primary.btn.btn-block( - form="logoutForm" - ) #{translate('log_in_with_a_different_account')} + button.btn-primary.btn.w-100( + type='submit', + data-ol-disabled-inflight + data-ol-hide-on-error-message="confirm-email-wrong-user" + ) + span(data-ol-inflight="idle") + | #{translate('confirm')} + span(hidden data-ol-inflight="pending") + span(role='status').spinner-border.spinner-border-sm.mx-2 - .actions - button.btn-primary.btn.btn-block( - type='submit', - data-ol-disabled-inflight - data-ol-hide-on-error-message="confirm-email-wrong-user" - ) - span(data-ol-inflight="idle") - | #{translate('confirm')} - span(hidden data-ol-inflight="pending") - i.fa.fa-fw.fa-spin.fa-spinner(aria-hidden="true") - |  #{translate('confirming')}… - - div(hidden data-ol-sent) - .alert.alert-success - | #{translate('thank_you_email_confirmed')} - div.text-center - a.btn.btn-primary(href="/user/settings") - | #{translate('go_to_account_settings')} + div(hidden data-ol-sent) + +notification({ariaLive: 'polite', type: 'success', className: 'mb-3', content: translate("thank_you_email_confirmed")}) + div.text-center + a.btn.btn-primary(href="/user/settings") + | #{translate('go_to_account_settings')} diff --git a/services/web/app/views/user/email-preferences.pug b/services/web/app/views/user/email-preferences.pug index 465ffede37..86ebc5f841 100644 --- a/services/web/app/views/user/email-preferences.pug +++ b/services/web/app/views/user/email-preferences.pug @@ -1,49 +1,47 @@ extends ../layout-marketing include ../_mixins/back_to_btns -block vars - - bootstrap5PageStatus = 'disabled' - block content main.content.content-alt#main-content .container .row - .col-md-10.col-md-offset-1.col-lg-8.col-lg-offset-2 + .col-lg-10.offset-lg-1.col-xl-8.offset-xl-2 
.card - .page-header - h1 #{translate("newsletter_info_title")} - - p #{translate("newsletter_info_summary")} - - - var submitAction - if subscribed - - submitAction = '/user/newsletter/unsubscribe' - p !{translate("newsletter_info_subscribed", {}, ['strong'])} - else - - submitAction = '/user/newsletter/subscribe' - p !{translate("newsletter_info_unsubscribed", {}, ['strong'])} - - form( - data-ol-async-form - data-ol-reload-on-success - name="newsletterForm" - action=submitAction - method="POST" - ) - input(name='_csrf', type='hidden', value=csrfToken) - +formMessages() - p.actions.text-center - if subscribed - button.btn-danger.btn(type='submit', data-ol-disabled-inflight) - span(data-ol-inflight="idle") #{translate("unsubscribe")} - span(hidden data-ol-inflight="pending") #{translate("saving")}… - else - button.btn-primary.btn(type='submit', data-ol-disabled-inflight) - span(data-ol-inflight="idle") #{translate("subscribe")} - span(hidden data-ol-inflight="pending") #{translate("saving")}… - - if subscribed - p #{translate("newsletter_info_note")} - - .page-separator - +back-to-btns() + .card-body + .page-header + h1 #{translate("newsletter_info_title")} + + p #{translate("newsletter_info_summary")} + + - var submitAction + if subscribed + - submitAction = '/user/newsletter/unsubscribe' + p !{translate("newsletter_info_subscribed", {}, ['strong'])} + else + - submitAction = '/user/newsletter/subscribe' + p !{translate("newsletter_info_unsubscribed", {}, ['strong'])} + + form( + data-ol-async-form + data-ol-reload-on-success + name="newsletterForm" + action=submitAction + method="POST" + ) + input(name='_csrf', type='hidden', value=csrfToken) + +formMessages() + p.actions.text-center + if subscribed + button.btn-danger.btn(type='submit', data-ol-disabled-inflight) + span(data-ol-inflight="idle") #{translate("unsubscribe")} + span(hidden data-ol-inflight="pending") #{translate("saving")}… + else + button.btn-primary.btn(type='submit', data-ol-disabled-inflight) + 
span(data-ol-inflight="idle") #{translate("subscribe")} + span(hidden data-ol-inflight="pending") #{translate("saving")}… + + if subscribed + p #{translate("newsletter_info_note")} + + .page-separator + +back-to-btns() diff --git a/services/web/app/views/user/login.pug b/services/web/app/views/user/login.pug index 1ad77cb8b4..ffeb3eca89 100644 --- a/services/web/app/views/user/login.pug +++ b/services/web/app/views/user/login.pug @@ -23,10 +23,10 @@ block content | !{translate('password_compromised_try_again_or_use_known_device_or_reset', {}, [{name: 'a', attrs: {href: 'https://haveibeenpwned.com/passwords', rel: 'noopener noreferrer', target: '_blank'}}, {name: 'a', attrs: {href: '/user/password/reset', target: '_blank'}}])}. .form-group input.form-control( - type='email', + type=(settings.ldap && settings.ldap.enable) ? 'text' : 'email', name='email', required, - placeholder='email@example.com', + placeholder=(settings.ldap && settings.ldap.enable) ? settings.ldap.placeholder : 'email@example.com', autofocus="true" ) .form-group @@ -47,4 +47,21 @@ block content if login_support_text hr p.text-center !{login_support_text} - + if settings.saml && settings.saml.enable + .actions(style='margin-top: 30px;') + a.button.btn-secondary.btn( + href='/saml/login', + style="width: 100%;" + data-ol-disabled-inflight + ) + span(data-ol-inflight="idle") #{settings.saml.identityServiceName} + span(hidden data-ol-inflight="pending") #{translate("logging_in")}… + if settings.oidc && settings.oidc.enable + .actions(style='margin-top: 30px;') + a.button.btn-secondary.btn( + href='/oidc/login', + style="width: 100%;" + data-ol-disabled-inflight + ) + span(data-ol-inflight="idle") #{settings.oidc.identityServiceName} + span(hidden data-ol-inflight="pending") #{translate("logging_in")}… diff --git a/services/web/app/views/user/one_time_login.pug b/services/web/app/views/user/one_time_login.pug index 89e1491913..648f6d93c1 100644 --- a/services/web/app/views/user/one_time_login.pug +++ 
b/services/web/app/views/user/one_time_login.pug @@ -1,20 +1,18 @@ extends ../layout-marketing -block vars - - bootstrap5PageStatus = 'disabled' - block content main.content.content-alt#main-content .container .row - .col-md-6.col-md-offset-3.col-lg-4.col-lg-offset-4 + .col-lg-6.offset-lg-3.col-xl-4.offset-xl-4 .card - .page-header - h1 We're back! - p Overleaf is now running normally. - p - | Please - | - a(href="/login") log in - | - | to continue working on your projects. + .card-body + .page-header + h1 We're back! + p Overleaf is now running normally. + p + | Please + | + a(href="/login") log in + | + | to continue working on your projects. diff --git a/services/web/app/views/user/passwordReset-bs5.pug b/services/web/app/views/user/passwordReset-bs5.pug index 7637a91062..08e0a71b9d 100644 --- a/services/web/app/views/user/passwordReset-bs5.pug +++ b/services/web/app/views/user/passwordReset-bs5.pug @@ -1,10 +1,11 @@ -extends ../layout-website-redesign-bootstrap-5 +extends ../layout-website-redesign include ../_mixins/recaptcha include ../_mixins/notification block vars - var suppressNavbar = true - var suppressFooter = true + - isWebsiteRedesign = true block content - var showCaptcha = settings.recaptcha && settings.recaptcha.siteKey && !(settings.recaptcha.disabled && settings.recaptcha.disabled.passwordReset) diff --git a/services/web/app/views/user/passwordReset.pug b/services/web/app/views/user/passwordReset.pug index 410e79fbb2..4eb1adbca5 100644 --- a/services/web/app/views/user/passwordReset.pug +++ b/services/web/app/views/user/passwordReset.pug @@ -1,5 +1,6 @@ extends ../layout-marketing include ../_mixins/recaptcha +include ../_mixins/material_symbol block vars - bootstrap5PageStatus = 'disabled' @@ -48,11 +49,11 @@ block content div(data-ol-custom-form-message="no-password-allowed-due-to-sso" hidden) .notification.notification-type-error(aria-live="polite" style="margin-bottom: 10px;") .notification-icon - 
span.material-symbols.material-symbols-rounded(aria-hidden="true") error + +material-symbol-rounded("error") .notification-content-and-cta .notification-content p - | !{translate("you_cant_reset_password_due_to_sso", {}, [{name: 'a', attrs: {href: '/sso-login'}}])} + | !{translate("you_cant_reset_password_due_to_ldap_or_sso")} input(type="hidden", name="_csrf", value=csrfToken) .form-group.mb-3 diff --git a/services/web/app/views/user/primaryEmailCheck-bs5.pug b/services/web/app/views/user/primaryEmailCheck-bs5.pug index 0828c06e4b..b25136927a 100644 --- a/services/web/app/views/user/primaryEmailCheck-bs5.pug +++ b/services/web/app/views/user/primaryEmailCheck-bs5.pug @@ -1,4 +1,8 @@ -extends ../layout-website-redesign-bootstrap-5 +extends ../layout-website-redesign + +block vars + - bootstrap5PageStatus = 'enabled' + - isWebsiteRedesign = true block content main#main-content diff --git a/services/web/app/views/user/reconfirm-bs5.pug b/services/web/app/views/user/reconfirm-bs5.pug index 8d9d13955f..fce9a44295 100644 --- a/services/web/app/views/user/reconfirm-bs5.pug +++ b/services/web/app/views/user/reconfirm-bs5.pug @@ -1,69 +1,72 @@ -extends ../layout-website-redesign-bootstrap-5 +extends ../layout-website-redesign include ../_mixins/recaptcha +block vars + - isWebsiteRedesign = true + block content - - var email = reconfirm_email ? reconfirm_email : "" - - var showCaptcha = settings.recaptcha && settings.recaptcha.siteKey && !(settings.recaptcha.disabled && settings.recaptcha.disabled.passwordReset) + - var email = reconfirm_email ? 
reconfirm_email : "" + - var showCaptcha = settings.recaptcha && settings.recaptcha.siteKey && !(settings.recaptcha.disabled && settings.recaptcha.disabled.passwordReset) - if showCaptcha - script(type="text/javascript", nonce=scriptNonce, src="https://www.recaptcha.net/recaptcha/api.js?render=explicit") - div( - id="recaptcha" - class="g-recaptcha" - data-sitekey=settings.recaptcha.siteKey - data-size="invisible" - data-badge="inline" - ) + if showCaptcha + script(type="text/javascript", nonce=scriptNonce, src="https://www.recaptcha.net/recaptcha/api.js?render=explicit") + div( + id="recaptcha" + class="g-recaptcha" + data-sitekey=settings.recaptcha.siteKey + data-size="invisible" + data-badge="inline" + ) - main#main-content(data-ol-captcha-retry-trigger-area="") - .container.auth-aux-container(style="max-width: 420px;") - form( - data-ol-async-form - name="reconfirmAccountForm" - action="/user/reconfirm" - method="POST" - aria-label=translate('request_reconfirmation_email') - captcha=(showCaptcha ? '' : false) - captcha-action-name=(showCaptcha ? "passwordReset" : false) - ) - h1.h5.mb-3 #{translate("reconfirm_account")} - p #{translate('reconfirm_explained')} - | - a(href=`mailto:${settings.adminEmail}`) #{settings.adminEmail} - | . - - div(data-ol-not-sent) - +formMessagesNewStyle() + main#main-content(data-ol-captcha-retry-trigger-area="") + .container.auth-aux-container(style="max-width: 420px;") + form( + data-ol-async-form + name="reconfirmAccountForm" + action="/user/reconfirm" + method="POST" + aria-label=translate('request_reconfirmation_email') + captcha=(showCaptcha ? '' : false) + captcha-action-name=(showCaptcha ? "passwordReset" : false) + ) + h1.h5.mb-3 #{translate("reconfirm_account")} + p #{translate('reconfirm_explained')} + | + a(href=`mailto:${settings.adminEmail}`) #{settings.adminEmail} + | . 
+ + div(data-ol-not-sent) + +formMessagesNewStyle() - input(type="hidden" name="_csrf" value=csrfToken) - .form-group.mb-3 - label.form-label(for='email') #{translate("please_enter_email")} - input.form-control( - aria-label="email" - type='email' - name='email' - placeholder='email@example.com' - required - autofocus - value=email - ) - .actions - button.btn.btn-primary.w-100( - style="white-space: normal;" - type='submit' - data-ol-disabled-inflight - aria-label=translate('request_password_reset_to_reconfirm') - ) - span(data-ol-inflight="idle") - | #{translate('request_password_reset_to_reconfirm')} - span(hidden data-ol-inflight="pending") - | #{translate('request_password_reset_to_reconfirm')}… - div(hidden data-ol-sent) - div.alert.alert-success( - role="alert" - aria-live="polite" - ) - span #{translate('password_reset_email_sent')} + input(type="hidden" name="_csrf" value=csrfToken) + .form-group.mb-3 + label.form-label(for='email') #{translate("please_enter_email")} + input.form-control( + aria-label="email" + type='email' + name='email' + placeholder='email@example.com' + required + autofocus + value=email + ) + .actions + button.btn.btn-primary.w-100( + style="white-space: normal;" + type='submit' + data-ol-disabled-inflight + aria-label=translate('request_password_reset_to_reconfirm') + ) + span(data-ol-inflight="idle") + | #{translate('request_password_reset_to_reconfirm')} + span(hidden data-ol-inflight="pending") + | #{translate('request_password_reset_to_reconfirm')}… + div(hidden data-ol-sent) + div.alert.alert-success( + role="alert" + aria-live="polite" + ) + span #{translate('password_reset_email_sent')} - if showCaptcha - +recaptchaConditions + if showCaptcha + +recaptchaConditions diff --git a/services/web/app/views/user/reconfirm.pug b/services/web/app/views/user/reconfirm.pug index 7c17423d5a..23b77d278d 100644 --- a/services/web/app/views/user/reconfirm.pug +++ b/services/web/app/views/user/reconfirm.pug @@ -23,7 +23,7 @@ block content .row 
.col-sm-12.col-md-6.col-md-offset-3 .card - h1.card-header #{translate("reconfirm")} #{translate("Account")} + h1.card-header #{translate("reconfirm_account")} p #{translate('reconfirm_explained')}  a(href=`mailto:${settings.adminEmail}`) #{settings.adminEmail} | . diff --git a/services/web/app/views/user/restricted.pug b/services/web/app/views/user/restricted.pug index eba1d2ab05..0140064a99 100644 --- a/services/web/app/views/user/restricted.pug +++ b/services/web/app/views/user/restricted.pug @@ -1,4 +1,5 @@ extends ../layout-marketing +include ../_mixins/material_symbol block content main.content#main-content @@ -6,8 +7,8 @@ block content .row .col-md-8.offset-md-2.text-center .page-header - h2 #{translate("restricted_no_permission")} + h1 #{translate("restricted_no_permission")} p - span.inline-material-symbols - a(href="/").material-symbols(aria-hidden="true") arrow_left_alt - a(href="/") #{translate("take_me_home")} + a.inline-material-symbols(href="/") + +material-symbol("arrow_left_alt") + | #{translate("take_me_home")} diff --git a/services/web/app/views/user/sessions.pug b/services/web/app/views/user/sessions.pug index 187c1dae75..ffd65a3548 100644 --- a/services/web/app/views/user/sessions.pug +++ b/services/web/app/views/user/sessions.pug @@ -1,72 +1,70 @@ extends ../layout-marketing -block vars - - bootstrap5PageStatus = 'disabled' - block content main.content.content-alt#main-content .container .row - .col-md-10.col-md-offset-1.col-lg-8.col-lg-offset-2 + .col-lg-10.offset-lg-1.col-xl-8.offset-xl-2 .card.clear-user-sessions - .page-header - h1 #{translate("your_sessions")} - - if currentSession.ip_address && currentSession.session_created - h3 #{translate("current_session")} - div - table.table.table-striped - thead - tr - th #{translate("ip_address")} - th #{translate("session_created_at")} - tr - td #{currentSession.ip_address} - td #{moment(currentSession.session_created).utc().format('Do MMM YYYY, h:mm a')} UTC - - h3 
#{translate("other_sessions")} - div - p.small - | !{translate("clear_sessions_description")} - - form( - data-ol-async-form - action='/user/sessions/clear' - method='POST' - ) - input(name='_csrf' type='hidden' value=csrfToken) - div(data-ol-not-sent) - if sessions.length == 0 - p.text-center - | #{translate("no_other_sessions")} - - if sessions.length > 0 + .card-body + .page-header + h1 #{translate("your_sessions")} + + if currentSession.ip_address && currentSession.session_created + h3 #{translate("current_session")} + div table.table.table-striped thead tr th #{translate("ip_address")} th #{translate("session_created_at")} - for session in sessions tr - td #{session.ip_address} - td #{moment(session.session_created).utc().format('Do MMM YYYY, h:mm a')} UTC - - p.actions - .text-center - button.btn.btn-lg.btn-primary( - type="submit" - data-ol-disable-inflight - ) - span(data-ol-inflight="idle") #{translate('clear_sessions')} - span(hidden data-ol-inflight="pending") #{translate("processing")}… - - div(hidden data-ol-sent) - p.text-center - | #{translate("no_other_sessions")} - - p.text-success.text-center - | #{translate('clear_sessions_success')} - .page-separator - a.btn.btn-secondary(href='/user/settings') #{translate('back_to_account_settings')} - | - a.btn.btn-secondary(href='/project') #{translate('back_to_your_projects')} + td #{currentSession.ip_address} + td #{moment(currentSession.session_created).utc().format('Do MMM YYYY, h:mm a')} UTC + + h3 #{translate("other_sessions")} + div + p.small + | !{translate("clear_sessions_description")} + + form( + data-ol-async-form + action='/user/sessions/clear' + method='POST' + ) + input(name='_csrf' type='hidden' value=csrfToken) + div(data-ol-not-sent) + if sessions.length == 0 + p.text-center + | #{translate("no_other_sessions")} + + if sessions.length > 0 + table.table.table-striped + thead + tr + th #{translate("ip_address")} + th #{translate("session_created_at")} + for session in sessions + tr + td 
#{session.ip_address} + td #{moment(session.session_created).utc().format('Do MMM YYYY, h:mm a')} UTC + + p.actions + .text-center + button.btn.btn-lg.btn-primary( + type="submit" + data-ol-disable-inflight + ) + span(data-ol-inflight="idle") #{translate('clear_sessions')} + span(hidden data-ol-inflight="pending") #{translate("processing")}… + + div(hidden data-ol-sent) + p.text-center + | #{translate("no_other_sessions")} + + p.text-success.text-center + | #{translate('clear_sessions_success')} + .page-separator + .d-flex.gap-3 + a.btn.btn-secondary(href='/user/settings') #{translate('back_to_account_settings')} + a.btn.btn-secondary(href='/project') #{translate('back_to_your_projects')} diff --git a/services/web/app/views/user/setPassword-bs5.pug b/services/web/app/views/user/setPassword-bs5.pug index 007ae5e87c..83c3a531bb 100644 --- a/services/web/app/views/user/setPassword-bs5.pug +++ b/services/web/app/views/user/setPassword-bs5.pug @@ -1,90 +1,91 @@ -extends ../layout-website-redesign-bootstrap-5 +extends ../layout-website-redesign block vars - - var suppressNavbar = true - - var suppressFooter = true + - var suppressNavbar = true + - var suppressFooter = true + - isWebsiteRedesign = true block content - main#main-content - a.auth-aux-logo(href="/") - img(src=buildImgPath("ol-brand/overleaf-o-dark.svg") alt=settings.appName) - .auth-aux-container - form( - data-ol-async-form - name="passwordResetForm" - action="/user/password/set" - method="POST" - data-ol-hide-on-error="token-expired" - ) - div( - hidden - data-ol-sent - ) - h1.h3.mb-3.mt-0 #{translate("password_updated")} - p.mb-4 #{translate("your_password_has_been_successfully_changed")}. 
- a.btn.btn-primary.w-100(href='/login') #{translate("log_in_now")} + main#main-content + a.auth-aux-logo(href="/") + img(src=buildImgPath("ol-brand/overleaf-o-dark.svg") alt=settings.appName) + .auth-aux-container + form( + data-ol-async-form + name="passwordResetForm" + action="/user/password/set" + method="POST" + data-ol-hide-on-error="token-expired" + ) + div( + hidden + data-ol-sent + ) + h1.h3.mb-3.mt-0 #{translate("password_updated")} + p.mb-4 #{translate("your_password_has_been_successfully_changed")}. + a.btn.btn-primary.w-100(href='/login') #{translate("log_in_now")} - div(data-ol-not-sent) - h1.h3.mb-3.mt-0 #{translate("reset_your_password")} - p(data-ol-hide-on-error-message="token-expired") #{translate("create_a_new_password_for_your_account")}. - +formMessagesNewStyle() + div(data-ol-not-sent) + h1.h3.mb-3.mt-0 #{translate("reset_your_password")} + p(data-ol-hide-on-error-message="token-expired") #{translate("create_a_new_password_for_your_account")}. + +formMessagesNewStyle() - +customFormMessageNewStyle('password-contains-email', 'danger') - | #{translate('invalid_password_contains_email')}. - | #{translate('use_a_different_password')}. + +customFormMessageNewStyle('password-contains-email', 'danger') + | #{translate('invalid_password_contains_email')}. + | #{translate('use_a_different_password')}. - +customFormMessageNewStyle('password-too-similar', 'danger') - | #{translate('invalid_password_too_similar')}. - | #{translate('use_a_different_password')}. + +customFormMessageNewStyle('password-too-similar', 'danger') + | #{translate('invalid_password_too_similar')}. + | #{translate('use_a_different_password')}. 
- +customFormMessageNewStyle('token-expired', 'danger') - | #{translate('password_reset_token_expired')} - br - a(href="/user/password/reset") - | #{translate('request_new_password_reset_email')} + +customFormMessageNewStyle('token-expired', 'danger') + | #{translate('password_reset_token_expired')} + br + a(href="/user/password/reset") + | #{translate('request_new_password_reset_email')} - input(type="hidden" name="_csrf" value=csrfToken) - input(type="text" hidden name="email" autocomplete="username" value=email) + input(type="hidden" name="_csrf" value=csrfToken) + input(type="text" hidden name="email" autocomplete="username" value=email) - .form-group.mb-3 - label.form-label(for='passwordField', data-ol-hide-on-error-message="token-expired") #{translate("new_password")} - input.form-control.auth-aux-new-password#passwordField( - type='password' - name='password' - autocomplete="new-password" - autofocus - required - minlength=settings.passwordStrengthOptions.length.min - ) + .form-group.mb-3 + label.form-label(for='passwordField', data-ol-hide-on-error-message="token-expired") #{translate("new_password")} + input.form-control.auth-aux-new-password#passwordField( + type='password' + name='password' + autocomplete="new-password" + autofocus + required + minlength=settings.passwordStrengthOptions.length.min + ) - +customValidationMessageNewStyle('invalid-password') - | #{translate('invalid_password')}. + +customValidationMessageNewStyle('invalid-password') + | #{translate('invalid_password')}. - +customValidationMessageNewStyle('password-must-be-different') - | #{translate('password_cant_be_the_same_as_current_one')}. + +customValidationMessageNewStyle('password-must-be-different') + | #{translate('password_cant_be_the_same_as_current_one')}. 
- +customValidationMessageNewStyle('password-must-be-strong') - | !{translate('password_was_detected_on_a_public_list_of_known_compromised_passwords', {}, [{name: 'a', attrs: {href: 'https://haveibeenpwned.com/passwords', rel: 'noopener noreferrer', target: '_blank'}}])}. - | #{translate('use_a_different_password')}. + +customValidationMessageNewStyle('password-must-be-strong') + | !{translate('password_was_detected_on_a_public_list_of_known_compromised_passwords', {}, [{name: 'a', attrs: {href: 'https://haveibeenpwned.com/passwords', rel: 'noopener noreferrer', target: '_blank'}}])}. + | #{translate('use_a_different_password')}. - input( - type="hidden" - name="passwordResetToken" - value=passwordResetToken - ) - div(data-ol-hide-on-error-message="token-expired") - div #{translate('in_order_to_have_a_secure_account_make_sure_your_password')} - ul.mb-3.ps-4 - li #{translate('is_longer_than_n_characters', {n: settings.passwordStrengthOptions.length.min})} - li #{translate('does_not_contain_or_significantly_match_your_email')} - li #{translate('is_not_used_on_any_other_website')} - .actions - button.btn.btn-primary.w-100( - type='submit' - data-ol-disabled-inflight - aria-label=translate('set_new_password') - ) - span(data-ol-inflight="idle") - | #{translate('set_new_password')} - span(hidden data-ol-inflight="pending") - | #{translate('set_new_password')}… + input( + type="hidden" + name="passwordResetToken" + value=passwordResetToken + ) + div(data-ol-hide-on-error-message="token-expired") + div #{translate('in_order_to_have_a_secure_account_make_sure_your_password')} + ul.mb-3.ps-4 + li #{translate('is_longer_than_n_characters', {n: settings.passwordStrengthOptions.length.min})} + li #{translate('does_not_contain_or_significantly_match_your_email')} + li #{translate('is_not_used_on_any_other_website')} + .actions + button.btn.btn-primary.w-100( + type='submit' + data-ol-disabled-inflight + aria-label=translate('set_new_password') + ) + 
span(data-ol-inflight="idle") + | #{translate('set_new_password')} + span(hidden data-ol-inflight="pending") + | #{translate('set_new_password')}… diff --git a/services/web/app/views/user/settings.pug b/services/web/app/views/user/settings.pug index 4f939a41ca..4ac35bef71 100644 --- a/services/web/app/views/user/settings.pug +++ b/services/web/app/views/user/settings.pug @@ -32,6 +32,7 @@ block append meta meta(name="ol-gitBridgeEnabled" data-type="boolean" content=gitBridgeEnabled) meta(name="ol-isSaas" data-type="boolean" content=isSaas) meta(name="ol-memberOfSSOEnabledGroups" data-type="json" content=memberOfSSOEnabledGroups) + meta(name="ol-capabilities" data-type="json" content=capabilities) block content main.content.content-alt#main-content diff --git a/services/web/app/views/user_membership/group-members-react.pug b/services/web/app/views/user_membership/group-members-react.pug index 5e8971172d..05327c4b6d 100644 --- a/services/web/app/views/user_membership/group-members-react.pug +++ b/services/web/app/views/user_membership/group-members-react.pug @@ -10,6 +10,7 @@ block append meta meta(name="ol-groupName", data-type="string", content=name) meta(name="ol-groupSize", data-type="json", content=groupSize) meta(name="ol-managedUsersActive", data-type="boolean", content=managedUsersActive) + meta(name="ol-isUserGroupManager", data-type="boolean", content=isUserGroupManager) meta(name="ol-groupSSOActive", data-type="boolean", content=groupSSOActive) meta(name="ol-canUseFlexibleLicensing", data-type="boolean", content=canUseFlexibleLicensing) meta(name="ol-canUseAddSeatsFeature", data-type="boolean", content=canUseAddSeatsFeature) diff --git a/services/web/config/settings.defaults.js b/services/web/config/settings.defaults.js index a7ff970ef0..3b2e17593a 100644 --- a/services/web/config/settings.defaults.js +++ b/services/web/config/settings.defaults.js @@ -893,6 +893,7 @@ module.exports = { 'figcaption', 'span', 'source', + 'track', 'video', 'del', ], @@ -918,7 
+919,7 @@ module.exports = { col: ['width'], figure: ['class', 'id', 'style'], figcaption: ['class', 'id', 'style'], - i: ['aria-hidden', 'aria-label', 'class', 'id'], + i: ['aria-hidden', 'aria-label', 'class', 'id', 'translate'], iframe: [ 'allowfullscreen', 'frameborder', @@ -943,6 +944,7 @@ module.exports = { 'style', ], tr: ['class'], + track: ['src', 'kind', 'srcLang', 'label'], video: ['alt', 'class', 'controls', 'height', 'width'], }, }, @@ -966,6 +968,7 @@ module.exports = { editorToolbarButtons: [], sourceEditorExtensions: [], sourceEditorComponents: [], + pdfLogEntryHeaderActionComponents: [], pdfLogEntryComponents: [], pdfLogEntriesComponents: [], pdfPreviewPromotions: [], @@ -996,8 +999,10 @@ module.exports = { toastGenerators: [], editorSidebarComponents: [], fileTreeToolbarComponents: [], + fullProjectSearchPanel: [], integrationPanelComponents: [], referenceSearchSetting: [], + errorLogsComponents: [], }, moduleImportSequence: [ @@ -1005,6 +1010,9 @@ module.exports = { 'launchpad', 'server-ce-scripts', 'user-activate', + 'authentication/ldap', + 'authentication/saml', + 'authentication/oidc', ], viewIncludes: {}, @@ -1031,6 +1039,20 @@ module.exports = { managedUsers: { enabled: false, }, + + oauthProviders: { + ...(process.env.EXTERNAL_AUTH && process.env.EXTERNAL_AUTH.includes('oidc') && { + [process.env.OVERLEAF_OIDC_PROVIDER_ID || 'oidc']: { + name: process.env.OVERLEAF_OIDC_PROVIDER_NAME || 'OIDC Provider', + descriptionKey: process.env.OVERLEAF_OIDC_PROVIDER_DESCRIPTION, + descriptionOptions: { link: process.env.OVERLEAF_OIDC_PROVIDER_INFO_LINK }, + hideWhenNotLinked: process.env.OVERLEAF_OIDC_PROVIDER_HIDE_NOT_LINKED ? 
+ process.env.OVERLEAF_OIDC_PROVIDER_HIDE_NOT_LINKED.toLowerCase() === 'true' : undefined, + linkPath: '/oidc/login', + }, + }), + }, + } module.exports.mergeWith = function (overrides) { diff --git a/services/web/docker-compose.ci.yml b/services/web/docker-compose.ci.yml index 5cffe19810..33b5a3ca2e 100644 --- a/services/web/docker-compose.ci.yml +++ b/services/web/docker-compose.ci.yml @@ -13,6 +13,9 @@ services: logging: driver: local user: node + volumes: + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:unit:app working_dir: /overleaf/services/web env_file: docker-compose.common.env @@ -39,6 +42,9 @@ services: OVERLEAF_CONFIG: extra_hosts: - 'www.overleaf.test:127.0.0.1' + volumes: + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance:app user: root depends_on: @@ -86,7 +92,7 @@ services: user: root redis: - image: redis + image: redis:7.4.3 mongo: image: mongo:7.0.20 diff --git a/services/web/docker-compose.yml b/services/web/docker-compose.yml index 5314e94ed3..10e0a7842c 100644 --- a/services/web/docker-compose.yml +++ b/services/web/docker-compose.yml @@ -11,6 +11,7 @@ services: - .:/overleaf/services/web - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/web env_file: docker-compose.common.env environment: @@ -20,6 +21,7 @@ services: LOG_LEVEL: ${LOG_LEVEL:-} NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:unit:app user: node depends_on: @@ -31,6 +33,7 @@ services: - .:/overleaf/services/web - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries + - 
../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it user: node working_dir: /overleaf/services/web env_file: docker-compose.common.env @@ -50,6 +53,7 @@ services: - mongo - saml - ldap + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance:app test_frontend: @@ -84,7 +88,7 @@ services: - "cypress:run-ct" redis: - image: redis + image: redis:7.4.3 mongo: image: mongo:7.0.20 diff --git a/services/web/frontend/extracted-translations.json b/services/web/frontend/extracted-translations.json index 9862e47817..e01bb9aa94 100644 --- a/services/web/frontend/extracted-translations.json +++ b/services/web/frontend/extracted-translations.json @@ -1,7 +1,9 @@ { - "12x_more_compile_time": "", + "0_free_suggestions": "", "1_2_width": "", "1_4_width": "", + "1_free_suggestion": "", + "24x_more_compile_time": "", "3_4_width": "", "About": "", "Account": "", @@ -29,6 +31,7 @@ "about_to_enable_managed_users": "", "about_to_leave_project": "", "about_to_leave_projects": "", + "about_to_remove_user_preamble": "", "about_to_trash_projects": "", "abstract": "", "accept_and_continue": "", @@ -106,6 +109,7 @@ "agree_with_the_terms": "", "ai_assist_in_overleaf_is_included_via_writefull_groups": "", "ai_assist_in_overleaf_is_included_via_writefull_individual": "", + "ai_assist_unavailable_due_to_subscription_type": "", "ai_assistance_to_help_you": "", "ai_based_language_tools": "", "ai_can_make_mistakes": "", @@ -130,6 +134,7 @@ "an_email_has_already_been_sent_to": "", "an_error_occured_while_restoring_project": "", "an_error_occurred_when_verifying_the_coupon_code": "", + "and_upgrade_for_compile_time": "", "annual_discount": "", "anonymous": "", "anyone_with_link_can_edit": "", @@ -186,6 +191,7 @@ "blog": "", "bold": "", "booktabs": "", + "breadcrumbs": "", "browser": "", "bullet_list": "", "buy_licenses": "", @@ -197,6 +203,8 @@ "can_view_content": "", "cancel": "", "cancel_add_on": "", + 
"cancel_any_existing_subscriptions": "", + "cancel_any_existing_subscriptions_and_leave_any_group_subscriptions": "", "cancel_anytime": "", "cancel_my_account": "", "cancel_my_subscription": "", @@ -286,6 +294,8 @@ "compile_error_entry_description": "", "compile_error_handling": "", "compile_larger_projects": "", + "compile_limit_reached": "", + "compile_limit_upgrade_prompt": "", "compile_mode": "", "compile_terminated_by_user": "", "compiler": "", @@ -304,6 +314,7 @@ "confirm_reject_selected_changes": "", "confirm_reject_selected_changes_plural": "", "confirm_remove_sso_config_enter_email": "", + "confirm_remove_user_type_email_address": "", "confirm_secondary_email": "", "confirm_your_email": "", "confirming": "", @@ -410,7 +421,6 @@ "discount": "", "discount_of": "", "discover_the_fastest_way_to_search_and_cite": "", - "dismiss_error_popup": "", "display": "", "display_deleted_user": "", "display_math": "", @@ -529,6 +539,7 @@ "error": "", "error_assist": "", "error_log": "", + "error_logs_have_had_an_update": "", "error_opening_document": "", "error_opening_document_detail": "", "error_performing_request": "", @@ -622,6 +633,7 @@ "generic_if_problem_continues_contact_us": "", "generic_linked_file_compile_error": "", "generic_something_went_wrong": "", + "get_ai_assist": "", "get_collaborative_benefits": "", "get_discounted_plan": "", "get_error_assist": "", @@ -678,6 +690,8 @@ "go_next_page": "", "go_page": "", "go_prev_page": "", + "go_to_account_settings": "", + "go_to_code_location": "", "go_to_code_location_in_pdf": "", "go_to_overleaf": "", "go_to_pdf_location_in_code": "", @@ -835,6 +849,7 @@ "integrations": "", "integrations_like_github": "", "interested_in_cheaper_personal_plan": "", + "introducing_shorter_compile_timeout": "", "invalid_confirmation_code": "", "invalid_email": "", "invalid_file_name": "", @@ -967,6 +982,7 @@ "login_count": "", "login_to_accept_invitation": "", "login_with_service": "", + "logs": "", "logs_and_output_files": "", 
"looking_multiple_licenses": "", "looks_like_youre_at": "", @@ -1037,6 +1053,7 @@ "more_compile_time": "", "more_editor_toolbar_item": "", "more_info": "", + "more_logs_and_files": "", "more_options": "", "my_library": "", "n_items": "", @@ -1060,6 +1077,7 @@ "neither_agree_nor_disagree": "", "new_compile_domain_notice": "", "new_create_tables_and_equations": "", + "new_error_logs_panel": "", "new_file": "", "new_folder": "", "new_font_open_dyslexic": "", @@ -1128,10 +1146,11 @@ "on_free_plan_upgrade_to_access_features": "", "one_step_away_from_professional_features": "", "only_group_admin_or_managers_can_delete_your_account_1": "", - "only_group_admin_or_managers_can_delete_your_account_2": "", "only_group_admin_or_managers_can_delete_your_account_3": "", - "only_group_admin_or_managers_can_delete_your_account_4": "", - "only_group_admin_or_managers_can_delete_your_account_5": "", + "only_group_admin_or_managers_can_delete_your_account_6": "", + "only_group_admin_or_managers_can_delete_your_account_7": "", + "only_group_admin_or_managers_can_delete_your_account_8": "", + "only_group_admin_or_managers_can_delete_your_account_9": "", "only_importer_can_refresh": "", "open_action_menu": "", "open_advanced_reference_search": "", @@ -1146,7 +1165,6 @@ "organization_name": "", "organize_tags": "", "other": "", - "other_causes_of_compile_timeouts": "", "other_logs_and_files": "", "other_output_files": "", "our_team_will_get_back_to_you_shortly": "", @@ -1291,6 +1309,7 @@ "project_ownership_transfer_confirmation_2": "", "project_renamed_or_deleted": "", "project_renamed_or_deleted_detail": "", + "project_search": "", "project_search_file_count": "", "project_search_file_count_plural": "", "project_search_result_count": "", @@ -1326,6 +1345,8 @@ "reactivate_subscription": "", "read_lines_from_path": "", "read_more": "", + "read_more_about_compile_timeout_changes": "", + "read_more_about_fix_prevent_timeout": "", "read_more_about_free_compile_timeouts_servers": "", 
"read_only_dropbox_sync_message": "", "read_only_token": "", @@ -1381,6 +1402,7 @@ "remove_secondary_email_addresses": "", "remove_sso_login_option": "", "remove_tag": "", + "remove_user": "", "removed_from_project": "", "removing": "", "rename": "", @@ -1401,7 +1423,6 @@ "resend": "", "resend_confirmation_code": "", "resend_confirmation_email": "", - "resend_email": "", "resend_group_invite": "", "resend_link_sso": "", "resend_managed_user_invite": "", @@ -1481,6 +1502,7 @@ "search_whole_word": "", "search_within_selection": "", "searched_path_for_lines_containing": "", + "searching_all_project_files_is_now_available": "", "security": "", "see_suggestions_from_collaborators": "", "select_a_column_or_a_merged_cell_to_align": "", @@ -1522,7 +1544,6 @@ "send_message": "", "send_request": "", "sending": "", - "sent": "", "server_error": "", "server_pro_license_entitlement_line_1": "", "server_pro_license_entitlement_line_2": "", @@ -1543,6 +1564,8 @@ "sharelatex_beta_program": "", "shortcut_to_open_advanced_reference_search": "", "show_all_projects": "", + "show_breadcrumbs": "", + "show_breadcrumbs_in_toolbar": "", "show_document_preamble": "", "show_equation_preview": "", "show_file_tree": "", @@ -1638,6 +1661,7 @@ "start_a_free_trial": "", "start_by_adding_your_email": "", "start_by_fixing_the_first_error_in_your_doc": "", + "start_by_fixing_the_first_error_in_your_document": "", "start_free_trial": "", "start_free_trial_without_exclamation": "", "start_the_conversation_by_saying_hello_or_sharing_an_update": "", @@ -1672,6 +1696,7 @@ "suggest_a_different_fix": "", "suggest_fix": "", "suggested": "", + "suggested_code": "", "suggested_fix_for_error_in_path": "", "suggestion_applied": "", "suggests_code_completions_while_typing": "", @@ -1753,6 +1778,12 @@ "there_is_an_unrecoverable_latex_error": "", "there_was_a_problem_restoring_the_project_please_try_again_in_a_few_moments_or_contact_us": "", "they_lose_access_to_account": "", + 
"they_will_be_removed_from_the_group": "", + "they_will_continue_to_have_access_to_any_projects_shared_with_them": "", + "they_will_no_longer_be_a_managed_user": "", + "they_will_retain_ownership_of_projects_currently_owned_by_them_and_collaborators_will_become_read_only": "", + "they_will_retain_their_existing_account_on_the_free_plan": "", + "they_wont_be_able_to_log_in_with_sso_they_will_need_to_set_password": "", "this_action_cannot_be_reversed": "", "this_action_cannot_be_undone": "", "this_address_will_be_shown_on_the_invoice": "", @@ -1764,6 +1795,7 @@ "this_is_a_new_feature": "", "this_is_the_file_that_references_pulled_from_your_reference_manager_will_be_added_to": "", "this_project_already_has_maximum_collaborators": "", + "this_project_compiled_but_soon_might_not": "", "this_project_contains_a_file_called_output": "", "this_project_exceeded_collaborator_limit": "", "this_project_exceeded_compile_timeout_limit_on_free_plan": "", @@ -1948,7 +1980,7 @@ "updating": "", "upgrade": "", "upgrade_cc_btn": "", - "upgrade_for_12x_more_compile_time": "", + "upgrade_for_more_compile_time": "", "upgrade_my_plan": "", "upgrade_now": "", "upgrade_plan": "", @@ -1979,6 +2011,7 @@ "user_deletion_error": "", "user_deletion_password_reset_tip": "", "user_first_name_attribute": "", + "user_has_left_organization_and_need_to_transfer_their_projects": "", "user_last_name_attribute": "", "user_sessions": "", "using_latex": "", @@ -2045,8 +2078,8 @@ "were_making_some_changes_to_project_sharing_this_means_you_will_be_visible": "", "were_performing_maintenance": "", "were_redesigning_our_editor_to_make_it_easier_to_use": "", - "weve_recently_reduced_the_compile_timeout_limit_which_may_have_affected_this_project": "", - "weve_recently_reduced_the_compile_timeout_limit_which_may_have_affected_your_project": "", + "were_reducing_compile_timeout": "", + "what_did_you_find_most_helpful": "", "what_do_you_need_help_with": "", "what_does_this_mean": "", "what_does_this_mean_for_you": "", 
@@ -2104,6 +2137,7 @@ "you_can_select_or_invite_collaborator": "", "you_can_select_or_invite_collaborator_plural": "", "you_can_still_use_your_premium_features": "", + "you_cant_add_or_change_password_due_to_ldap_or_sso": "", "you_cant_add_or_change_password_due_to_sso": "", "you_cant_join_this_group_subscription": "", "you_dont_have_any_add_ons_on_your_account": "", @@ -2117,6 +2151,7 @@ "you_have_been_removed_from_this_project_and_will_be_redirected_to_project_dashboard": "", "you_have_x_licenses_and_your_plan_supports_up_to_y": "", "you_have_x_licenses_on_your_subscription": "", + "you_may_be_able_to_fix_issues_to_speed_up_the_compile": "", "you_need_to_configure_your_sso_settings": "", "you_unpaused_your_subscription": "", "you_will_be_able_to_reassign_subscription": "", @@ -2151,6 +2186,7 @@ "your_plan_is_limited_to_n_editors": "", "your_plan_is_limited_to_n_editors_plural": "", "your_premium_plan_is_paused": "", + "your_project_compiled_but_soon_might_not": "", "your_project_exceeded_collaborator_limit": "", "your_project_exceeded_compile_timeout_limit_on_free_plan": "", "your_project_near_compile_timeout_limit": "", diff --git a/services/web/frontend/fonts/material-symbols/MaterialSymbolsRoundedUnfilledPartialSlice.woff2 b/services/web/frontend/fonts/material-symbols/MaterialSymbolsRoundedUnfilledPartialSlice.woff2 index df942df176..a507329c8e 100644 Binary files a/services/web/frontend/fonts/material-symbols/MaterialSymbolsRoundedUnfilledPartialSlice.woff2 and b/services/web/frontend/fonts/material-symbols/MaterialSymbolsRoundedUnfilledPartialSlice.woff2 differ diff --git a/services/web/frontend/fonts/material-symbols/unfilled-symbols.mjs b/services/web/frontend/fonts/material-symbols/unfilled-symbols.mjs index baefac05aa..222be1fd36 100644 --- a/services/web/frontend/fonts/material-symbols/unfilled-symbols.mjs +++ b/services/web/frontend/fonts/material-symbols/unfilled-symbols.mjs @@ -4,26 +4,32 @@ // You may need to hard reload your browser window to see 
the changes. export default /** @type {const} */ ([ + 'auto_delete', 'book_5', 'brush', 'code', + 'content_copy', 'create_new_folder', 'delete', 'description', + 'error', 'experiment', 'forum', 'help', 'image', 'info', 'integration_instructions', + 'lightbulb', 'note_add', 'picture_as_pdf', 'rate_review', 'report', + 'search', 'settings', 'space_dashboard', 'table_chart', + 'thumb_down', + 'thumb_up', 'upload_file', 'web_asset', - 'error', ]) diff --git a/services/web/frontend/js/features/chat/context/chat-context.tsx b/services/web/frontend/js/features/chat/context/chat-context.tsx index 9feca60579..2ba0ff5f5d 100644 --- a/services/web/frontend/js/features/chat/context/chat-context.tsx +++ b/services/web/frontend/js/features/chat/context/chat-context.tsx @@ -193,7 +193,7 @@ export const ChatContext = createContext< >(undefined) export const ChatProvider: FC = ({ children }) => { - const chatEnabled = getMeta('ol-chatEnabled') + const chatEnabled = getMeta('ol-capabilities')?.includes('chat') const clientId = useRef() if (clientId.current === undefined) { diff --git a/services/web/frontend/js/features/contact-form/index.js b/services/web/frontend/js/features/contact-form/index.js index 0b4a4898aa..51aff806e3 100644 --- a/services/web/frontend/js/features/contact-form/index.js +++ b/services/web/frontend/js/features/contact-form/index.js @@ -23,7 +23,7 @@ document }) document.querySelectorAll('[data-ol-contact-form]').forEach(el => { - el.addEventListener('submit', function (e) { + el.addEventListener('submit', function () { const emailValue = document.querySelector( '[data-ol-contact-form-email-input]' ).value diff --git a/services/web/frontend/js/features/contact-form/search.js b/services/web/frontend/js/features/contact-form/search.js index 10e2ab2f63..1787a068be 100644 --- a/services/web/frontend/js/features/contact-form/search.js +++ b/services/web/frontend/js/features/contact-form/search.js @@ -47,8 +47,9 @@ export function setupSearch(formEl) { const iconEl = 
document.createElement('i') iconEl.className = 'material-symbols dropdown-item-trailing-icon' - iconEl.innerText = 'open_in_new' + iconEl.textContent = 'open_in_new' iconEl.setAttribute('aria-hidden', 'true') + iconEl.translate = false linkEl.append(iconEl) resultsEl.append(liEl) diff --git a/services/web/frontend/js/features/editor-left-menu/context/project-settings-context.tsx b/services/web/frontend/js/features/editor-left-menu/context/project-settings-context.tsx index e40c4c6872..e5cd576ba1 100644 --- a/services/web/frontend/js/features/editor-left-menu/context/project-settings-context.tsx +++ b/services/web/frontend/js/features/editor-left-menu/context/project-settings-context.tsx @@ -27,6 +27,7 @@ type ProjectSettingsSetterContextValue = { setLineHeight: (lineHeight: UserSettings['lineHeight']) => void setPdfViewer: (pdfViewer: UserSettings['pdfViewer']) => void setMathPreview: (mathPreview: UserSettings['mathPreview']) => void + setBreadcrumbs: (breadcrumbs: UserSettings['breadcrumbs']) => void } type ProjectSettingsContextValue = Partial & @@ -74,6 +75,8 @@ export const ProjectSettingsProvider: FC = ({ setPdfViewer, mathPreview, setMathPreview, + breadcrumbs, + setBreadcrumbs, } = useUserWideSettings() useProjectWideSettingsSocketListener() @@ -110,6 +113,8 @@ export const ProjectSettingsProvider: FC = ({ setPdfViewer, mathPreview, setMathPreview, + breadcrumbs, + setBreadcrumbs, }), [ compiler, @@ -142,6 +147,8 @@ export const ProjectSettingsProvider: FC = ({ setPdfViewer, mathPreview, setMathPreview, + breadcrumbs, + setBreadcrumbs, ] ) diff --git a/services/web/frontend/js/features/editor-left-menu/hooks/use-root-doc-id.tsx b/services/web/frontend/js/features/editor-left-menu/hooks/use-root-doc-id.tsx index ca2e85841f..8a704f87ac 100644 --- a/services/web/frontend/js/features/editor-left-menu/hooks/use-root-doc-id.tsx +++ b/services/web/frontend/js/features/editor-left-menu/hooks/use-root-doc-id.tsx @@ -6,7 +6,7 @@ import useSaveProjectSettings from 
'./use-save-project-settings' export default function useRootDocId() { const [rootDocId] = - useScopeValue('project.rootDoc_id') + useScopeValue('project.rootDocId') const { permissionsLevel } = useEditorContext() const saveProjectSettings = useSaveProjectSettings() diff --git a/services/web/frontend/js/features/editor-left-menu/hooks/use-save-project-settings.tsx b/services/web/frontend/js/features/editor-left-menu/hooks/use-save-project-settings.tsx index 07a20a10fa..f34c506708 100644 --- a/services/web/frontend/js/features/editor-left-menu/hooks/use-save-project-settings.tsx +++ b/services/web/frontend/js/features/editor-left-menu/hooks/use-save-project-settings.tsx @@ -19,13 +19,7 @@ export default function useSaveProjectSettings() { await saveProjectSettings(projectId, { [key]: newSetting, }) - - // rootDocId is used in our tsx and our endpoint, but rootDoc_id is used in our project $scope, etc - // as we use both namings in many files, and convert back and forth, - // its complicated to seperate and choose one name for all usages - // todo: make rootDocId or rootDoc_id consistent, and remove need for this/ other conversions - const settingsKey = key === 'rootDocId' ? 
'rootDoc_id' : key - setProjectSettings({ ...projectSettings, [settingsKey]: newSetting }) + setProjectSettings({ ...projectSettings, [key]: newSetting }) } } } diff --git a/services/web/frontend/js/features/editor-left-menu/hooks/use-user-wide-settings.tsx b/services/web/frontend/js/features/editor-left-menu/hooks/use-user-wide-settings.tsx index 70202c9446..978148721a 100644 --- a/services/web/frontend/js/features/editor-left-menu/hooks/use-user-wide-settings.tsx +++ b/services/web/frontend/js/features/editor-left-menu/hooks/use-user-wide-settings.tsx @@ -20,6 +20,7 @@ export default function useUserWideSettings() { lineHeight, pdfViewer, mathPreview, + breadcrumbs, } = userSettings const setOverallTheme = useSetOverallTheme() @@ -93,6 +94,13 @@ export default function useUserWideSettings() { [saveUserSettings] ) + const setBreadcrumbs = useCallback( + (breadcrumbs: UserSettings['breadcrumbs']) => { + saveUserSettings('breadcrumbs', breadcrumbs) + }, + [saveUserSettings] + ) + return { autoComplete, setAutoComplete, @@ -116,5 +124,7 @@ export default function useUserWideSettings() { setPdfViewer, mathPreview, setMathPreview, + breadcrumbs, + setBreadcrumbs, } } diff --git a/services/web/frontend/js/features/editor-navigation-toolbar/components/toolbar-header.tsx b/services/web/frontend/js/features/editor-navigation-toolbar/components/toolbar-header.tsx index 4304768c48..87bcbc0aac 100644 --- a/services/web/frontend/js/features/editor-navigation-toolbar/components/toolbar-header.tsx +++ b/services/web/frontend/js/features/editor-navigation-toolbar/components/toolbar-header.tsx @@ -80,7 +80,7 @@ const ToolbarHeader = React.memo(function ToolbarHeader({ openShareModal: () => void trackChangesVisible: boolean | undefined }) { - const chatEnabled = getMeta('ol-chatEnabled') + const chatEnabled = getMeta('ol-capabilities')?.includes('chat') const { t } = useTranslation() const shouldDisplayPublishButton = hasPublishPermissions && PublishButton diff --git 
a/services/web/frontend/js/features/event-tracking/search-events.ts b/services/web/frontend/js/features/event-tracking/search-events.ts index cd9ff4b8ba..630d07aeaa 100644 --- a/services/web/frontend/js/features/event-tracking/search-events.ts +++ b/services/web/frontend/js/features/event-tracking/search-events.ts @@ -6,7 +6,7 @@ type SearchEventSegmentation = { searchType: 'full-project' } & ( | { method: 'keyboard' } - | { method: 'button'; location: 'toolbar' | 'search-form' } + | { method: 'button'; location: 'toolbar' | 'search-form' | 'rail' } )) | ({ searchType: 'document' diff --git a/services/web/frontend/js/features/file-tree/components/file-tree-create/modes/file-tree-upload-doc.tsx b/services/web/frontend/js/features/file-tree/components/file-tree-create/modes/file-tree-upload-doc.tsx index 2ffd591032..909e1a1962 100644 --- a/services/web/frontend/js/features/file-tree/components/file-tree-create/modes/file-tree-upload-doc.tsx +++ b/services/web/frontend/js/features/file-tree/components/file-tree-create/modes/file-tree-upload-doc.tsx @@ -176,7 +176,6 @@ export default function FileTreeUploadDoc() { // close the modal when all the uploads completed successfully .on('complete', result => { if (!result.failed.length) { - // $scope.$emit('done', { name: name }) cancel() } }) diff --git a/services/web/frontend/js/features/form-helpers/create-icon.js b/services/web/frontend/js/features/form-helpers/create-icon.js index fc26724bee..13b2a04bf3 100644 --- a/services/web/frontend/js/features/form-helpers/create-icon.js +++ b/services/web/frontend/js/features/form-helpers/create-icon.js @@ -2,6 +2,7 @@ export default function createIcon(type) { const icon = document.createElement('span') icon.className = 'material-symbols' icon.setAttribute('aria-hidden', 'true') + icon.setAttribute('translate', 'no') icon.textContent = type return icon } diff --git a/services/web/frontend/js/features/group-management/components/members-table/dropdown-button.tsx 
b/services/web/frontend/js/features/group-management/components/members-table/dropdown-button.tsx index bd3b5ee10e..9e7038363a 100644 --- a/services/web/frontend/js/features/group-management/components/members-table/dropdown-button.tsx +++ b/services/web/frontend/js/features/group-management/components/members-table/dropdown-button.tsx @@ -20,6 +20,7 @@ import getMeta from '@/utils/meta' import MaterialIcon from '@/shared/components/material-icon' import DropdownListItem from '@/features/ui/components/bootstrap-5/dropdown-list-item' import { Spinner } from 'react-bootstrap' +import { sendMB } from '@/infrastructure/event-tracking' type resendInviteResponse = { success: boolean @@ -28,6 +29,7 @@ type resendInviteResponse = { type ManagedUserDropdownButtonProps = { user: User openOffboardingModalForUser: (user: User) => void + openRemoveModalForUser: (user: User) => void openUnlinkUserModal: (user: User) => void groupId: string setGroupUserAlert: Dispatch> @@ -36,6 +38,7 @@ type ManagedUserDropdownButtonProps = { export default function DropdownButton({ user, openOffboardingModalForUser, + openRemoveModalForUser, openUnlinkUserModal, groupId, setGroupUserAlert, @@ -57,7 +60,8 @@ export default function DropdownButton({ const managedUsersActive = getMeta('ol-managedUsersActive') const groupSSOActive = getMeta('ol-groupSSOActive') - + const userId = getMeta('ol-user_id') + const isUserGroupManager = getMeta('ol-isUserGroupManager') const userPending = user.invite const isGroupSSOLinked = !userPending && user.enrollment?.sso?.some(sso => sso.groupId === groupId) @@ -169,9 +173,15 @@ export default function DropdownButton({ } const onDeleteUserClick = () => { + sendMB('delete-managed-user-selected') openOffboardingModalForUser(user) } + const onReleaseUserClick = () => { + sendMB('remove-managed-user-selected') + openRemoveModalForUser(user) + } + const onRemoveFromGroup = () => { removeMember(user) } @@ -229,10 +239,13 @@ export default function DropdownButton({ ) } - 
if (isUserManaged && !user.isEntityAdmin) { + if ( + isUserManaged && + !user.isEntityAdmin && + (!isUserGroupManager || userId !== user._id) + ) { buttons.push( ) + buttons.push( + + {t('remove_user')} + + ) } else if (!isUserManaged) { buttons.push( {t('remove_from_group')} @@ -256,7 +277,7 @@ export default function DropdownButton({ if (buttons.length === 0) { buttons.push( - + void + openRemoveModalForUser: (user: User) => void openUnlinkUserModal: (user: User) => void groupId: string setGroupUserAlert: Dispatch> @@ -24,6 +25,7 @@ type ManagedUserRowProps = { export default function MemberRow({ user, openOffboardingModalForUser, + openRemoveModalForUser, openUnlinkUserModal, setGroupUserAlert, groupId, @@ -112,6 +114,7 @@ export default function MemberRow({ ( undefined ) + const [userToRemove, setUserToRemove] = useState(undefined) const [groupUserAlert, setGroupUserAlert] = useState(undefined) const [userToUnlink, setUserToUnlink] = useState(undefined) @@ -101,6 +103,7 @@ export default function MembersList({ groupId }: ManagedUsersListProps) { key={user.email} user={user} openOffboardingModalForUser={setUserToOffboard} + openRemoveModalForUser={setUserToRemove} openUnlinkUserModal={setUserToUnlink} setGroupUserAlert={setGroupUserAlert} groupId={groupId} @@ -116,6 +119,13 @@ export default function MembersList({ groupId }: ManagedUsersListProps) { onClose={() => setUserToOffboard(undefined)} /> )} + {userToRemove && ( + setUserToRemove(undefined)} + /> + )} {userToUnlink && ( { + const handleDeleteUserSubmit = (event: React.FormEvent) => { event.preventDefault() + sendMB('delete-managed-user-confirmed') runAsync( postJSON(`/manage/groups/${groupId}/offboardManagedUser/${user._id}`, { body: { diff --git a/services/web/frontend/js/features/group-management/components/members-table/remove-managed-user-modal.tsx b/services/web/frontend/js/features/group-management/components/members-table/remove-managed-user-modal.tsx new file mode 100644 index 
0000000000..c3c6f8caa4 --- /dev/null +++ b/services/web/frontend/js/features/group-management/components/members-table/remove-managed-user-modal.tsx @@ -0,0 +1,138 @@ +import { User } from '../../../../../../types/group-management/user' +import { useState } from 'react' +import useAsync from '@/shared/hooks/use-async' +import { useTranslation, Trans } from 'react-i18next' +import { useLocation } from '@/shared/hooks/use-location' +import { FetchError, postJSON } from '@/infrastructure/fetch-json' +import { debugConsole } from '@/utils/debugging' +import OLModal, { + OLModalBody, + OLModalFooter, + OLModalHeader, + OLModalTitle, +} from '@/features/ui/components/ol/ol-modal' +import OLFormGroup from '@/features/ui/components/ol/ol-form-group' +import OLButton from '@/features/ui/components/ol/ol-button' +import OLNotification from '@/features/ui/components/ol/ol-notification' +import OLFormControl from '@/features/ui/components/ol/ol-form-control' +import OLFormLabel from '@/features/ui/components/ol/ol-form-label' +import { sendMB } from '@/infrastructure/event-tracking' + +type RemoveManagedUserModalProps = { + user: User + groupId: string + onClose: () => void +} + +export default function RemoveManagedUserModal({ + user, + groupId, + onClose, +}: RemoveManagedUserModalProps) { + const { t } = useTranslation() + const location = useLocation() + const { isLoading, isSuccess, error, setError, runAsync } = useAsync< + any, + any + >() + const [suppliedEmail, setSuppliedEmail] = useState() + const shouldEnableRemoveUserButton = suppliedEmail === user.email + const userFullName = user.last_name + ? 
`${user.first_name || ''} ${user.last_name || ''}` + : user.first_name + + const handleReleaseUserSubmit = (event: React.FormEvent) => { + event.preventDefault() + sendMB('remove-managed-user-confirmed') + runAsync( + postJSON(`/manage/groups/${groupId}/release-managed-user/${user._id}`, { + body: { + verificationEmail: suppliedEmail, + }, + }) + .then(() => { + location.reload() + }) + .catch(err => { + setError( + err instanceof FetchError ? err.getUserFacingMessage() : err.message + ) + debugConsole.error(err) + }) + ) + } + + return ( + +
+ + {t('remove_user')} + + +

+ {t('about_to_remove_user_preamble', { + userName: userFullName, + userEmail: user.email, + })} +

+
    +
  • {t('they_will_be_removed_from_the_group')}
  • +
  • {t('they_will_no_longer_be_a_managed_user')}
  • +
  • + {t('they_will_retain_their_existing_account_on_the_free_plan')} +
  • +
  • + {t( + 'they_will_retain_ownership_of_projects_currently_owned_by_them_and_collaborators_will_become_read_only' + )} +
  • +
  • + {t( + 'they_will_continue_to_have_access_to_any_projects_shared_with_them' + )} +
  • +
  • + {t( + 'they_wont_be_able_to_log_in_with_sso_they_will_need_to_set_password' + )} +
  • +
+

+ ]} // eslint-disable-line react/jsx-key + /> +

+ + + {t('confirm_remove_user_type_email_address', { + userName: userFullName, + })} + + setSuppliedEmail(e.target.value)} + /> + + {error && ( + + )} +
+ + + {t('cancel')} + + + {t('remove_user')} + + +
+
+ ) +} diff --git a/services/web/frontend/js/features/history/components/change-list/dropdown/actions-dropdown.tsx b/services/web/frontend/js/features/history/components/change-list/dropdown/actions-dropdown.tsx index 9029260057..df31a6c58f 100644 --- a/services/web/frontend/js/features/history/components/change-list/dropdown/actions-dropdown.tsx +++ b/services/web/frontend/js/features/history/components/change-list/dropdown/actions-dropdown.tsx @@ -2,20 +2,21 @@ import React, { ReactNode } from 'react' import { Dropdown, DropdownMenu, + DropdownToggle, } from '@/features/ui/components/bootstrap-5/dropdown-menu' -import DropdownToggleWithTooltip from '@/features/ui/components/bootstrap-5/dropdown-toggle-with-tooltip' +import OLTooltip from '@/features/ui/components/ol/ol-tooltip' type ActionDropdownProps = { id: string children: React.ReactNode isOpened: boolean iconTag: ReactNode - toolTipDescription: string + tooltipDescription: string setIsOpened: (isOpened: boolean) => void } function ActionsDropdown(props: ActionDropdownProps) { - const { id, children, isOpened, iconTag, setIsOpened, toolTipDescription } = + const { id, children, isOpened, iconTag, setIsOpened, tooltipDescription } = props return ( setIsOpened(open)} > - + {/* OverlayTrigger won't fire unless the child is a non-react html element (e.g div, span) */} + + + {iconTag} + + + {children} diff --git a/services/web/frontend/js/features/history/components/change-list/dropdown/compare-version-dropdown.tsx b/services/web/frontend/js/features/history/components/change-list/dropdown/compare-version-dropdown.tsx index 91f0bf991a..11967e3302 100644 --- a/services/web/frontend/js/features/history/components/change-list/dropdown/compare-version-dropdown.tsx +++ b/services/web/frontend/js/features/history/components/change-list/dropdown/compare-version-dropdown.tsx @@ -21,7 +21,7 @@ function CompareVersionDropdown({ id={id} isOpened={isOpened} setIsOpened={setIsOpened} - toolTipDescription={t('compare')} + 
tooltipDescription={t('compare')} iconTag={ {permissions.labelVersion && ( - + )} diff --git a/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/add-label.tsx b/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/add-label.tsx index 882bb9a439..1381b620d4 100644 --- a/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/add-label.tsx +++ b/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/add-label.tsx @@ -4,18 +4,12 @@ import OLDropdownMenuItem from '@/features/ui/components/ol/ol-dropdown-menu-ite import OLTagIcon from '@/features/ui/components/ol/icons/ol-tag-icon' import AddLabelModal from '../../add-label-modal' -type DownloadProps = { - projectId: string +type AddLabelProps = { version: number closeDropdown: () => void } -function AddLabel({ - version, - projectId, - closeDropdown, - ...props -}: DownloadProps) { +function AddLabel({ version, closeDropdown, ...props }: AddLabelProps) { const { t } = useTranslation() const [showModal, setShowModal] = useState(false) diff --git a/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/compare-items.tsx b/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/compare-items.tsx index 78ba0aae75..dd236ed98b 100644 --- a/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/compare-items.tsx +++ b/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/compare-items.tsx @@ -34,7 +34,7 @@ function CompareItems({ toVTimestamp: selRange.toVTimestamp, }} closeDropdown={closeDropdown} - toolTipDescription={t('history_compare_from_this_version')} + tooltipDescription={t('history_compare_from_this_version')} icon={ void } function Compare({ comparisonRange, closeDropdown, - toolTipDescription, + tooltipDescription, icon, }: CompareProps) { const { setSelection } = 
useHistoryContext() @@ -32,12 +32,12 @@ function Compare({ return ( diff --git a/services/web/frontend/js/features/history/components/file-tree/history-file-tree-doc.tsx b/services/web/frontend/js/features/history/components/file-tree/history-file-tree-doc.tsx index 3b788eb046..e3543ef527 100644 --- a/services/web/frontend/js/features/history/components/file-tree/history-file-tree-doc.tsx +++ b/services/web/frontend/js/features/history/components/file-tree/history-file-tree-doc.tsx @@ -1,9 +1,12 @@ import { memo } from 'react' import classNames from 'classnames' import HistoryFileTreeItem from './history-file-tree-item' -import iconTypeFromName from '../../../file-tree/util/icon-type-from-name' +import iconTypeFromName, { + newEditorIconTypeFromName, +} from '../../../file-tree/util/icon-type-from-name' import type { FileDiff } from '../../services/types/file' import MaterialIcon from '@/shared/components/material-icon' +import { useIsNewEditorEnabled } from '@/features/ide-redesign/utils/new-editor-utils' type HistoryFileTreeDocProps = { file: FileDiff @@ -20,6 +23,16 @@ function HistoryFileTreeDoc({ onClick, onKeyDown, }: HistoryFileTreeDocProps) { + const newEditor = useIsNewEditorEnabled() + const icon = newEditor ? ( + + ) : ( + + ) return (
  • - } + icons={icon} />
  • ) diff --git a/services/web/frontend/js/features/history/components/file-tree/history-file-tree-folder.tsx b/services/web/frontend/js/features/history/components/file-tree/history-file-tree-folder.tsx index 6c2c912f8c..44cb7f2921 100644 --- a/services/web/frontend/js/features/history/components/file-tree/history-file-tree-folder.tsx +++ b/services/web/frontend/js/features/history/components/file-tree/history-file-tree-folder.tsx @@ -6,6 +6,7 @@ import HistoryFileTreeFolderList from './history-file-tree-folder-list' import type { HistoryDoc, HistoryFileTree } from '../../utils/file-tree' import MaterialIcon from '@/shared/components/material-icon' +import { useIsNewEditorEnabled } from '@/features/ide-redesign/utils/new-editor-utils' type HistoryFileTreeFolderProps = { name: string @@ -35,6 +36,7 @@ function HistoryFileTreeFolder({ docs, }: HistoryFileTreeFolderProps) { const { t } = useTranslation() + const newEditor = useIsNewEditorEnabled() const [expanded, setExpanded] = useState(() => { return hasChanges({ name, folders, docs }) @@ -52,10 +54,12 @@ function HistoryFileTreeFolder({ className="file-tree-expand-icon" /> - + {!newEditor && ( + + )} ) @@ -79,7 +83,11 @@ function HistoryFileTreeFolder({ {expanded ? 
( - + ) : null} ) diff --git a/services/web/frontend/js/features/history/extensions/highlights.ts b/services/web/frontend/js/features/history/extensions/highlights.ts index ce274cf724..1f81f82e74 100644 --- a/services/web/frontend/js/features/history/extensions/highlights.ts +++ b/services/web/frontend/js/features/history/extensions/highlights.ts @@ -238,7 +238,7 @@ class EmptyLineAdditionMarkerWidget extends WidgetType { super() } - toDOM(view: EditorView): HTMLElement { + toDOM(): HTMLElement { const element = document.createElement('span') element.classList.add( 'ol-cm-empty-line-addition-marker', @@ -255,7 +255,7 @@ class EmptyLineDeletionMarkerWidget extends WidgetType { super() } - toDOM(view: EditorView): HTMLElement { + toDOM(): HTMLElement { const element = document.createElement('span') element.classList.add( 'ol-cm-empty-line-deletion-marker', @@ -297,7 +297,7 @@ class ChangeGutterMarker extends GutterMarker { super() } - toDOM(view: EditorView) { + toDOM() { const el = document.createElement('div') el.className = 'ol-cm-changed-line-gutter' el.style.setProperty('--hue', this.hue.toString()) diff --git a/services/web/frontend/js/features/ide-react/components/layout/main-layout.tsx b/services/web/frontend/js/features/ide-react/components/layout/main-layout.tsx index b0a65e12bb..93382d613a 100644 --- a/services/web/frontend/js/features/ide-react/components/layout/main-layout.tsx +++ b/services/web/frontend/js/features/ide-react/components/layout/main-layout.tsx @@ -47,7 +47,8 @@ export const MainLayout: FC = () => { handlePaneExpand: handleChatExpand, } = useChatPane() - const chatEnabled = getMeta('ol-chatEnabled') && !isRestrictedTokenMember + const chatEnabled = + getMeta('ol-capabilities')?.includes('chat') && !isRestrictedTokenMember const { t } = useTranslation() diff --git a/services/web/frontend/js/features/ide-react/context/command-registry-context.tsx b/services/web/frontend/js/features/ide-react/context/command-registry-context.tsx index 
e8bec19b8b..ff54c21f2a 100644 --- a/services/web/frontend/js/features/ide-react/context/command-registry-context.tsx +++ b/services/web/frontend/js/features/ide-react/context/command-registry-context.tsx @@ -1,4 +1,11 @@ -import { createContext, useCallback, useContext, useState } from 'react' +import { isMac } from '@/shared/utils/os' +import { + createContext, + useCallback, + useContext, + useMemo, + useState, +} from 'react' type CommandInvocationContext = { location?: string @@ -10,17 +17,21 @@ export type Command = { handler?: (context: CommandInvocationContext) => void href?: string disabled?: boolean - // TODO: Keybinding? } const CommandRegistryContext = createContext( undefined ) +export type Shortcut = { key: string } + +export type Shortcuts = Record + type CommandRegistry = { registry: Map register: (...elements: Command[]) => void unregister: (...id: string[]) => void + shortcuts: Shortcuts } export const CommandRegistryProvider: React.FC = ({ @@ -43,8 +54,35 @@ export const CommandRegistryProvider: React.FC = ({ ) }, []) + // NOTE: This is where we'd add functionality for customising shortcuts. + const shortcuts: Record = useMemo( + () => ({ + undo: [ + { + key: 'Mod-z', + }, + ], + redo: [ + { + key: 'Mod-y', + }, + { + key: 'Mod-Shift-Z', + }, + ], + find: [{ key: 'Mod-f' }], + 'select-all': [{ key: 'Mod-a' }], + 'insert-comment': [{ key: 'Mod-Shift-C' }], + 'format-bold': [{ key: 'Mod-b' }], + 'format-italics': [{ key: 'Mod-i' }], + }), + [] + ) + return ( - + {children} ) @@ -59,3 +97,92 @@ export const useCommandRegistry = (): CommandRegistry => { } return context } + +function parseShortcut(shortcut: Shortcut) { + // Based on KeyBinding type of CodeMirror 6 + let alt = false + let ctrl = false + let shift = false + let meta = false + + let character = null + // isMac ? shortcut.mac : shortcut.key etc. + const shortcutString = shortcut.key ?? '' + const keys = shortcutString.split(/-(?!$)/) ?? 
[] + + for (let i = 0; i < keys.length; i++) { + const isLast = i === keys.length - 1 + const key = keys[i] + if (!key) { + throw new Error('Empty key in shortcut: ' + shortcutString) + } + if (key === 'Alt' || (!isLast && key === 'a')) { + alt = true + } else if ( + key === 'Ctrl' || + key === 'Control' || + (!isLast && key === 'c') + ) { + ctrl = true + } else if (key === 'Shift' || (!isLast && key === 's')) { + shift = true + } else if (key === 'Meta' || key === 'Cmd' || (!isLast && key === 'm')) { + meta = true + } else if (key === 'Mod') { + if (isMac) { + meta = true + } else { + ctrl = true + } + } else { + if (key === 'Space') { + character = ' ' + } + if (!isLast) { + throw new Error( + 'Character key must be last in shortcut: ' + shortcutString + ) + } + if (key.length !== 1) { + throw new Error(`Invalid key '${key}' in shortcut: ${shortcutString}`) + } + if (character) { + throw new Error('Multiple characters in shortcut: ' + shortcutString) + } + character = key + } + } + if (!character) { + throw new Error('No character in shortcut: ' + shortcutString) + } + + return { + alt, + ctrl, + shift, + meta, + character, + } +} + +export const formatShortcut = (shortcut: Shortcut): string => { + const { alt, ctrl, shift, meta, character } = parseShortcut(shortcut) + + if (isMac) { + return [ + ctrl ? '⌃' : '', + alt ? '⌥' : '', + shift ? '⇧' : '', + meta ? '⌘' : '', + character.toUpperCase(), + ].join('') + } + + return [ + ctrl ? 'Ctrl' : '', + shift ? 'Shift' : '', + meta ? 'Meta' : '', + alt ? 
'Alt' : '', + character.toUpperCase(), + ].join(' ') +} diff --git a/services/web/frontend/js/features/ide-react/context/editor-manager-context.tsx b/services/web/frontend/js/features/ide-react/context/editor-manager-context.tsx index e1bb49c39c..e830d7ec1a 100644 --- a/services/web/frontend/js/features/ide-react/context/editor-manager-context.tsx +++ b/services/web/frontend/js/features/ide-react/context/editor-manager-context.tsx @@ -18,6 +18,7 @@ import { useConnectionContext } from '@/features/ide-react/context/connection-co import { debugConsole } from '@/utils/debugging' import { DocumentContainer } from '@/features/ide-react/editor/document-container' import { useLayoutContext } from '@/shared/context/layout-context' +import { useUserContext } from '@/shared/context/user-context' import { GotoLineOptions } from '@/features/ide-react/types/goto-line-options' import { Doc } from '../../../../../types/doc' import { useFileTreeData } from '@/shared/context/file-tree-data-context' @@ -99,6 +100,7 @@ export const EditorManagerProvider: FC = ({ const { view, setView } = useLayoutContext() const { showGenericMessageModal, genericModalVisible, showOutOfSyncModal } = useModalsContext() + const { id: userId } = useUserContext() const [showSymbolPalette, setShowSymbolPalette] = useScopeValue( 'editor.showSymbolPalette' @@ -309,7 +311,7 @@ export const EditorManagerProvider: FC = ({ const tryToggle = () => { const saved = doc.getInflightOp() == null && doc.getPendingOp() == null if (saved) { - doc.setTrackingChanges(want) + doc.setTrackChangesUserId(want ? 
userId : null) setTrackChanges(want) } else { syncTimeoutRef.current = window.setTimeout(tryToggle, 100) @@ -318,7 +320,7 @@ export const EditorManagerProvider: FC = ({ tryToggle() }, - [setTrackChanges] + [setTrackChanges, userId] ) const doOpenNewDocument = useCallback( diff --git a/services/web/frontend/js/features/ide-react/context/ide-react-context.tsx b/services/web/frontend/js/features/ide-react/context/ide-react-context.tsx index bb3d0c1a3c..51ecbdc6c9 100644 --- a/services/web/frontend/js/features/ide-react/context/ide-react-context.tsx +++ b/services/web/frontend/js/features/ide-react/context/ide-react-context.tsx @@ -16,7 +16,6 @@ import { } from '@/features/ide-react/create-ide-event-emitter' import { JoinProjectPayload } from '@/features/ide-react/connection/join-project-payload' import { useConnectionContext } from '@/features/ide-react/context/connection-context' -import { getMockIde } from '@/shared/context/mock/mock-ide' import { populateEditorScope } from '@/features/ide-react/scope-adapters/editor-manager-context-adapter' import { postJSON } from '@/infrastructure/fetch-json' import { ReactScopeEventEmitter } from '@/features/ide-react/scope-event-emitter/react-scope-event-emitter' @@ -128,10 +127,11 @@ export const IdeReactProvider: FC = ({ children }) => { // Populate scope values when joining project, then fire project:joined event useEffect(() => { function handleJoinProjectResponse({ - project, + project: { rootDoc_id: rootDocId, ..._project }, permissionsLevel, }: JoinProjectPayload) { - scopeStore.set('project', { rootDoc_id: null, ...project }) + const project = { ..._project, rootDocId } + scopeStore.set('project', project) scopeStore.set('permissionsLevel', permissionsLevel) // Make watchers update immediately scopeStore.flushUpdates() @@ -157,11 +157,11 @@ export const IdeReactProvider: FC = ({ children }) => { const ide = useMemo(() => { return { - ...getMockIde(), + _id: projectId, socket, reportError, } - }, [socket, reportError]) + 
}, [projectId, socket, reportError]) const value = useMemo( () => ({ diff --git a/services/web/frontend/js/features/ide-react/context/online-users-context.tsx b/services/web/frontend/js/features/ide-react/context/online-users-context.tsx index 1dba40e6d7..1195f9ae7c 100644 --- a/services/web/frontend/js/features/ide-react/context/online-users-context.tsx +++ b/services/web/frontend/js/features/ide-react/context/online-users-context.tsx @@ -95,7 +95,7 @@ export const OnlineUsersProvider: FC = ({ for (const [clientId, user] of Object.entries(onlineUsers)) { const decoratedUser = { ...user } const docId = user.doc_id - if (docId) { + if (docId && fileTreeData) { decoratedUser.doc = findDocEntityById(fileTreeData, docId) } diff --git a/services/web/frontend/js/features/ide-react/context/snapshot-context.tsx b/services/web/frontend/js/features/ide-react/context/snapshot-context.tsx index 70f170a8b0..817e03fe86 100644 --- a/services/web/frontend/js/features/ide-react/context/snapshot-context.tsx +++ b/services/web/frontend/js/features/ide-react/context/snapshot-context.tsx @@ -24,10 +24,14 @@ export const StubSnapshotUtils = { throw new Error('not implemented') } }, + // unused vars kept to document the interface + // eslint-disable-next-line @typescript-eslint/no-unused-vars buildFileTree(snapshot: Snapshot): Folder { throw new Error('not implemented') }, - createFolder(_id: string, name: string): Folder { + // unused vars kept to document the interface + // eslint-disable-next-line @typescript-eslint/no-unused-vars + createFolder(id: string, name: string): Folder { throw new Error('not implemented') }, } diff --git a/services/web/frontend/js/features/ide-react/editor/document-container.ts b/services/web/frontend/js/features/ide-react/editor/document-container.ts index fee359f146..28bcb955d1 100644 --- a/services/web/frontend/js/features/ide-react/editor/document-container.ts +++ b/services/web/frontend/js/features/ide-react/editor/document-container.ts @@ -196,9 
+196,13 @@ export class DocumentContainer extends EventEmitter { return this.doc?.hasBufferedOps() } - setTrackingChanges(track_changes: boolean) { + setTrackChangesUserId(userId: string | null) { + this.track_changes_as = userId if (this.doc) { - this.doc.track_changes = track_changes + this.doc.setTrackChangesUserId(userId) + } + if (this.cm6) { + this.cm6.setTrackChangesUserId(userId) } } @@ -595,7 +599,7 @@ export class DocumentContainer extends EventEmitter { this.doc.on('remoteop', (...ops: AnyOperation[]) => { return this.trigger('remoteop', ...ops) }) - this.doc.on('op:sent', (op: AnyOperation) => { + this.doc.on('op:sent', () => { return this.trigger('op:sent') }) this.doc.on('op:acknowledged', (op: AnyOperation) => { @@ -605,7 +609,7 @@ export class DocumentContainer extends EventEmitter { }) return this.trigger('op:acknowledged') }) - this.doc.on('op:timeout', (op: AnyOperation) => { + this.doc.on('op:timeout', () => { this.trigger('op:timeout') return this.onError(new Error('op timed out')) }) diff --git a/services/web/frontend/js/features/ide-react/editor/share-js-doc.ts b/services/web/frontend/js/features/ide-react/editor/share-js-doc.ts index 96e866afec..5b362299d2 100644 --- a/services/web/frontend/js/features/ide-react/editor/share-js-doc.ts +++ b/services/web/frontend/js/features/ide-react/editor/share-js-doc.ts @@ -12,17 +12,20 @@ import { Message, ShareJsConnectionState, ShareJsOperation, - ShareJsTextType, TrackChangesIdSeeds, } from '@/features/ide-react/editor/types/document' import { EditorFacade } from '@/features/source-editor/extensions/realtime' import { recordDocumentFirstChangeEvent } from '@/features/event-tracking/document-first-change-event' import getMeta from '@/utils/meta' -import { HistoryOTType } from './share-js-history-ot-type' -import { StringFileData } from 'overleaf-editor-core/index' +import { historyOTType } from './share-js-history-ot-type' +import { + StringFileData, + TrackedChangeList, + EditOperationBuilder, +} from 
'overleaf-editor-core' import { - RawEditOperation, StringFileRawData, + RawEditOperation, } from 'overleaf-editor-core/lib/types' // All times below are in milliseconds @@ -68,19 +71,17 @@ export class ShareJsDoc extends EventEmitter { readonly type: OTType = 'sharejs-text-ot' ) { super() - let sharejsType: ShareJsTextType = sharejs.types.text + let sharejsType // Decode any binary bits of data let snapshot: string | StringFileData if (this.type === 'history-ot') { snapshot = StringFileData.fromRaw( docLines as unknown as StringFileRawData ) - sharejsType = new HistoryOTType(snapshot) as ShareJsTextType< - StringFileData, - RawEditOperation[] - > + sharejsType = historyOTType } else { snapshot = docLines.map(line => decodeUtf8(line)).join('\n') + sharejsType = sharejs.types.text } this.connection = { @@ -159,6 +160,18 @@ export class ShareJsDoc extends EventEmitter { this.removeCarriageReturnCharFromShareJsDoc() } + setTrackChangesUserId(userId: string | null) { + this.track_changes = userId != null + } + + getTrackedChanges() { + if (this._doc.otType === 'history-ot') { + return this._doc.snapshot.getTrackedChanges() as TrackedChangeList + } else { + return null + } + } + private removeCarriageReturnCharFromShareJsDoc() { const doc = this._doc let nextPos @@ -253,7 +266,15 @@ export class ShareJsDoc extends EventEmitter { // issues are resolved. 
processUpdateFromServer(message: Message) { try { - this._doc._onMessage(message) + if (this.type === 'history-ot' && message.op != null) { + const ops = message.op as RawEditOperation[] + this._doc._onMessage({ + ...message, + op: ops.map(EditOperationBuilder.fromJSON), + }) + } else { + this._doc._onMessage(message) + } } catch (error) { // Version mismatches are thrown as errors debugConsole.log(error) diff --git a/services/web/frontend/js/features/ide-react/editor/share-js-history-ot-type.ts b/services/web/frontend/js/features/ide-react/editor/share-js-history-ot-type.ts index fde66d89a1..81243bb8c7 100644 --- a/services/web/frontend/js/features/ide-react/editor/share-js-history-ot-type.ts +++ b/services/web/frontend/js/features/ide-react/editor/share-js-history-ot-type.ts @@ -1,49 +1,79 @@ -import EventEmitter from '@/utils/EventEmitter' import { - EditOperationBuilder, + EditOperation, EditOperationTransformer, - InsertOp, - RemoveOp, - RetainOp, StringFileData, - TextOperation, } from 'overleaf-editor-core' -import { RawEditOperation } from 'overleaf-editor-core/lib/types' +import { ShareDoc } from '../../../../../types/share-doc' -export class HistoryOTType extends EventEmitter { - // stub interface, these are actually on the Doc - api: HistoryOTType - snapshot: StringFileData +type Api = { + otType: 'history-ot' + trackChangesUserId: string | null - constructor(snapshot: StringFileData) { - super() - this.api = this - this.snapshot = snapshot - } + getText(): string + getLength(): number +} - transformX(raw1: RawEditOperation[], raw2: RawEditOperation[]) { - const [a, b] = EditOperationTransformer.transform( - EditOperationBuilder.fromJSON(raw1[0]), - EditOperationBuilder.fromJSON(raw2[0]) - ) - return [[a.toJSON()], [b.toJSON()]] - } +const api: Api & ThisType = { + otType: 'history-ot', + trackChangesUserId: null, - apply(snapshot: StringFileData, rawEditOperation: RawEditOperation[]) { - const operation = 
EditOperationBuilder.fromJSON(rawEditOperation[0]) + getText() { + return this.snapshot.getContent({ filterTrackedDeletes: true }) + }, + + getLength() { + return this.snapshot.getStringLength() + }, +} + +export const historyOTType = { + api, + + transformX(ops1: EditOperation[], ops2: EditOperation[]) { + // Dynamic programming algorithm: gradually transform both sides in a nested + // loop. + const left = [...ops1] + const right = [...ops2] + for (let i = 0; i < left.length; i++) { + for (let j = 0; j < right.length; j++) { + // At this point: + // left[0..i] is ops1[0..i] rebased over ops2[0..j-1] + // right[0..j] is ops2[0..j] rebased over ops1[0..i-1] + const [a, b] = EditOperationTransformer.transform(left[i], right[j]) + left[i] = a + right[j] = b + } + } + return [left, right] + }, + + apply(snapshot: StringFileData, ops: EditOperation[]) { const afterFile = StringFileData.fromRaw(snapshot.toRaw()) - afterFile.edit(operation) - this.snapshot = afterFile + for (const op of ops) { + afterFile.edit(op) + } return afterFile - } + }, - compose(op1: RawEditOperation[], op2: RawEditOperation[]) { - return [ - EditOperationBuilder.fromJSON(op1[0]) - .compose(EditOperationBuilder.fromJSON(op2[0])) - .toJSON(), - ] - } + compose(ops1: EditOperation[], ops2: EditOperation[]) { + const ops = [...ops1, ...ops2] + let currentOp = ops.shift() + if (currentOp === undefined) { + // No ops to process + return [] + } + const result = [] + for (const op of ops) { + if (currentOp.canBeComposedWith(op)) { + currentOp = currentOp.compose(op) + } else { + result.push(currentOp) + currentOp = op + } + } + result.push(currentOp) + return result + }, // Do not provide normalize, used by submitOp to fixup bad input. // normalize(op: TextOperation) {} @@ -51,83 +81,4 @@ export class HistoryOTType extends EventEmitter { // Do not provide invert, only needed for reverting a rejected update. // We are displaying an out-of-sync modal when an op is rejected. 
// invert(op: TextOperation) {} - - // API - insert(pos: number, text: string, fromUndo: boolean) { - const old = this.getText() - const op = new TextOperation() - op.retain(pos) - op.insert(text) - op.retain(old.length - pos) - this.submitOp([op.toJSON()]) - } - - del(pos: number, length: number, fromUndo: boolean) { - const old = this.getText() - const op = new TextOperation() - op.retain(pos) - op.remove(length) - op.retain(old.length - pos - length) - this.submitOp([op.toJSON()]) - } - - getText() { - return this.snapshot.getContent({ filterTrackedDeletes: true }) - } - - getLength() { - return this.getText().length - } - - _register() { - this.on( - 'remoteop', - (rawEditOperation: RawEditOperation[], oldSnapshot: StringFileData) => { - const operation = EditOperationBuilder.fromJSON(rawEditOperation[0]) - if (operation instanceof TextOperation) { - const str = oldSnapshot.getContent() - if (str.length !== operation.baseLength) - throw new TextOperation.ApplyError( - "The operation's base length must be equal to the string's length.", - operation, - str - ) - - let outputCursor = 0 - let inputCursor = 0 - for (const op of operation.ops) { - if (op instanceof RetainOp) { - inputCursor += op.length - outputCursor += op.length - } else if (op instanceof InsertOp) { - this.emit( - 'insert', - outputCursor, - op.insertion, - op.insertion.length - ) - outputCursor += op.insertion.length - } else if (op instanceof RemoveOp) { - this.emit( - 'delete', - outputCursor, - str.slice(inputCursor, inputCursor + op.length) - ) - inputCursor += op.length - } - } - - if (inputCursor !== str.length) - throw new TextOperation.ApplyError( - "The operation didn't operate on the whole string.", - operation, - str - ) - } - } - ) - } - - // stub-interface, provided by sharejs.Doc - submitOp(op: RawEditOperation[]) {} } diff --git a/services/web/frontend/js/features/ide-react/editor/types/document.ts b/services/web/frontend/js/features/ide-react/editor/types/document.ts index 
fbed3ab8f1..f6e5f6aebb 100644 --- a/services/web/frontend/js/features/ide-react/editor/types/document.ts +++ b/services/web/frontend/js/features/ide-react/editor/types/document.ts @@ -1,5 +1,6 @@ import { StringFileData } from 'overleaf-editor-core' import { AnyOperation } from '../../../../../../types/change' +import { RawEditOperation } from 'overleaf-editor-core/lib/types' export type Version = number @@ -36,4 +37,5 @@ export type Message = { doc?: string snapshot?: string | StringFileData type?: ShareJsTextType + op?: AnyOperation[] | RawEditOperation[] } diff --git a/services/web/frontend/js/features/ide-redesign/components/breadcrumbs.tsx b/services/web/frontend/js/features/ide-redesign/components/breadcrumbs.tsx index f148e0142e..9949b98c7f 100644 --- a/services/web/frontend/js/features/ide-redesign/components/breadcrumbs.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/breadcrumbs.tsx @@ -1,4 +1,7 @@ -import { findInTreeOrThrow } from '@/features/file-tree/util/find-in-tree' +import { + findInTree, + findInTreeOrThrow, +} from '@/features/file-tree/util/find-in-tree' import { useFileTreeOpenContext } from '@/features/ide-react/context/file-tree-open-context' import { useOutlineContext } from '@/features/ide-react/context/outline-context' import useNestedOutline from '@/features/outline/hooks/use-nested-outline' @@ -39,35 +42,41 @@ export default function Breadcrumbs() { const { highlightedLine, canShowOutline } = useOutlineContext() const folderHierarchy = useMemo(() => { - if (!openEntity || !fileTreeData) { + if (openEntity?.type !== 'doc' || !fileTreeData) { return [] } - return openEntity.path - .filter(id => id !== fileTreeData._id) // Filter out the root folder - .map(id => { - return findInTreeOrThrow(fileTreeData, id)?.entity - }) + try { + return openEntity.path + .filter(id => id !== fileTreeData._id) // Filter out the root folder + .map(id => { + return findInTreeOrThrow(fileTreeData, id)?.entity + }) + } catch { + // If any of 
the folders in the path are not found, the entire hierarchy + // is invalid. + return [] + } }, [openEntity, fileTreeData]) const fileName = useMemo(() => { // NOTE: openEntity.entity.name may not always be accurate, so we read it // from the file tree data instead. - if (!openEntity || !fileTreeData) { + if (openEntity?.type !== 'doc' || !fileTreeData) { return undefined } - return findInTreeOrThrow(fileTreeData, openEntity.entity._id)?.entity.name + return findInTree(fileTreeData, openEntity.entity._id)?.entity.name }, [fileTreeData, openEntity]) const outlineHierarchy = useMemo(() => { - if (!canShowOutline || !outline) { + if (openEntity?.type !== 'doc' || !canShowOutline || !outline) { return [] } return constructOutlineHierarchy(outline.items, highlightedLine) - }, [outline, highlightedLine, canShowOutline]) + }, [outline, highlightedLine, canShowOutline, openEntity]) - if (!openEntity || !fileTreeData) { + if (openEntity?.type !== 'doc' || !fileTreeData) { return null } diff --git a/services/web/frontend/js/features/ide-redesign/components/chat/chat.tsx b/services/web/frontend/js/features/ide-redesign/components/chat/chat.tsx index 9ebe33e065..54d098c6c8 100644 --- a/services/web/frontend/js/features/ide-redesign/components/chat/chat.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/chat/chat.tsx @@ -9,8 +9,8 @@ import { useUserContext } from '@/shared/context/user-context' import { lazy, Suspense, useEffect } from 'react' import { useTranslation } from 'react-i18next' import classNames from 'classnames' -import { RailPanelHeader } from '../rail' import { RailIndicator } from '../rail-indicator' +import RailPanelHeader from '../rail-panel-header' const MessageList = lazy(() => import('../../../chat/components/message-list')) diff --git a/services/web/frontend/js/features/ide-redesign/components/errors.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/error-indicator.tsx similarity index 56% rename from 
services/web/frontend/js/features/ide-redesign/components/errors.tsx rename to services/web/frontend/js/features/ide-redesign/components/error-logs/error-indicator.tsx index 2313022d3c..7b721a1d51 100644 --- a/services/web/frontend/js/features/ide-redesign/components/errors.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/error-logs/error-indicator.tsx @@ -1,9 +1,7 @@ -import PdfLogsViewer from '@/features/pdf-preview/components/pdf-logs-viewer' -import { PdfPreviewProvider } from '@/features/pdf-preview/components/pdf-preview-provider' import { useDetachCompileContext as useCompileContext } from '@/shared/context/detach-compile-context' -import { RailIndicator } from './rail-indicator' +import { RailIndicator } from '../rail-indicator' -export const ErrorIndicator = () => { +export default function ErrorIndicator() { const { logEntries } = useCompileContext() if (!logEntries) { @@ -25,11 +23,3 @@ export const ErrorIndicator = () => { /> ) } - -export const ErrorPane = () => { - return ( - - - - ) -} diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs-header.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs-header.tsx new file mode 100644 index 0000000000..2f3a54b095 --- /dev/null +++ b/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs-header.tsx @@ -0,0 +1,98 @@ +import { useTranslation } from 'react-i18next' +import RailPanelHeader from '../rail-panel-header' +import OLIconButton from '@/features/ui/components/ol/ol-icon-button' +import { useDetachCompileContext as useCompileContext } from '@/shared/context/detach-compile-context' +import { + Dropdown, + DropdownMenu, + DropdownToggle, +} from '@/features/ui/components/bootstrap-5/dropdown-menu' +import PdfFileList from '@/features/pdf-preview/components/pdf-file-list' +import { forwardRef } from 'react' +import OLTooltip from '@/features/ui/components/ol/ol-tooltip' + +export default 
function ErrorLogsHeader() { + const { t } = useTranslation() + + return ( + , + , + ]} + /> + ) +} + +const ClearCacheButton = () => { + const { compiling, clearCache, clearingCache } = useCompileContext() + const { t } = useTranslation() + + return ( + + clearCache()} + className="rail-panel-header-button-subdued" + icon="auto_delete" + isLoading={clearingCache} + disabled={clearingCache || compiling} + accessibilityLabel={t('clear_cached_files')} + size="sm" + /> + + ) +} + +const DownloadFileDropdown = () => { + const { fileList } = useCompileContext() + + const { t } = useTranslation() + + return ( + + + {t('other_logs_and_files')} + + {fileList && ( + + + + )} + + ) +} + +const DownloadFileDropdownToggleButton = forwardRef< + HTMLButtonElement, + { onClick: React.MouseEventHandler } +>(function DownloadFileDropdownToggleButton({ onClick }, ref) { + const { compiling, fileList } = useCompileContext() + const { t } = useTranslation() + + return ( + + + + ) +}) diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs-panel.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs-panel.tsx new file mode 100644 index 0000000000..2cff048256 --- /dev/null +++ b/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs-panel.tsx @@ -0,0 +1,14 @@ +import { PdfPreviewProvider } from '@/features/pdf-preview/components/pdf-preview-provider' +import ErrorLogs from './error-logs' +import ErrorLogsHeader from './error-logs-header' + +export default function ErrorLogsPanel() { + return ( + +
    + + +
    +
    + ) +} diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs.tsx new file mode 100644 index 0000000000..a6a62e998a --- /dev/null +++ b/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs.tsx @@ -0,0 +1,142 @@ +import { useTranslation } from 'react-i18next' +import { ElementType, memo, useMemo, useState } from 'react' +import { usePdfPreviewContext } from '@/features/pdf-preview/components/pdf-preview-provider' +import StopOnFirstErrorPrompt from '@/features/pdf-preview/components/stop-on-first-error-prompt' +import PdfPreviewError from '@/features/pdf-preview/components/pdf-preview-error' +import PdfValidationIssue from '@/features/pdf-preview/components/pdf-validation-issue' +import PdfLogsEntries from '@/features/pdf-preview/components/pdf-logs-entries' +import PdfPreviewErrorBoundaryFallback from '@/features/pdf-preview/components/pdf-preview-error-boundary-fallback' +import withErrorBoundary from '@/infrastructure/error-boundary' +import { useDetachCompileContext as useCompileContext } from '@/shared/context/detach-compile-context' +import { Nav, NavLink, TabContainer, TabContent } from 'react-bootstrap' +import { LogEntry as LogEntryData } from '@/features/pdf-preview/util/types' +import LogEntry from './log-entry' +import importOverleafModules from '../../../../../macros/import-overleaf-module.macro' + +const logsComponents: Array<{ + import: { default: ElementType } + path: string +}> = importOverleafModules('errorLogsComponents') + +type ErrorLogTab = { + key: string + label: string + entries: LogEntryData[] | undefined +} + +function ErrorLogs() { + const { error, logEntries, rawLog, validationIssues, stoppedOnFirstError } = + useCompileContext() + + const tabs = useMemo(() => { + return [ + { key: 'all', label: 'All', entries: logEntries?.all }, + { key: 'errors', label: 'Errors', entries: 
logEntries?.errors }, + { key: 'warnings', label: 'Warnings', entries: logEntries?.warnings }, + { key: 'info', label: 'Info', entries: logEntries?.typesetting }, + ] + }, [logEntries]) + + const { loadingError } = usePdfPreviewContext() + + const { t } = useTranslation() + + const [activeTab, setActiveTab] = useState('all') + + const entries = useMemo(() => { + return tabs.find(tab => tab.key === activeTab)?.entries || [] + }, [activeTab, tabs]) + + const includeErrors = activeTab === 'all' || activeTab === 'errors' + const includeWarnings = activeTab === 'all' || activeTab === 'warnings' + + return ( + + + {logsComponents.map(({ import: { default: Component }, path }) => ( + + ))} + +
    + {stoppedOnFirstError && includeErrors && } + + {loadingError && ( + + )} + + {error && ( + + )} + + {includeErrors && + validationIssues && + Object.entries(validationIssues).map(([name, issue]) => ( + + ))} + + {entries && ( + 0} + /> + )} + + {rawLog && activeTab === 'all' && ( + + )} +
    +
    +
    + ) +} + +function formatErrorNumber(num: number | undefined) { + if (num === undefined) { + return undefined + } + + if (num > 99) { + return '99+' + } + + return Math.floor(num).toString() +} + +const TabHeader = ({ tab, active }: { tab: ErrorLogTab; active: boolean }) => { + return ( + + {tab.label} +
    + {/* TODO: it would be nice if this number included custom errors */} + {formatErrorNumber(tab.entries?.length)} +
    +
    + ) +} + +export default withErrorBoundary(memo(ErrorLogs), () => ( + +)) diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/log-entry-header.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/log-entry-header.tsx new file mode 100644 index 0000000000..ce43af3744 --- /dev/null +++ b/services/web/frontend/js/features/ide-redesign/components/error-logs/log-entry-header.tsx @@ -0,0 +1,167 @@ +import classNames from 'classnames' +import { useState, useRef, MouseEventHandler, ElementType } from 'react' +import { useTranslation } from 'react-i18next' +import OLTooltip from '@/features/ui/components/ol/ol-tooltip' +import { + ErrorLevel, + SourceLocation, + LogEntry as LogEntryData, +} from '@/features/pdf-preview/util/types' +import useResizeObserver from '@/features/preview/hooks/use-resize-observer' +import OLIconButton from '@/features/ui/components/ol/ol-icon-button' +import importOverleafModules from '../../../../../macros/import-overleaf-module.macro' +import MaterialIcon from '@/shared/components/material-icon' + +const actionComponents = importOverleafModules( + 'pdfLogEntryHeaderActionComponents' +) as { + import: { default: ElementType } + path: string +}[] + +function LogEntryHeader({ + sourceLocation, + level, + headerTitle, + logType, + showSourceLocationLink = true, + onSourceLocationClick, + collapsed, + onToggleCollapsed, + id, + logEntry, + actionButtonsOverride, + openCollapseIconOverride, +}: { + headerTitle: string | React.ReactNode + level: ErrorLevel + logType?: string + sourceLocation?: SourceLocation + showSourceLocationLink?: boolean + onSourceLocationClick?: MouseEventHandler + collapsed: boolean + onToggleCollapsed: () => void + id?: string + logEntry?: LogEntryData + actionButtonsOverride?: React.ReactNode + openCollapseIconOverride?: string +}) { + const { t } = useTranslation() + const logLocationSpanRef = useRef(null) + const [locationSpanOverflown, setLocationSpanOverflown] = 
useState(false) + + useResizeObserver( + logLocationSpanRef, + locationSpanOverflown, + checkLocationSpanOverflow + ) + + const file = sourceLocation ? sourceLocation.file : null + const line = sourceLocation ? sourceLocation.line : null + const logEntryHeaderTextClasses = classNames('log-entry-header-text', { + 'log-entry-header-text-error': level === 'error', + 'log-entry-header-text-warning': level === 'warning', + 'log-entry-header-text-info': level === 'info' || level === 'typesetting', + 'log-entry-header-text-success': level === 'success', + 'log-entry-header-text-raw': level === 'raw', + }) + + function checkLocationSpanOverflow(observedElement: ResizeObserverEntry) { + const spanEl = observedElement.target + const isOverflowing = spanEl.scrollWidth > spanEl.clientWidth + setLocationSpanOverflown(isOverflowing) + } + + const locationText = + showSourceLocationLink && file ? `${file}${line ? `, ${line}` : ''}` : null + + // Because we want an ellipsis on the left-hand side (e.g. "...longfilename.tex"), the + // `log-entry-location` class has text laid out from right-to-left using the CSS + // rule `direction: rtl;`. + // This works most of the times, except when the first character of the filename is considered + // a punctuation mark, like `/` (e.g. `/foo/bar/baz.sty`). In this case, because of + // right-to-left writing rules, the punctuation mark is moved to the right-side of the string, + // resulting in `...bar/baz.sty/` instead of `...bar/baz.sty`. + // To avoid this edge-case, we wrap the `logLocationLinkText` in two directional formatting + // characters: + // * \u202A LEFT-TO-RIGHT EMBEDDING Treat the following text as embedded left-to-right. + // * \u202C POP DIRECTIONAL FORMATTING End the scope of the last LRE, RLE, RLO, or LRO. + // This essentially tells the browser that, althought the text is laid out from right-to-left, + // the wrapped portion of text should follow left-to-right writing rules. + const formattedLocationText = locationText ? 
( + + {`\u202A${locationText}\u202C`} + + ) : null + + const headerTitleText = logType ? `${logType} ${headerTitle}` : headerTitle + + return ( +
    + + + + + {actionButtonsOverride ?? ( +
    + {showSourceLocationLink && ( + + + + )} + {actionComponents.map(({ import: { default: Component }, path }) => ( + + ))} +
    + )} +
    + ) +} + +export default LogEntryHeader diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/log-entry.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/log-entry.tsx new file mode 100644 index 0000000000..a7539450ce --- /dev/null +++ b/services/web/frontend/js/features/ide-redesign/components/error-logs/log-entry.tsx @@ -0,0 +1,140 @@ +import { + Dispatch, + MouseEventHandler, + useCallback, + memo, + SetStateAction, + useState, +} from 'react' +import HumanReadableLogsHints from '../../../../ide/human-readable-logs/HumanReadableLogsHints' +import { sendMB } from '@/infrastructure/event-tracking' +import { + ErrorLevel, + LogEntry as LogEntryData, + SourceLocation, +} from '@/features/pdf-preview/util/types' +import LogEntryHeader from './log-entry-header' +import PdfLogEntryContent from '@/features/pdf-preview/components/pdf-log-entry-content' +import classNames from 'classnames' + +type LogEntryProps = { + headerTitle: string | React.ReactNode + level: ErrorLevel + ruleId?: string + rawContent?: string + logType?: string + formattedContent?: React.ReactNode + extraInfoURL?: string | null + sourceLocation?: SourceLocation + showSourceLocationLink?: boolean + entryAriaLabel?: string + contentDetails?: string[] + onSourceLocationClick?: (sourceLocation: SourceLocation) => void + index?: number + logEntry?: LogEntryData + id?: string + alwaysExpandRawContent?: boolean + className?: string + actionButtonsOverride?: React.ReactNode + openCollapseIconOverride?: string +} + +function LogEntry(props: LogEntryProps) { + const [collapsed, setCollapsed] = useState(true) + + return ( + + ) +} + +export function ControlledLogEntry({ + ruleId, + headerTitle, + rawContent, + logType, + formattedContent, + extraInfoURL, + level, + sourceLocation, + showSourceLocationLink = true, + entryAriaLabel = undefined, + contentDetails, + onSourceLocationClick, + index, + logEntry, + id, + alwaysExpandRawContent = false, + 
className, + collapsed, + setCollapsed, + actionButtonsOverride, + openCollapseIconOverride, +}: LogEntryProps & { + collapsed: boolean + setCollapsed: Dispatch> +}) { + if (ruleId && HumanReadableLogsHints[ruleId]) { + const hint = HumanReadableLogsHints[ruleId] + formattedContent = hint.formattedContent(contentDetails) + extraInfoURL = hint.extraInfoURL + } + + const handleLogEntryLinkClick: MouseEventHandler = + useCallback( + event => { + event.preventDefault() + + if (onSourceLocationClick && sourceLocation) { + onSourceLocationClick(sourceLocation) + + const parts = sourceLocation?.file?.split('.') + const extension = + parts?.length && parts?.length > 1 ? parts.pop() : '' + sendMB('log-entry-link-click', { level, ruleId, extension }) + } + }, + [level, onSourceLocationClick, ruleId, sourceLocation] + ) + + return ( +
    + setCollapsed(collapsed => !collapsed)} + id={id} + logEntry={logEntry} + actionButtonsOverride={actionButtonsOverride} + openCollapseIconOverride={openCollapseIconOverride} + /> +
    + +
    + ) +} + +export default memo(LogEntry) diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/new-error-logs-promo.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/new-error-logs-promo.tsx new file mode 100644 index 0000000000..1589fa819d --- /dev/null +++ b/services/web/frontend/js/features/ide-redesign/components/error-logs/new-error-logs-promo.tsx @@ -0,0 +1,59 @@ +import Close from '@/shared/components/close' +import { useEditorContext } from '@/shared/context/editor-context' +import useTutorial from '@/shared/hooks/promotions/use-tutorial' +import { useCallback, useEffect } from 'react' +import { Overlay, Popover } from 'react-bootstrap' +import { useTranslation } from 'react-i18next' + +const TUTORIAL_KEY = 'new-error-logs-promo' +const EVENT_DATA = { name: 'new-error-logs-promotion' } + +export default function NewErrorLogsPromo({ + target, +}: { + target: HTMLElement | null +}) { + const { t } = useTranslation() + + const { inactiveTutorials } = useEditorContext() + const { showPopup, tryShowingPopup, hideUntilReload, completeTutorial } = + useTutorial(TUTORIAL_KEY, EVENT_DATA) + + useEffect(() => { + if (!inactiveTutorials.includes(TUTORIAL_KEY)) { + tryShowingPopup() + } + }, [tryShowingPopup, inactiveTutorials]) + + const onHide = useCallback(() => { + hideUntilReload() + }, [hideUntilReload]) + + const onClose = useCallback(() => { + completeTutorial({ + action: 'complete', + event: 'promo-dismiss', + }) + }, [completeTutorial]) + + if (!target) { + return null + } + + return ( + + + + {t('error_logs_have_had_an_update')} + + + + + ) +} diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/old-error-pane.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/old-error-pane.tsx new file mode 100644 index 0000000000..7794747d30 --- /dev/null +++ b/services/web/frontend/js/features/ide-redesign/components/error-logs/old-error-pane.tsx @@ -0,0 +1,10 @@ 
+import PdfLogsViewer from '@/features/pdf-preview/components/pdf-logs-viewer' +import { PdfPreviewProvider } from '@/features/pdf-preview/components/pdf-preview-provider' + +export default function OldErrorPane() { + return ( + + + + ) +} diff --git a/services/web/frontend/js/features/ide-redesign/components/full-project-search-panel.tsx b/services/web/frontend/js/features/ide-redesign/components/full-project-search-panel.tsx new file mode 100644 index 0000000000..926341ce89 --- /dev/null +++ b/services/web/frontend/js/features/ide-redesign/components/full-project-search-panel.tsx @@ -0,0 +1,19 @@ +import { ElementType } from 'react' +import importOverleafModules from '../../../../macros/import-overleaf-module.macro' + +const componentModule = importOverleafModules('fullProjectSearchPanel')[0] as + | { + import: { default: ElementType } + path: string + } + | undefined + +export const FullProjectSearchPanel = () => { + if (!componentModule) { + return null + } + const FullProjectSearch = componentModule.import.default + return +} + +export const hasFullProjectSearch = Boolean(componentModule) diff --git a/services/web/frontend/js/features/ide-redesign/components/integrations-panel/integrations-panel.tsx b/services/web/frontend/js/features/ide-redesign/components/integrations-panel/integrations-panel.tsx index d1e4358907..e477602e3e 100644 --- a/services/web/frontend/js/features/ide-redesign/components/integrations-panel/integrations-panel.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/integrations-panel/integrations-panel.tsx @@ -1,7 +1,7 @@ import { ElementType } from 'react' import importOverleafModules from '../../../../../macros/import-overleaf-module.macro' -import { RailPanelHeader } from '../rail' import { useTranslation } from 'react-i18next' +import RailPanelHeader from '../rail-panel-header' const integrationPanelComponents = importOverleafModules( 'integrationPanelComponents' diff --git 
a/services/web/frontend/js/features/ide-redesign/components/main-layout.tsx b/services/web/frontend/js/features/ide-redesign/components/main-layout.tsx index 2c422af279..8ec00a397e 100644 --- a/services/web/frontend/js/features/ide-redesign/components/main-layout.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/main-layout.tsx @@ -56,6 +56,9 @@ export default function MainLayout() { {pdfLayout === 'sideBySide' && ( - } diff --git a/services/web/frontend/js/features/pdf-preview/components/compile-time-warning-upgrade-prompt.tsx b/services/web/frontend/js/features/pdf-preview/components/compile-time-warning-upgrade-prompt.tsx index 6595df854c..164c3c3275 100644 --- a/services/web/frontend/js/features/pdf-preview/components/compile-time-warning-upgrade-prompt.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/compile-time-warning-upgrade-prompt.tsx @@ -1,23 +1,56 @@ -import { memo, useCallback, useEffect, useState } from 'react' +import { memo, useCallback, useEffect, useMemo, useState } from 'react' import * as eventTracking from '@/infrastructure/event-tracking' import { useDetachCompileContext } from '@/shared/context/detach-compile-context' import usePersistedState from '@/shared/hooks/use-persisted-state' import { CompileTimeWarningUpgradePromptInner } from '@/features/pdf-preview/components/compile-time-warning-upgrade-prompt-inner' import getMeta from '@/utils/meta' +import { CompileTimeoutChangingSoon } from './compile-time-changing-soon' function CompileTimeWarningUpgradePrompt() { const { isProjectOwner, deliveryLatencies, compiling, showLogs, error } = useDetachCompileContext() const [showWarning, setShowWarning] = useState(false) + const [showChangingSoon, setShowChangingSoon] = useState(false) const [dismissedUntilWarning, setDismissedUntilWarning] = usePersistedState< Date | undefined >(`has-dismissed-10s-compile-time-warning-until`) + const { reducedTimeoutWarning } = getMeta('ol-compileSettings') + const 
warningThreshold = reducedTimeoutWarning === 'enabled' ? 7 : 10 + + const sharedSegmentation = useMemo( + () => ({ + '10s-timeout-warning': reducedTimeoutWarning, + 'is-owner': isProjectOwner, + }), + [isProjectOwner, reducedTimeoutWarning] + ) + + const warningSegmentation = useMemo( + () => ({ + content: 'warning', + compileTime: warningThreshold, + ...sharedSegmentation, + }), + [sharedSegmentation, warningThreshold] + ) + + const changingSoonSegmentation = useMemo( + () => ({ + content: 'changes', + compileTime: 10, + ...sharedSegmentation, + }), + [sharedSegmentation] + ) const handleNewCompile = useCallback( (compileTime: number) => { setShowWarning(false) - if (compileTime > 10000) { + setShowChangingSoon(false) + if (reducedTimeoutWarning === 'enabled' && compileTime > 10000) { + setShowChangingSoon(true) + } else if (compileTime > warningThreshold * 1000) { if (isProjectOwner) { if ( !dismissedUntilWarning || @@ -25,26 +58,52 @@ function CompileTimeWarningUpgradePrompt() { ) { setShowWarning(true) eventTracking.sendMB('compile-time-warning-displayed', { - time: 10, + compileTime: warningThreshold, isProjectOwner, }) } } } }, - [isProjectOwner, dismissedUntilWarning] + [ + isProjectOwner, + dismissedUntilWarning, + reducedTimeoutWarning, + warningThreshold, + ] ) const handleDismissWarning = useCallback(() => { eventTracking.sendMB('compile-time-warning-dismissed', { - time: 10, + compileTime: warningThreshold, isProjectOwner, }) + eventTracking.sendMB('paywall-dismiss', { + 'paywall-type': 'compile-time-warning', + content: 'warning', + compileTime: warningThreshold, + ...sharedSegmentation, + }) setShowWarning(false) const until = new Date() until.setDate(until.getDate() + 1) // 1 day setDismissedUntilWarning(until) - }, [isProjectOwner, setDismissedUntilWarning]) + }, [ + isProjectOwner, + setDismissedUntilWarning, + warningThreshold, + sharedSegmentation, + ]) + + const handleDismissChangingSoon = useCallback(() => { + 
eventTracking.sendMB('paywall-dismiss', { + 'paywall-type': 'compile-time-warning', + compileTime: 10, + content: 'changes', + ...sharedSegmentation, + }) + setShowChangingSoon(false) + }, [sharedSegmentation]) useEffect(() => { if (compiling || error || showLogs) return @@ -55,21 +114,32 @@ function CompileTimeWarningUpgradePrompt() { return null } - if (compiling || error || showLogs) { + if ( + compiling || + error || + showLogs || + !deliveryLatencies.compileTimeServerE2E + ) { return null } - if (!showWarning) { + if (!showWarning && !showChangingSoon) { return null } - // if showWarning is true then the 10s warning is shown - return (
    {showWarning && isProjectOwner && ( + )} + {showChangingSoon && ( + )}
    diff --git a/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-content.tsx b/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-content.tsx index 8c9a9d7761..17378f6c74 100644 --- a/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-content.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-content.tsx @@ -3,6 +3,7 @@ import PdfLogEntryRawContent from './pdf-log-entry-raw-content' import importOverleafModules from '../../../../macros/import-overleaf-module.macro' import { LogEntry } from '../util/types' import { ElementType } from 'react' +import classNames from 'classnames' const pdfLogEntryComponents = importOverleafModules( 'pdfLogEntryComponents' @@ -17,17 +18,21 @@ export default function PdfLogEntryContent({ extraInfoURL, index, logEntry, + alwaysExpandRawContent = false, + className, }: { rawContent?: string formattedContent?: React.ReactNode extraInfoURL?: string | null index?: number logEntry?: LogEntry + alwaysExpandRawContent?: boolean + className?: string }) { const { t } = useTranslation() return ( -
    +
    {formattedContent && (
    {formattedContent}
    )} @@ -48,7 +53,11 @@ export default function PdfLogEntryContent({ )} {rawContent && ( - + )}
    ) diff --git a/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-raw-content.tsx b/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-raw-content.tsx index 39f46fbed3..0e9cc5246d 100644 --- a/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-raw-content.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-raw-content.tsx @@ -8,20 +8,24 @@ import Icon from '../../../shared/components/icon' export default function PdfLogEntryRawContent({ rawContent, collapsedSize = 0, + alwaysExpanded = false, }: { rawContent: string collapsedSize?: number + alwaysExpanded?: boolean }) { - const [expanded, setExpanded] = useState(false) - const [needsExpander, setNeedsExpander] = useState(true) + const [expanded, setExpanded] = useState(alwaysExpanded) + const [needsExpander, setNeedsExpander] = useState(!alwaysExpanded) const { elementRef } = useResizeObserver( useCallback( (element: Element) => { if (element.scrollHeight === 0) return // skip update when logs-pane is closed - setNeedsExpander(element.scrollHeight > collapsedSize) + setNeedsExpander( + !alwaysExpanded && element.scrollHeight > collapsedSize + ) }, - [collapsedSize] + [collapsedSize, alwaysExpanded] ) ) diff --git a/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry.tsx b/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry.tsx index 349ad79047..23ae2dca5d 100644 --- a/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry.tsx @@ -1,4 +1,3 @@ -import classNames from 'classnames' import { memo, MouseEventHandler, useCallback } from 'react' import PreviewLogEntryHeader from '../../preview/components/preview-log-entry-header' import PdfLogEntryContent from './pdf-log-entry-content' @@ -6,6 +5,9 @@ import HumanReadableLogsHints from '../../../ide/human-readable-logs/HumanReadab import { sendMB } 
from '@/infrastructure/event-tracking' import getMeta from '@/utils/meta' import { ErrorLevel, LogEntry, SourceLocation } from '../util/types' +import { useIsNewEditorEnabled } from '@/features/ide-redesign/utils/new-editor-utils' +import NewLogEntry from '@/features/ide-redesign/components/error-logs/log-entry' +import { useFeatureFlag } from '@/shared/context/split-test-context' function PdfLogEntry({ ruleId, @@ -18,12 +20,9 @@ function PdfLogEntry({ level, sourceLocation, showSourceLocationLink = true, - showCloseButton = false, entryAriaLabel = undefined, - customClass, contentDetails, onSourceLocationClick, - onClose, index, logEntry, id, @@ -38,12 +37,9 @@ function PdfLogEntry({ extraInfoURL?: string | null sourceLocation?: SourceLocation showSourceLocationLink?: boolean - showCloseButton?: boolean entryAriaLabel?: string - customClass?: string contentDetails?: string[] onSourceLocationClick?: (sourceLocation: SourceLocation) => void - onClose?: () => void index?: number logEntry?: LogEntry id?: string @@ -73,9 +69,34 @@ function PdfLogEntry({ [level, onSourceLocationClick, ruleId, sourceLocation] ) + const newEditor = useIsNewEditorEnabled() + const newErrorlogs = useFeatureFlag('new-editor-error-logs-redesign') + + if (newEditor && newErrorlogs) { + return ( + + ) + } + return (
    {(rawContent || formattedContent || showAiErrorAssistant) && ( diff --git a/services/web/frontend/js/features/pdf-preview/components/pdf-logs-viewer.tsx b/services/web/frontend/js/features/pdf-preview/components/pdf-logs-viewer.tsx index ec834432fe..f9fbcae42a 100644 --- a/services/web/frontend/js/features/pdf-preview/components/pdf-logs-viewer.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/pdf-logs-viewer.tsx @@ -21,7 +21,6 @@ function PdfLogsViewer({ alwaysVisible = false }: { alwaysVisible?: boolean }) { const { codeCheckFailed, error, - hasShortCompileTimeout, logEntries, rawLog, validationIssues, @@ -32,6 +31,8 @@ function PdfLogsViewer({ alwaysVisible = false }: { alwaysVisible?: boolean }) { const { loadingError } = usePdfPreviewContext() + const { compileTimeout } = getMeta('ol-compileSettings') + const { t } = useTranslation() const [ @@ -58,7 +59,7 @@ function PdfLogsViewer({ alwaysVisible = false }: { alwaysVisible?: boolean }) { {loadingError && } - {hasShortCompileTimeout && error === 'timedout' ? ( + {compileTimeout < 60 && error === 'timedout' ? ( isCompileTimeoutPaywallDisplay ? ( - startCompile()} - />, - ]} - /> -
    -
    - , - ]} - /> - - } - level="warning" - /> + includeWarnings && ( + + startCompile()} + />, + ]} + /> +
    +
    + , + ]} + /> + + } + level="warning" + /> + ) ) case 'rendering-error': return ( - - {t('something_went_wrong_rendering_pdf')} -   - , - ]} - /> - {getMeta('ol-compilesUserContentDomain') && ( - <> -
    -
    - , - /* eslint-disable-next-line jsx-a11y/anchor-has-content */ -
    , - ]} - /> - - )} - + includeErrors && ( + + {t('something_went_wrong_rendering_pdf')} +   + , + ]} + /> + {getMeta('ol-compilesUserContentDomain') && ( + <> +
    +
    + , + /* eslint-disable-next-line jsx-a11y/anchor-has-content */ +
    , + ]} + /> + + )} + + ) ) case 'clsi-maintenance': return ( - - {t('clsi_maintenance')} - + includeErrors && ( + + {t('clsi_maintenance')} + + ) ) case 'clsi-unavailable': return ( - - {t('clsi_unavailable')} - + includeErrors && ( + + {t('clsi_unavailable')} + + ) ) case 'too-recently-compiled': return ( - - {t('too_recently_compiled')} - + includeErrors && ( + + {t('too_recently_compiled')} + + ) ) case 'terminated': return ( - - {t('compile_terminated_by_user')} - + includeErrors && ( + + {t('compile_terminated_by_user')} + + ) ) case 'rate-limited': return ( - - {t('project_flagged_too_many_compiles')} - + includeErrors && ( + + {t('project_flagged_too_many_compiles')} + + ) ) case 'compile-in-progress': return ( - - {t('pdf_compile_try_again')} - + includeErrors && ( + + {t('pdf_compile_try_again')} + + ) ) case 'autocompile-disabled': return ( - - {t('autocompile_disabled_reason')} - + includeErrors && ( + + {t('autocompile_disabled_reason')} + + ) ) case 'project-too-large': return ( - - {t('project_too_much_editable_text')} - + includeErrors && ( + + {t('project_too_much_editable_text')} + + ) ) case 'timedout': - return + return includeErrors && case 'failure': return ( - - {t('no_pdf_error_explanation')} + includeErrors && ( + + {t('no_pdf_error_explanation')} -
      -
    • {t('no_pdf_error_reason_unrecoverable_error')}
    • -
    • - }} - /> -
    • -
    • - }} - /> -
    • -
    -
    +
      +
    • {t('no_pdf_error_reason_unrecoverable_error')}
    • +
    • + }} + /> +
    • +
    • + }} + /> +
    • +
    +
    + ) ) case 'clear-cache': return ( - - {t('somthing_went_wrong_compiling')} - + includeErrors && ( + + {t('somthing_went_wrong_compiling')} + + ) ) case 'pdf-viewer-loading-error': return ( - - , - // eslint-disable-next-line jsx-a11y/anchor-has-content -
    , - // eslint-disable-next-line jsx-a11y/anchor-has-content - , - ]} - /> - + includeErrors && ( + + , + // eslint-disable-next-line jsx-a11y/anchor-has-content + , + // eslint-disable-next-line jsx-a11y/anchor-has-content + , + ]} + /> + + ) ) case 'validation-problems': @@ -207,9 +241,11 @@ function PdfPreviewError({ error }: { error: string }) { case 'error': default: return ( - - {t('somthing_went_wrong_compiling')} - + includeErrors && ( + + {t('somthing_went_wrong_compiling')} + + ) ) } } diff --git a/services/web/frontend/js/features/pdf-preview/components/pdf-preview-pane.tsx b/services/web/frontend/js/features/pdf-preview/components/pdf-preview-pane.tsx index 7bbecbc327..e063c20c76 100644 --- a/services/web/frontend/js/features/pdf-preview/components/pdf-preview-pane.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/pdf-preview-pane.tsx @@ -12,9 +12,12 @@ import PdfPreviewHybridToolbarNew from '@/features/ide-redesign/components/pdf-p import PdfErrorState from '@/features/ide-redesign/components/pdf-preview/pdf-error-state' import { useIsNewEditorEnabled } from '@/features/ide-redesign/utils/new-editor-utils' import importOverleafModules from '../../../../macros/import-overleaf-module.macro' +import PdfCodeCheckFailedBanner from '@/features/ide-redesign/components/pdf-preview/pdf-code-check-failed-banner' +import getMeta from '@/utils/meta' function PdfPreviewPane() { - const { pdfUrl, hasShortCompileTimeout } = useCompileContext() + const { pdfUrl } = useCompileContext() + const { compileTimeout } = getMeta('ol-compileSettings') const classes = classNames('pdf', 'full-size', { 'pdf-empty': !pdfUrl, }) @@ -32,8 +35,9 @@ function PdfPreviewPane() { ) : ( )} + {newEditor && } - {hasShortCompileTimeout && } + {compileTimeout < 60 && } }>
    diff --git a/services/web/frontend/js/features/pdf-preview/components/timeout-message-after-paywall-dismissal.tsx b/services/web/frontend/js/features/pdf-preview/components/timeout-message-after-paywall-dismissal.tsx index db6140085f..64ef0fbfc1 100644 --- a/services/web/frontend/js/features/pdf-preview/components/timeout-message-after-paywall-dismissal.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/timeout-message-after-paywall-dismissal.tsx @@ -1,39 +1,30 @@ import getMeta from '@/utils/meta' import { Trans, useTranslation } from 'react-i18next' -import { memo, useCallback, useEffect } from 'react' +import { memo, useMemo } from 'react' import { useDetachCompileContext } from '@/shared/context/detach-compile-context' import StartFreeTrialButton from '@/shared/components/start-free-trial-button' import MaterialIcon from '@/shared/components/material-icon' -import { useStopOnFirstError } from '@/shared/hooks/use-stop-on-first-error' import * as eventTracking from '@/infrastructure/event-tracking' import PdfLogEntry from './pdf-log-entry' -function TimeoutMessageAfterPaywallDismissal() { - const { - startCompile, - lastCompileOptions, - setAnimateCompileDropdownArrow, - isProjectOwner, - } = useDetachCompileContext() - - const { enableStopOnFirstError } = useStopOnFirstError({ - eventSource: 'timeout-new', - }) - - const handleEnableStopOnFirstErrorClick = useCallback(() => { - enableStopOnFirstError() - startCompile({ stopOnFirstError: true }) - setAnimateCompileDropdownArrow(true) - }, [enableStopOnFirstError, startCompile, setAnimateCompileDropdownArrow]) +type TimeoutMessageProps = { + segmentation?: eventTracking.Segmentation +} +function TimeoutMessageAfterPaywallDismissal({ + segmentation, +}: TimeoutMessageProps) { + const { lastCompileOptions, isProjectOwner } = useDetachCompileContext() return (
    - + {getMeta('ol-ExposedSettings').enableSubscriptions && ( )}
    @@ -42,26 +33,22 @@ function TimeoutMessageAfterPaywallDismissal() { type CompileTimeoutProps = { isProjectOwner: boolean + segmentation?: eventTracking.Segmentation } const CompileTimeout = memo(function CompileTimeout({ isProjectOwner, + segmentation, }: CompileTimeoutProps) { const { t } = useTranslation() - useEffect(() => { - eventTracking.sendMB('paywall-prompt', { - 'paywall-type': 'compile-timeout', + const eventSegmentation = useMemo( + () => ({ + ...segmentation, 'paywall-version': 'secondary', - }) - }, []) - - function onPaywallClick() { - eventTracking.sendMB('paywall-click', { - 'paywall-type': 'compile-timeout', - 'paywall-version': 'secondary', - }) - } + }), + [segmentation] + ) return ( {t('try_for_free')} @@ -124,22 +111,50 @@ const CompileTimeout = memo(function CompileTimeout({ type PreventTimeoutHelpMessageProps = { lastCompileOptions: any - handleEnableStopOnFirstErrorClick: () => void - isProjectOwner: boolean + segmentation?: eventTracking.Segmentation } const PreventTimeoutHelpMessage = memo(function PreventTimeoutHelpMessage({ lastCompileOptions, - handleEnableStopOnFirstErrorClick, - isProjectOwner, + segmentation, }: PreventTimeoutHelpMessageProps) { const { t } = useTranslation() + function sendInfoClickEvent() { + eventTracking.sendMB('paywall-info-click', { + 'paywall-type': 'compile-timeout', + content: 'blog', + ...segmentation, + }) + } + + const compileTimeoutChangesBlogLink = ( + /* eslint-disable-next-line jsx-a11y/anchor-has-content, react/jsx-key */ +
    + ) + return ( + {segmentation?.['10s-timeout-warning'] === 'enabled' && ( +

    + + + +

    + )} +

    {t('common_causes_of_compile_timeouts_include')}:

    -
    +
    Digital Science
    diff --git a/services/web/frontend/js/features/project-list/components/sidebar/tags-list.tsx b/services/web/frontend/js/features/project-list/components/sidebar/tags-list.tsx index 452b003b2b..1c6298603c 100644 --- a/services/web/frontend/js/features/project-list/components/sidebar/tags-list.tsx +++ b/services/web/frontend/js/features/project-list/components/sidebar/tags-list.tsx @@ -57,6 +57,7 @@ export default function TagsList() {
    + +
    + ) + + cy.findByRole('button', { name: btnText }).as('button') + cy.get('@button').trigger('mouseover') + cy.findByText(description) + cy.get('@button').trigger('mouseout') + cy.get('@button').focus() + cy.findByText(description) + cy.get('body').type('{esc}') + cy.findByText(description).should('not.exist') + }) }) diff --git a/services/web/test/frontend/features/chat/components/chat-pane.test.jsx b/services/web/test/frontend/features/chat/components/chat-pane.test.jsx index 5a59b9b19f..f990a8c6ce 100644 --- a/services/web/test/frontend/features/chat/components/chat-pane.test.jsx +++ b/services/web/test/frontend/features/chat/components/chat-pane.test.jsx @@ -7,10 +7,7 @@ import { import fetchMock from 'fetch-mock' import ChatPane from '../../../../../frontend/js/features/chat/components/chat-pane' -import { - cleanUpContext, - renderWithEditorContext, -} from '../../../helpers/render-with-context' +import { renderWithEditorContext } from '../../../helpers/render-with-context' import { stubMathJax, tearDownMathJaxStubs } from './stubs' describe('', function () { @@ -22,7 +19,6 @@ describe('', function () { beforeEach(function () { window.metaAttributesCache.set('ol-user', user) - window.metaAttributesCache.set('ol-chatEnabled', true) window.metaAttributesCache.set('ol-preventCompileOnLoad', true) }) @@ -47,8 +43,6 @@ describe('', function () { beforeEach(function () { fetchMock.removeRoutes().clearHistory() - cleanUpContext() - stubMathJax() }) diff --git a/services/web/test/frontend/features/chat/context/chat-context.test.jsx b/services/web/test/frontend/features/chat/context/chat-context.test.jsx index ddb69d3025..a930ba3a9c 100644 --- a/services/web/test/frontend/features/chat/context/chat-context.test.jsx +++ b/services/web/test/frontend/features/chat/context/chat-context.test.jsx @@ -9,7 +9,6 @@ import { useChatContext, chatClientIdGenerator, } from '@/features/chat/context/chat-context' -import { cleanUpContext } from 
'../../../helpers/render-with-context' import { stubMathJax, tearDownMathJaxStubs } from '../components/stubs' import { SocketIOMock } from '@/ide/connection/SocketIoShim' import { EditorProviders } from '../../../helpers/editor-providers' @@ -24,12 +23,10 @@ describe('ChatContext', function () { beforeEach(function () { fetchMock.removeRoutes().clearHistory() - cleanUpContext() stubMathJax() window.metaAttributesCache.set('ol-user', user) - window.metaAttributesCache.set('ol-chatEnabled', true) window.metaAttributesCache.set('ol-preventCompileOnLoad', true) this.stub = sinon.stub(chatClientIdGenerator, 'generate').returns(uuidValue) diff --git a/services/web/test/frontend/features/dictionary/components/dictionary-modal-content.spec.jsx b/services/web/test/frontend/features/dictionary/components/dictionary-modal-content.spec.jsx index c28eef66ef..c8cdd931b3 100644 --- a/services/web/test/frontend/features/dictionary/components/dictionary-modal-content.spec.jsx +++ b/services/web/test/frontend/features/dictionary/components/dictionary-modal-content.spec.jsx @@ -19,7 +19,7 @@ describe('', function () { }) it('list words', function () { - cy.then(win => { + cy.then(() => { learnedWords.global = new Set(['foo', 'bar']) }) @@ -34,7 +34,7 @@ describe('', function () { }) it('shows message when empty', function () { - cy.then(win => { + cy.then(() => { learnedWords.global = new Set([]) }) @@ -50,7 +50,7 @@ describe('', function () { it('removes words', function () { cy.intercept('/spelling/unlearn', { statusCode: 200 }) - cy.then(win => { + cy.then(() => { learnedWords.global = new Set(['Foo', 'bar']) }) @@ -76,7 +76,7 @@ describe('', function () { it('handles errors', function () { cy.intercept('/spelling/unlearn', { statusCode: 500 }).as('unlearn') - cy.then(win => { + cy.then(() => { learnedWords.global = new Set(['foo']) }) diff --git a/services/web/test/frontend/features/editor-navigation-toolbar/components/toolbar-header.test.jsx 
b/services/web/test/frontend/features/editor-navigation-toolbar/components/toolbar-header.test.jsx index 84b1e680ef..be7894fc73 100644 --- a/services/web/test/frontend/features/editor-navigation-toolbar/components/toolbar-header.test.jsx +++ b/services/web/test/frontend/features/editor-navigation-toolbar/components/toolbar-header.test.jsx @@ -27,7 +27,6 @@ describe('', function () { } beforeEach(function () { - window.metaAttributesCache.set('ol-chatEnabled', true) window.metaAttributesCache.set('ol-preventCompileOnLoad', true) }) diff --git a/services/web/test/frontend/features/group-management/components/members-table/dropdown-button.spec.tsx b/services/web/test/frontend/features/group-management/components/members-table/dropdown-button.spec.tsx index 9213069699..93d24865b2 100644 --- a/services/web/test/frontend/features/group-management/components/members-table/dropdown-button.spec.tsx +++ b/services/web/test/frontend/features/group-management/components/members-table/dropdown-button.spec.tsx @@ -24,6 +24,7 @@ function mountDropDownComponent(user: User, subscriptionId: string) { { win.metaAttributesCache.set('ol-users', [user]) + win.metaAttributesCache.set('ol-isUserGroupManager', true) }) mountDropDownComponent(user, subscriptionId) }) @@ -189,6 +191,7 @@ describe('DropdownButton', function () { cy.findByRole('button', { name: /actions/i }).click() cy.findByTestId('delete-user-action').should('be.visible') + cy.findByTestId('release-user-action') cy.findByTestId('remove-user-action').should('not.exist') cy.findByTestId('resend-managed-user-invite-action').should('not.exist') @@ -566,6 +569,7 @@ describe('DropdownButton', function () { cy.findByTestId('unlink-user-action').should('be.visible') cy.findByTestId('delete-user-action').should('not.exist') + cy.findByTestId('release-user-action').should('not.exist') cy.findByTestId('resend-sso-link-invite-action').should('not.exist') cy.findByTestId('no-actions-available').should('not.exist') }) @@ -608,6 +612,7 @@ 
describe('DropdownButton', function () { ) cy.findByTestId('remove-user-action').should('be.visible') cy.findByTestId('delete-user-action').should('not.exist') + cy.findByTestId('release-user-action').should('not.exist') cy.findByTestId('resend-sso-link-invite-action').should('exist') cy.findByTestId('no-actions-available').should('not.exist') @@ -633,6 +638,7 @@ describe('DropdownButton', function () { beforeEach(function () { cy.window().then(win => { win.metaAttributesCache.set('ol-users', [user]) + win.metaAttributesCache.set('ol-isUserGroupManager', true) }) mountDropDownComponent(user, subscriptionId) }) @@ -648,6 +654,7 @@ describe('DropdownButton', function () { cy.findByRole('button', { name: /actions/i }).click() cy.findByTestId('delete-user-action').should('be.visible') + cy.findByTestId('release-user-action') cy.findByTestId('remove-user-action').should('not.exist') cy.findByTestId('resend-managed-user-invite-action').should('not.exist') @@ -682,6 +689,7 @@ describe('DropdownButton', function () { beforeEach(function () { cy.window().then(win => { win.metaAttributesCache.set('ol-users', [user]) + win.metaAttributesCache.set('ol-isUserGroupManager', true) }) mountDropDownComponent(user, subscriptionId) }) @@ -697,6 +705,7 @@ describe('DropdownButton', function () { cy.findByRole('button', { name: /actions/i }).click() cy.findByTestId('delete-user-action').should('be.visible') + cy.findByTestId('release-user-action') cy.findByTestId('remove-user-action').should('not.exist') cy.findByTestId('resend-managed-user-invite-action').should('not.exist') @@ -745,6 +754,7 @@ describe('DropdownButton', function () { cy.findByTestId('resend-managed-user-invite-action').should('not.exist') cy.findByTestId('remove-user-action').should('not.exist') cy.findByTestId('delete-user-action').should('not.exist') + cy.findByTestId('release-user-action').should('not.exist') cy.findByTestId('no-actions-available').should('not.exist') }) }) @@ -793,6 +803,7 @@ 
describe('DropdownButton', function () { cy.findByTestId('no-actions-available').should('not.exist') cy.findByTestId('delete-user-action').should('not.exist') + cy.findByTestId('release-user-action').should('not.exist') cy.findByTestId('remove-user-action').should('not.exist') cy.findByTestId('resend-managed-user-invite-action').should('not.exist') cy.findByTestId('resend-sso-link-invite-action').should('not.exist') diff --git a/services/web/test/frontend/features/group-management/components/members-table/member-row.spec.tsx b/services/web/test/frontend/features/group-management/components/members-table/member-row.spec.tsx index 538e3036b8..0ae6ee8d04 100644 --- a/services/web/test/frontend/features/group-management/components/members-table/member-row.spec.tsx +++ b/services/web/test/frontend/features/group-management/components/members-table/member-row.spec.tsx @@ -30,6 +30,7 @@ describe('MemberRow', function () { + ) + }) + + it('should render the modal', function () { + cy.findByTestId('release-user-form') + }) + + it('should render content', function () { + cy.findByText( + `You’re about to remove ${user.first_name} ${user.last_name} (${user.email}). Doing this will mean:` + ) + cy.findAllByRole('listitem') + .eq(0) + .contains(/they will be removed from the group/i) + cy.findAllByRole('listitem') + .eq(1) + .contains(/they will no longer be a managed user/i) + cy.findAllByRole('listitem') + .eq(2) + .contains( + /they will retain their existing account on the .* free plan/i + ) + cy.findAllByRole('listitem') + .eq(3) + .contains( + /they will retain ownership of projects currently owned by them and any collaborators on those projects will become read-only/i + ) + cy.findAllByRole('listitem') + .eq(4) + .contains( + /they will continue to have access to any projects shared with them/i + ) + cy.findAllByRole('listitem') + .eq(5) + .contains( + /they won’t be able to log in with SSO \(if you have this enabled\)\. 
they will need to set an .* password/i + ) + cy.contains( + /in cases where a user has left your organization and you need to transfer their projects, the delete user option should be used/i + ) + }) + + it('should disable the remove button if the email does not match the user', function () { + // Button should be disabled initially + cy.findByRole('button', { name: /remove user/i }).should('be.disabled') + + // Fill in the email input, with the wrong email address + cy.findByLabelText( + /to confirm you want to remove .* please type the email address associated with their account/i + ).type('totally.wrong@example.com') + + // Button still disabled + cy.findByRole('button', { name: /remove user/i }).should('be.disabled') + }) + + it('should fill out the form, and enable the remove button', function () { + // Button should be disabled initially + cy.findByRole('button', { name: /remove user/i }).should('be.disabled') + + // Fill in the email input + cy.findByLabelText( + /to confirm you want to remove .* please type the email address associated with their account/i + ).type(user.email) + + // Button should be enabled now + cy.findByRole('button', { name: /remove user/i }).should('be.enabled') + }) + }) +}) diff --git a/services/web/test/frontend/features/history/components/change-list.spec.tsx b/services/web/test/frontend/features/history/components/change-list.spec.tsx index b3a1071015..763845db54 100644 --- a/services/web/test/frontend/features/history/components/change-list.spec.tsx +++ b/services/web/test/frontend/features/history/components/change-list.spec.tsx @@ -372,7 +372,7 @@ describe('change list (Bootstrap 5)', function () { cy.findAllByTestId('history-version-details') .eq(1) .within(() => { - cy.get('[aria-label="Compare"]').click() + cy.findByRole('button', { name: /compare/i }).click() cy.findByRole('menu').within(() => { cy.findByRole('menuitem', { name: /compare up to this version/i, diff --git 
a/services/web/test/frontend/features/ide-react/unit/share-js-history-ot-type.ts b/services/web/test/frontend/features/ide-react/unit/share-js-history-ot-type.ts new file mode 100644 index 0000000000..8418c59ed0 --- /dev/null +++ b/services/web/test/frontend/features/ide-react/unit/share-js-history-ot-type.ts @@ -0,0 +1,134 @@ +import { expect } from 'chai' +import { + StringFileData, + TextOperation, + AddCommentOperation, + Range, +} from 'overleaf-editor-core' +import { historyOTType } from '@/features/ide-react/editor/share-js-history-ot-type' + +describe('historyOTType', function () { + let snapshot: StringFileData + let opsA: TextOperation[] + let opsB: TextOperation[] + + beforeEach(function () { + snapshot = new StringFileData('one plus two equals three') + + // After opsA: "seven plus five equals twelve" + opsA = [new TextOperation(), new TextOperation(), new TextOperation()] + + opsA[0].remove(3) + opsA[0].insert('seven') + opsA[0].retain(22) + + opsA[1].retain(11) + opsA[1].remove(3) + opsA[1].insert('five') + opsA[1].retain(13) + + opsA[2].retain(23) + opsA[2].remove(5) + opsA[2].insert('twelve') + + // After ops2: "one times two equals two" + opsB = [new TextOperation(), new TextOperation()] + + opsB[0].retain(4) + opsB[0].remove(4) + opsB[0].insert('times') + opsB[0].retain(17) + + opsB[1].retain(21) + opsB[1].remove(5) + opsB[1].insert('two') + }) + + describe('apply', function () { + it('supports an empty operations array', function () { + const result = historyOTType.apply(snapshot, []) + expect(result.getContent()).to.equal('one plus two equals three') + }) + + it('applies operations to the snapshot (opsA)', function () { + const result = historyOTType.apply(snapshot, opsA) + expect(result.getContent()).to.equal('seven plus five equals twelve') + }) + + it('applies operations to the snapshot (opsB)', function () { + const result = historyOTType.apply(snapshot, opsB) + expect(result.getContent()).to.equal('one times two equals two') + }) + }) + + 
describe('compose', function () { + it('supports empty operations', function () { + const ops = historyOTType.compose([], []) + expect(ops).to.deep.equal([]) + }) + + it('supports an empty operation on the left', function () { + const ops = historyOTType.compose([], opsA) + const result = historyOTType.apply(snapshot, ops) + expect(result.getContent()).to.equal('seven plus five equals twelve') + }) + + it('supports an empty operation on the right', function () { + const ops = historyOTType.compose(opsA, []) + const result = historyOTType.apply(snapshot, ops) + expect(result.getContent()).to.equal('seven plus five equals twelve') + }) + + it('supports operations on both sides', function () { + const ops = historyOTType.compose(opsA.slice(0, 2), opsA.slice(2)) + const result = historyOTType.apply(snapshot, ops) + expect(ops.length).to.equal(1) + expect(result.getContent()).to.equal('seven plus five equals twelve') + }) + + it("supports operations that can't be composed", function () { + const comment = new AddCommentOperation('comment-id', [new Range(3, 10)]) + const ops = historyOTType.compose(opsA.slice(0, 2), [ + comment, + ...opsA.slice(2), + ]) + expect(ops.length).to.equal(3) + const result = historyOTType.apply(snapshot, ops) + expect(result.getContent()).to.equal('seven plus five equals twelve') + }) + }) + + describe('transformX', function () { + it('supports empty operations', function () { + const [aPrime, bPrime] = historyOTType.transformX([], []) + expect(aPrime).to.deep.equal([]) + expect(bPrime).to.deep.equal([]) + }) + + it('supports an empty operation on the left', function () { + const [aPrime, bPrime] = historyOTType.transformX([], opsB) + expect(aPrime).to.deep.equal([]) + expect(bPrime).to.deep.equal(opsB) + }) + + it('supports an empty operation on the right', function () { + const [aPrime, bPrime] = historyOTType.transformX(opsA, []) + expect(aPrime).to.deep.equal(opsA) + expect(bPrime).to.deep.equal([]) + }) + + it('supports operations on both 
sides (a then b)', function () { + const [, bPrime] = historyOTType.transformX(opsA, opsB) + const ops = historyOTType.compose(opsA, bPrime) + const result = historyOTType.apply(snapshot, ops) + expect(result.getContent()).to.equal('seven times five equals twelvetwo') + }) + + it('supports operations on both sides (b then a)', function () { + const [aPrime] = historyOTType.transformX(opsA, opsB) + const ops = historyOTType.compose(opsB, aPrime) + const result = historyOTType.apply(snapshot, ops) + expect(result.getContent()).to.equal('seven times five equals twelvetwo') + }) + }) +}) diff --git a/services/web/test/frontend/features/project-list/components/notifications.test.tsx b/services/web/test/frontend/features/project-list/components/notifications.test.tsx index 78c732ebe3..9a845283d7 100644 --- a/services/web/test/frontend/features/project-list/components/notifications.test.tsx +++ b/services/web/test/frontend/features/project-list/components/notifications.test.tsx @@ -441,7 +441,7 @@ describe('', function () { ), ]) window.metaAttributesCache.set( - 'ol-hasIndividualRecurlySubscription', + 'ol-hasIndividualPaidSubscription', true ) diff --git a/services/web/test/frontend/features/settings/components/emails/add-email-input.test.tsx b/services/web/test/frontend/features/settings/components/emails/add-email-input.test.tsx index 50220152c6..694a13f32c 100644 --- a/services/web/test/frontend/features/settings/components/emails/add-email-input.test.tsx +++ b/services/web/test/frontend/features/settings/components/emails/add-email-input.test.tsx @@ -13,7 +13,7 @@ const testInstitutionData = [ describe('', function () { const defaultProps = { - onChange: (value: string) => {}, + onChange: () => {}, handleAddNewEmail: () => {}, } diff --git a/services/web/test/frontend/features/share-project-modal/components/share-project-modal.test.jsx b/services/web/test/frontend/features/share-project-modal/components/share-project-modal.test.jsx index 88f3482c4b..b86207fb0f 
100644 --- a/services/web/test/frontend/features/share-project-modal/components/share-project-modal.test.jsx +++ b/services/web/test/frontend/features/share-project-modal/components/share-project-modal.test.jsx @@ -5,10 +5,7 @@ import fetchMock from 'fetch-mock' import userEvent from '@testing-library/user-event' import ShareProjectModal from '../../../../../frontend/js/features/share-project-modal/components/share-project-modal' -import { - renderWithEditorContext, - cleanUpContext, -} from '../../../helpers/render-with-context' +import { renderWithEditorContext } from '../../../helpers/render-with-context' import { EditorProviders, USER_EMAIL, @@ -100,7 +97,6 @@ describe('', function () { afterEach(function () { this.locationWrapperSandbox.restore() fetchMock.removeRoutes().clearHistory() - cleanUpContext() }) it('renders the modal', async function () { @@ -617,7 +613,7 @@ describe('', function () { fetchMock.post( 'express:/project/:projectId/invite', - ({ args: [url, req] }) => { + ({ args: [, req] }) => { const data = JSON.parse(req.body) if (data.email === 'a@b.c') { diff --git a/services/web/test/frontend/features/source-editor/helpers/mock-doc.ts b/services/web/test/frontend/features/source-editor/helpers/mock-doc.ts index 4c239c1f60..a4944c1e97 100644 --- a/services/web/test/frontend/features/source-editor/helpers/mock-doc.ts +++ b/services/web/test/frontend/features/source-editor/helpers/mock-doc.ts @@ -1,4 +1,4 @@ -import { ShareDoc } from '../../../../../types/share-doc' +import { ShareLatexOTShareDoc } from '../../../../../types/share-doc' import { EventEmitter } from 'events' export const docId = 'test-doc' @@ -36,6 +36,9 @@ const defaultContent = mockDocContent(contentLines.join('\n')) const MAX_DOC_LENGTH = 2 * 1024 * 1024 // ol-maxDocLength class MockShareDoc extends EventEmitter { + otType = 'sharejs-text-ot' as const + snapshot = '' + constructor(public text: string) { super() } @@ -51,16 +54,21 @@ class MockShareDoc extends EventEmitter { del() 
{ // do nothing } + + submitOp() { + // do nothing + } } export const mockDoc = ( content = defaultContent, { rangesOptions = {} } = {} ) => { - const mockShareJSDoc: ShareDoc = new MockShareDoc(content) + const mockShareJSDoc: ShareLatexOTShareDoc = new MockShareDoc(content) return { doc_id: docId, + getType: () => 'sharejs-text-ot', getSnapshot: () => { return content }, @@ -98,10 +106,11 @@ export const mockDoc = ( removeCommentId: () => {}, ...rangesOptions, }, + // eslint-disable-next-line @typescript-eslint/no-unused-vars submitOp: (op: any) => {}, setTrackChangesIdSeeds: () => {}, getTrackingChanges: () => true, - setTrackingChanges: () => {}, + setTrackChangesUserId: () => {}, getInflightOp: () => null, getPendingOp: () => null, hasBufferedOps: () => false, diff --git a/services/web/test/frontend/features/subscription/components/dashboard/personal-subscription.test.tsx b/services/web/test/frontend/features/subscription/components/dashboard/personal-subscription.test.tsx index 8edc881caa..a61c9fca7f 100644 --- a/services/web/test/frontend/features/subscription/components/dashboard/personal-subscription.test.tsx +++ b/services/web/test/frontend/features/subscription/components/dashboard/personal-subscription.test.tsx @@ -190,7 +190,9 @@ describe('', function () { }) it('shows different payment email address section', async function () { - fetchMock.post('/user/subscription/account/email', 200) + fetchMock.post('/user/subscription/account/email', { + status: 200, + }) const usersEmail = 'foo@example.com' renderWithSubscriptionDashContext(, { metaTags: [ diff --git a/services/web/test/frontend/features/subscription/components/group-invite/group-invite.test.tsx b/services/web/test/frontend/features/subscription/components/group-invite/group-invite.test.tsx index cc70eff90d..d7b769fd20 100644 --- a/services/web/test/frontend/features/subscription/components/group-invite/group-invite.test.tsx +++ 
b/services/web/test/frontend/features/subscription/components/group-invite/group-invite.test.tsx @@ -18,10 +18,7 @@ describe('group invite', function () { describe('when user has personal subscription', function () { beforeEach(function () { - window.metaAttributesCache.set( - 'ol-hasIndividualRecurlySubscription', - true - ) + window.metaAttributesCache.set('ol-hasIndividualPaidSubscription', true) }) it('renders cancel personal subscription view', async function () { @@ -55,10 +52,7 @@ describe('group invite', function () { describe('when user does not have a personal subscription', function () { beforeEach(function () { - window.metaAttributesCache.set( - 'ol-hasIndividualRecurlySubscription', - false - ) + window.metaAttributesCache.set('ol-hasIndividualPaidSubscription', false) window.metaAttributesCache.set('ol-inviteToken', 'token123') }) diff --git a/services/web/test/frontend/features/subscription/fixtures/subscriptions.ts b/services/web/test/frontend/features/subscription/fixtures/subscriptions.ts index 08690742d3..8011c5206d 100644 --- a/services/web/test/frontend/features/subscription/fixtures/subscriptions.ts +++ b/services/web/test/frontend/features/subscription/fixtures/subscriptions.ts @@ -25,7 +25,6 @@ export const annualActiveSubscription: PaidSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'collaborator-annual', - recurlySubscription_id: 'ghi789', plan: { planCode: 'collaborator-annual', name: 'Standard (Collaborator) Annual', @@ -68,7 +67,6 @@ export const annualActiveSubscriptionEuro: PaidSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'collaborator-annual', - recurlySubscription_id: 'ghi789', plan: { planCode: 'collaborator-annual', name: 'Standard (Collaborator) Annual', @@ -111,7 +109,6 @@ export const annualActiveSubscriptionPro: PaidSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'professional', - recurlySubscription_id: 'ghi789', plan: { planCode: 'professional', name: 'Professional', @@ 
-153,7 +150,6 @@ export const pastDueExpiredSubscription: PaidSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'collaborator-annual', - recurlySubscription_id: 'ghi789', plan: { planCode: 'collaborator-annual', name: 'Standard (Collaborator) Annual', @@ -196,7 +192,6 @@ export const canceledSubscription: PaidSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'collaborator-annual', - recurlySubscription_id: 'ghi789', plan: { planCode: 'collaborator-annual', name: 'Standard (Collaborator) Annual', @@ -239,7 +234,6 @@ export const pendingSubscriptionChange: PaidSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'collaborator-annual', - recurlySubscription_id: 'ghi789', plan: { planCode: 'collaborator-annual', name: 'Standard (Collaborator) Annual', @@ -290,7 +284,6 @@ export const groupActiveSubscription: GroupSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'group_collaborator_10_enterprise', - recurlySubscription_id: 'ghi789', plan: { planCode: 'group_collaborator_10_enterprise', name: 'Overleaf Standard (Collaborator) - Group Account (10 licenses) - Enterprise', @@ -338,7 +331,6 @@ export const groupActiveSubscriptionWithPendingLicenseChange: GroupSubscription admin_id: 'abc123', teamInvites: [], planCode: 'group_collaborator_10_enterprise', - recurlySubscription_id: 'ghi789', plan: { planCode: 'group_collaborator_10_enterprise', name: 'Overleaf Standard (Collaborator) - Group Account (10 licenses) - Enterprise', @@ -396,7 +388,6 @@ export const trialSubscription: PaidSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'paid-personal_free_trial_7_days', - recurlySubscription_id: 'ghi789', plan: { planCode: 'paid-personal_free_trial_7_days', name: 'Personal', @@ -439,7 +430,6 @@ export const customSubscription: CustomSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'collaborator-annual', - recurlySubscription_id: 'ghi789', plan: { planCode: 'collaborator-annual', name: 'Standard 
(Collaborator) Annual', @@ -460,7 +450,6 @@ export const trialCollaboratorSubscription: PaidSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'collaborator_free_trial_7_days', - recurlySubscription_id: 'ghi789', plan: { planCode: 'collaborator_free_trial_7_days', name: 'Standard (Collaborator)', @@ -503,7 +492,6 @@ export const monthlyActiveCollaborator: PaidSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'collaborator', - recurlySubscription_id: 'ghi789', plan: { planCode: 'collaborator', name: 'Standard (Collaborator)', diff --git a/services/web/test/frontend/features/word-count-modal/components/word-count-modal.spec.tsx b/services/web/test/frontend/features/word-count-modal/components/word-count-modal.spec.tsx index 2ed1dc9448..9fc5887535 100644 --- a/services/web/test/frontend/features/word-count-modal/components/word-count-modal.spec.tsx +++ b/services/web/test/frontend/features/word-count-modal/components/word-count-modal.spec.tsx @@ -12,7 +12,7 @@ describe('', function () { }) cy.mount( - + ) @@ -30,7 +30,7 @@ describe('', function () { }) cy.mount( - + ) @@ -48,7 +48,7 @@ describe('', function () { }) cy.mount( - + ) @@ -64,7 +64,7 @@ describe('', function () { }) cy.mount( - + ) @@ -87,7 +87,7 @@ describe('', function () { }) cy.mount( - + ) diff --git a/services/web/test/frontend/helpers/editor-providers.jsx b/services/web/test/frontend/helpers/editor-providers.jsx index a6bc9c32c6..1fe143a8e3 100644 --- a/services/web/test/frontend/helpers/editor-providers.jsx +++ b/services/web/test/frontend/helpers/editor-providers.jsx @@ -1,7 +1,6 @@ // Disable prop type checks for test harnesses /* eslint-disable react/prop-types */ -import sinon from 'sinon' -import { get, merge } from 'lodash' +import { merge } from 'lodash' import { SocketIOMock } from '@/ide/connection/SocketIoShim' import { IdeContext } from '@/shared/context/ide-context' import React, { useEffect, useState } from 'react' @@ -48,8 +47,7 @@ export function 
EditorProviders({ compiler = 'pdflatex', socket = new SocketIOMock(), isRestrictedTokenMember = false, - clsiServerId = '1234', - scope = {}, + scope: defaultScope = {}, features = { referencesSearch: true, }, @@ -71,18 +69,6 @@ export function EditorProviders({ }, ], ui = { view: 'editor', pdfLayout: 'sideBySide', chatOpen: true }, - fileTreeManager = { - findEntityById: () => null, - findEntityByPath: () => null, - getEntityPath: () => '', - getRootDocDirname: () => '', - getPreviewByPath: path => ({ url: path, extension: 'png' }), - }, - editorManager = { - getCurrentDocumentId: () => 'foo', - getCurrentDocValue: () => {}, - openDoc: sinon.stub(), - }, userSettings = {}, providers = {}, }) { @@ -99,7 +85,9 @@ export function EditorProviders({ merge({}, defaultUserSettings, userSettings) ) - const $scope = merge( + window.metaAttributesCache.set('ol-capabilities', ['chat', 'dropbox']) + + const scope = merge( { user, editor: { @@ -117,31 +105,17 @@ export function EditorProviders({ name: PROJECT_NAME, owner: projectOwner, features: projectFeatures, - rootDoc_id: rootDocId, + rootDocId, rootFolder, imageName, compiler, }, ui, - $watch: (path, callback) => { - callback(get($scope, path)) - return () => null - }, - $on: sinon.stub(), - $applyAsync: sinon.stub(), permissionsLevel, }, - scope + defaultScope ) - window._ide = { - $scope, - socket, - clsiServerId, - editorManager, - fileTreeManager, - } - // Add details for useUserContext window.metaAttributesCache.set('ol-user', { ...user, features }) window.metaAttributesCache.set('ol-project_id', projectId) @@ -149,8 +123,8 @@ export function EditorProviders({ return ( @@ -159,79 +133,85 @@ export function EditorProviders({ ) } -const ConnectionProvider = ({ children }) => { - const [value] = useState(() => ({ - socket: window._ide.socket, - connectionState: { - readyState: WebSocket.OPEN, - forceDisconnected: false, - inactiveDisconnect: false, - reconnectAt: null, - forcedDisconnectDelay: 0, - 
lastConnectionAttempt: 0, - error: '', - }, - isConnected: true, - isStillReconnecting: false, - secondsUntilReconnect: () => 0, - tryReconnectNow: () => {}, - registerUserActivity: () => {}, - disconnect: () => {}, - })) - - return ( - - {children} - - ) -} - -const IdeReactProvider = ({ children }) => { - const [startedFreeTrial, setStartedFreeTrial] = useState(false) - - const [ideReactContextValue] = useState(() => ({ - projectId: PROJECT_ID, - eventEmitter: new IdeEventEmitter(), - startedFreeTrial, - setStartedFreeTrial, - reportError: () => {}, - projectJoined: true, - })) - - const [ideContextValue] = useState(() => { - const ide = window._ide - - const scopeStore = createReactScopeValueStore(PROJECT_ID) - for (const [key, value] of Object.entries(ide.$scope)) { - // TODO: path for nested entries - scopeStore.set(key, value) - } - scopeStore.set('editor.sharejs_doc', ide.$scope.editor.sharejs_doc) - scopeStore.set('ui.chatOpen', ide.$scope.ui.chatOpen) - const scopeEventEmitter = new ReactScopeEventEmitter(new IdeEventEmitter()) - - return { - ...ide, - scopeStore, - scopeEventEmitter, - } - }) - - useEffect(() => { - window.overleaf = { - ...window.overleaf, - unstable: { - ...window.overleaf?.unstable, - store: ideContextValue.scopeStore, +const makeConnectionProvider = socket => { + const ConnectionProvider = ({ children }) => { + const [value] = useState(() => ({ + socket, + connectionState: { + readyState: WebSocket.OPEN, + forceDisconnected: false, + inactiveDisconnect: false, + reconnectAt: null, + forcedDisconnectDelay: 0, + lastConnectionAttempt: 0, + error: '', }, - } - }, [ideContextValue.scopeStore]) + isConnected: true, + isStillReconnecting: false, + secondsUntilReconnect: () => 0, + tryReconnectNow: () => {}, + registerUserActivity: () => {}, + disconnect: () => {}, + })) - return ( - - + return ( + {children} - - - ) + + ) + } + return ConnectionProvider +} + +const makeIdeReactProvider = (scope, socket) => { + const IdeReactProvider = ({ 
children }) => { + const [startedFreeTrial, setStartedFreeTrial] = useState(false) + + const [ideReactContextValue] = useState(() => ({ + projectId: PROJECT_ID, + eventEmitter: new IdeEventEmitter(), + startedFreeTrial, + setStartedFreeTrial, + reportError: () => {}, + projectJoined: true, + })) + + const [ideContextValue] = useState(() => { + const scopeStore = createReactScopeValueStore(PROJECT_ID) + for (const [key, value] of Object.entries(scope)) { + // TODO: path for nested entries + scopeStore.set(key, value) + } + scopeStore.set('editor.sharejs_doc', scope.editor.sharejs_doc) + scopeStore.set('ui.chatOpen', scope.ui.chatOpen) + const scopeEventEmitter = new ReactScopeEventEmitter( + new IdeEventEmitter() + ) + + return { + socket, + scopeStore, + scopeEventEmitter, + } + }) + + useEffect(() => { + window.overleaf = { + ...window.overleaf, + unstable: { + ...window.overleaf?.unstable, + store: ideContextValue.scopeStore, + }, + } + }, [ideContextValue.scopeStore]) + + return ( + + + {children} + + + ) + } + return IdeReactProvider } diff --git a/services/web/test/frontend/helpers/render-with-context.jsx b/services/web/test/frontend/helpers/render-with-context.jsx index e3aba6264d..31ee64d5be 100644 --- a/services/web/test/frontend/helpers/render-with-context.jsx +++ b/services/web/test/frontend/helpers/render-with-context.jsx @@ -18,7 +18,3 @@ export function renderWithEditorContext( ...renderOptions, }) } - -export function cleanUpContext() { - delete window._ide -} diff --git a/services/web/test/frontend/helpers/reset-meta.ts b/services/web/test/frontend/helpers/reset-meta.ts index f5a979828a..e59e62342d 100644 --- a/services/web/test/frontend/helpers/reset-meta.ts +++ b/services/web/test/frontend/helpers/reset-meta.ts @@ -2,6 +2,7 @@ export function resetMeta() { window.metaAttributesCache = new Map() window.metaAttributesCache.set('ol-projectHistoryBlobsEnabled', true) window.metaAttributesCache.set('ol-i18n', { currentLangCode: 'en' }) + 
window.metaAttributesCache.set('ol-capabilities', ['chat', 'dropbox']) window.metaAttributesCache.set('ol-ExposedSettings', { appName: 'Overleaf', maxEntitiesPerProject: 10, diff --git a/services/web/test/frontend/ide/log-parser/logParserTests.js b/services/web/test/frontend/ide/log-parser/logParserTests.js index 098ee056b9..59cdd5d22e 100644 --- a/services/web/test/frontend/ide/log-parser/logParserTests.js +++ b/services/web/test/frontend/ide/log-parser/logParserTests.js @@ -6,7 +6,7 @@ const fixturePath = '../../helpers/fixtures/logs/' const fs = require('fs') const path = require('path') -describe('logParser', function (done) { +describe('logParser', function () { it('should parse errors', function () { const { errors } = parseLatexLog('errors.log', { ignoreDuplicates: true }) expect(errors.map(e => [e.line, e.message])).to.deep.equal([ diff --git a/services/web/test/unit/bootstrap.js b/services/web/test/unit/bootstrap.js index f3d3f382f2..00bcc3e958 100644 --- a/services/web/test/unit/bootstrap.js +++ b/services/web/test/unit/bootstrap.js @@ -1,7 +1,24 @@ const Path = require('path') const sinon = require('sinon') require('./common_bootstrap') +const chai = require('chai') +/* + * Chai configuration + */ + +// add chai.should() +chai.should() + +// Load sinon-chai assertions so expect(stubFn).to.have.been.calledWith('abc') +// has a nicer failure messages +chai.use(require('sinon-chai')) + +// Load promise support for chai +chai.use(require('chai-as-promised')) + +// Do not truncate assertion errors +chai.config.truncateThreshold = 0 /* * Global stubs */ diff --git a/services/web/test/unit/common_bootstrap.js b/services/web/test/unit/common_bootstrap.js index d74fee60b2..a77aad61c6 100644 --- a/services/web/test/unit/common_bootstrap.js +++ b/services/web/test/unit/common_bootstrap.js @@ -1,22 +1,3 @@ -const chai = require('chai') - -/* - * Chai configuration - */ - -// add chai.should() -chai.should() - -// Load sinon-chai assertions so 
expect(stubFn).to.have.been.calledWith('abc') -// has a nicer failure messages -chai.use(require('sinon-chai')) - -// Load promise support for chai -chai.use(require('chai-as-promised')) - -// Do not truncate assertion errors -chai.config.truncateThreshold = 0 - // add support for mongoose in sinon require('sinon-mongoose') diff --git a/services/web/test/unit/src/Analytics/AnalyticsUTMTrackingMiddleware.test.mjs b/services/web/test/unit/src/Analytics/AnalyticsUTMTrackingMiddleware.test.mjs index fff5224b48..463407b180 100644 --- a/services/web/test/unit/src/Analytics/AnalyticsUTMTrackingMiddleware.test.mjs +++ b/services/web/test/unit/src/Analytics/AnalyticsUTMTrackingMiddleware.test.mjs @@ -1,8 +1,7 @@ -import { vi } from 'vitest' +import { assert, vi } from 'vitest' import sinon from 'sinon' import MockRequest from '../helpers/MockRequest.js' import MockResponse from '../helpers/MockResponse.js' -import { assert } from 'chai' const MODULE_PATH = new URL( '../../../../app/src/Features/Analytics/AnalyticsUTMTrackingMiddleware', diff --git a/services/web/test/unit/src/Authorization/AuthorizationManagerTests.js b/services/web/test/unit/src/Authorization/AuthorizationManagerTests.js index 7463bbdeb7..e4c67d2f77 100644 --- a/services/web/test/unit/src/Authorization/AuthorizationManagerTests.js +++ b/services/web/test/unit/src/Authorization/AuthorizationManagerTests.js @@ -27,7 +27,10 @@ describe('AuthorizationManager', function () { this.CollaboratorsGetter = { promises: { - getMemberIdPrivilegeLevel: sinon.stub().resolves(PrivilegeLevels.NONE), + getProjectAccess: sinon.stub().resolves({ + publicAccessLevel: sinon.stub().returns(PublicAccessLevels.PRIVATE), + privilegeLevelForUser: sinon.stub().returns(PrivilegeLevels.NONE), + }), }, } @@ -113,9 +116,17 @@ describe('AuthorizationManager', function () { describe('with a user id with a privilege level', function () { beforeEach(async function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel - 
.withArgs(this.user._id, this.project._id) - .resolves(PrivilegeLevels.READ_ONLY) + this.CollaboratorsGetter.promises.getProjectAccess + .withArgs(this.project._id) + .resolves({ + publicAccessLevel: sinon + .stub() + .returns(PublicAccessLevels.PRIVATE), + privilegeLevelForUser: sinon + .stub() + .withArgs(this.user._id) + .returns(PrivilegeLevels.READ_ONLY), + }) this.result = await this.AuthorizationManager.promises.getPrivilegeLevelForProject( this.user._id, @@ -171,8 +182,8 @@ describe('AuthorizationManager', function () { ) }) - it('should not call CollaboratorsGetter.getMemberIdPrivilegeLevel', function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel.called.should.equal( + it('should not call CollaboratorsGetter.getProjectAccess', function () { + this.CollaboratorsGetter.promises.getProjectAccess.called.should.equal( false ) }) @@ -204,8 +215,8 @@ describe('AuthorizationManager', function () { ) }) - it('should not call CollaboratorsGetter.getMemberIdPrivilegeLevel', function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel.called.should.equal( + it('should not call CollaboratorsGetter.getProjectAccess', function () { + this.CollaboratorsGetter.promises.getProjectAccess.called.should.equal( false ) }) @@ -237,8 +248,8 @@ describe('AuthorizationManager', function () { ) }) - it('should not call CollaboratorsGetter.getMemberIdPrivilegeLevel', function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel.called.should.equal( + it('should not call CollaboratorsGetter.getProjectAccess', function () { + this.CollaboratorsGetter.promises.getProjectAccess.called.should.equal( false ) }) @@ -264,9 +275,17 @@ describe('AuthorizationManager', function () { describe('with a user id with a privilege level', function () { beforeEach(async function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel - .withArgs(this.user._id, this.project._id) - .resolves(PrivilegeLevels.READ_ONLY) + 
this.CollaboratorsGetter.promises.getProjectAccess + .withArgs(this.project._id) + .resolves({ + publicAccessLevel: sinon + .stub() + .returns(PublicAccessLevels.PRIVATE), + privilegeLevelForUser: sinon + .stub() + .withArgs(this.user._id) + .returns(PrivilegeLevels.READ_ONLY), + }) this.result = await this.AuthorizationManager.promises.getPrivilegeLevelForProject( this.user._id, @@ -321,8 +340,8 @@ describe('AuthorizationManager', function () { ) }) - it('should not call CollaboratorsGetter.getMemberIdPrivilegeLevel', function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel.called.should.equal( + it('should not call CollaboratorsGetter.getProjectAccess', function () { + this.CollaboratorsGetter.promises.getProjectAccess.called.should.equal( false ) }) @@ -336,13 +355,32 @@ describe('AuthorizationManager', function () { describe('with a public project', function () { beforeEach(function () { this.project.publicAccesLevel = 'readAndWrite' + this.CollaboratorsGetter.promises.getProjectAccess + .withArgs(this.project._id) + .resolves({ + publicAccessLevel: sinon + .stub() + .returns(this.project.publicAccesLevel), + privilegeLevelForUser: sinon + .stub() + .withArgs(this.user._id) + .returns(PrivilegeLevels.NONE), + }) }) describe('with a user id with a privilege level', function () { beforeEach(async function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel - .withArgs(this.user._id, this.project._id) - .resolves(PrivilegeLevels.READ_ONLY) + this.CollaboratorsGetter.promises.getProjectAccess + .withArgs(this.project._id) + .resolves({ + publicAccessLevel: sinon + .stub() + .returns(this.project.publicAccesLevel), + privilegeLevelForUser: sinon + .stub() + .withArgs(this.user._id) + .returns(PrivilegeLevels.READ_ONLY), + }) this.result = await this.AuthorizationManager.promises.getPrivilegeLevelForProject( this.user._id, @@ -397,8 +435,8 @@ describe('AuthorizationManager', function () { ) }) - it('should not call 
CollaboratorsGetter.getMemberIdPrivilegeLevel', function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel.called.should.equal( + it('should not call CollaboratorsGetter.getProjectAccess', function () { + this.CollaboratorsGetter.promises.getProjectAccess.called.should.equal( false ) }) @@ -410,6 +448,11 @@ describe('AuthorizationManager', function () { }) describe("when the project doesn't exist", function () { + beforeEach(function () { + this.CollaboratorsGetter.promises.getProjectAccess.rejects( + new Errors.NotFoundError() + ) + }) it('should return a NotFoundError', async function () { const someOtherId = new ObjectId() await expect( @@ -424,9 +467,15 @@ describe('AuthorizationManager', function () { describe('when the project id is not valid', function () { beforeEach(function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel - .withArgs(this.user._id, this.project._id) - .resolves(PrivilegeLevels.READ_ONLY) + this.CollaboratorsGetter.promises.getProjectAccess + .withArgs(this.project._id) + .resolves({ + publicAccessLevel: sinon.stub().returns(PublicAccessLevels.PRIVATE), + privilegeLevelForUser: sinon + .stub() + .withArgs(this.user._id) + .returns(PrivilegeLevels.READ_ONLY), + }) }) it('should return a error', async function () { @@ -529,9 +578,15 @@ describe('AuthorizationManager', function () { describe('canUserDeleteOrResolveThread', function () { it('should return true when user has write permissions', async function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel - .withArgs(this.user._id, this.project._id) - .resolves(PrivilegeLevels.READ_AND_WRITE) + this.CollaboratorsGetter.promises.getProjectAccess + .withArgs(this.project._id) + .resolves({ + publicAccessLevel: sinon.stub().returns(PublicAccessLevels.PRIVATE), + privilegeLevelForUser: sinon + .stub() + .withArgs(this.user._id) + .returns(PrivilegeLevels.READ_AND_WRITE), + }) const canResolve = await 
this.AuthorizationManager.promises.canUserDeleteOrResolveThread( @@ -546,9 +601,15 @@ describe('AuthorizationManager', function () { }) it('should return false when user has read permission', async function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel - .withArgs(this.user._id, this.project._id) - .resolves(PrivilegeLevels.READ_ONLY) + this.CollaboratorsGetter.promises.getProjectAccess + .withArgs(this.project._id) + .resolves({ + publicAccessLevel: sinon.stub().returns(PublicAccessLevels.PRIVATE), + privilegeLevelForUser: sinon + .stub() + .withArgs(this.user._id) + .returns(PrivilegeLevels.READ_ONLY), + }) const canResolve = await this.AuthorizationManager.promises.canUserDeleteOrResolveThread( @@ -564,9 +625,15 @@ describe('AuthorizationManager', function () { describe('when user has review permission', function () { beforeEach(function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel - .withArgs(this.user._id, this.project._id) - .resolves(PrivilegeLevels.REVIEW) + this.CollaboratorsGetter.promises.getProjectAccess + .withArgs(this.project._id) + .resolves({ + publicAccessLevel: sinon.stub().returns(PublicAccessLevels.PRIVATE), + privilegeLevelForUser: sinon + .stub() + .withArgs(this.user._id) + .returns(PrivilegeLevels.REVIEW), + }) }) it('should return false when user is not the comment author', async function () { @@ -691,15 +758,27 @@ function testPermission(permission, privilegeLevels) { function setupUserPrivilegeLevel(privilegeLevel) { beforeEach(`set user privilege level to ${privilegeLevel}`, function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel - .withArgs(this.user._id, this.project._id) - .resolves(privilegeLevel) + this.CollaboratorsGetter.promises.getProjectAccess + .withArgs(this.project._id) + .resolves({ + publicAccessLevel: sinon.stub().returns(PublicAccessLevels.PRIVATE), + privilegeLevelForUser: sinon + .stub() + .withArgs(this.user._id) + .returns(privilegeLevel), + }) }) } 
function setupPublicAccessLevel(level) { beforeEach(`set public access level to ${level}`, function () { this.project.publicAccesLevel = level + this.CollaboratorsGetter.promises.getProjectAccess + .withArgs(this.project._id) + .resolves({ + publicAccessLevel: sinon.stub().returns(this.project.publicAccesLevel), + privilegeLevelForUser: sinon.stub().returns(PrivilegeLevels.NONE), + }) }) } diff --git a/services/web/test/unit/src/BetaProgram/BetaProgramController.test.mjs b/services/web/test/unit/src/BetaProgram/BetaProgramController.test.mjs index e2160cca08..23dd4dc1c8 100644 --- a/services/web/test/unit/src/BetaProgram/BetaProgramController.test.mjs +++ b/services/web/test/unit/src/BetaProgram/BetaProgramController.test.mjs @@ -1,7 +1,6 @@ -import { vi } from 'vitest' +import { expect, vi } from 'vitest' import path from 'node:path' import sinon from 'sinon' -import { expect } from 'chai' import MockResponse from '../helpers/MockResponse.js' import { fileURLToPath } from 'node:url' diff --git a/services/web/test/unit/src/BetaProgram/BetaProgramHandler.test.mjs b/services/web/test/unit/src/BetaProgram/BetaProgramHandler.test.mjs index 14438a8ed7..4034835666 100644 --- a/services/web/test/unit/src/BetaProgram/BetaProgramHandler.test.mjs +++ b/services/web/test/unit/src/BetaProgram/BetaProgramHandler.test.mjs @@ -1,8 +1,7 @@ -import { vi } from 'vitest' +import { expect, vi } from 'vitest' import path from 'node:path' import sinon from 'sinon' -import { expect } from 'chai' import { fileURLToPath } from 'node:url' const __dirname = fileURLToPath(new URL('.', import.meta.url)) diff --git a/services/web/test/unit/src/Chat/ChatManagerTests.js b/services/web/test/unit/src/Chat/ChatManagerTests.js index bdd3042513..5578b8b167 100644 --- a/services/web/test/unit/src/Chat/ChatManagerTests.js +++ b/services/web/test/unit/src/Chat/ChatManagerTests.js @@ -12,7 +12,7 @@ describe('ChatManager', function () { this.user_id = 'mock-user-id' this.ChatManager = 
SandboxedModule.require(modulePath, { requires: { - '../User/UserInfoManager': (this.UserInfoManager = {}), + '../User/UserGetter': (this.UserGetter = { promises: {} }), '../User/UserInfoController': (this.UserInfoController = {}), }, }) @@ -32,18 +32,22 @@ describe('ChatManager', function () { beforeEach(function () { this.users = { user_id_1: { - mock: 'user_1', + _id: 'user_id_1', }, user_id_2: { - mock: 'user_2', + _id: 'user_id_2', }, } - this.UserInfoManager.getPersonalInfo = (userId, callback) => { - return callback(null, this.users[userId]) - } - sinon.spy(this.UserInfoManager, 'getPersonalInfo') + this.UserGetter.promises.getUsers = userIds => + Promise.resolve( + Array.from(userIds) + .map(id => this.users[id]) + .filter(u => !!u) + ) + + sinon.spy(this.UserGetter.promises, 'getUsers') return (this.UserInfoController.formatPersonalInfo = user => ({ - formatted: user.mock, + formatted: { id: user._id.toString() }, })) }) @@ -79,16 +83,16 @@ describe('ChatManager', function () { thread1: { resolved: true, resolved_by_user_id: 'user_id_1', - resolved_by_user: { formatted: 'user_1' }, + resolved_by_user: { formatted: { id: 'user_id_1' } }, messages: [ { user_id: 'user_id_1', - user: { formatted: 'user_1' }, + user: { formatted: { id: 'user_id_1' } }, content: 'foo', }, { user_id: 'user_id_2', - user: { formatted: 'user_2' }, + user: { formatted: { id: 'user_id_2' } }, content: 'bar', }, ], @@ -97,7 +101,7 @@ describe('ChatManager', function () { messages: [ { user_id: 'user_id_1', - user: { formatted: 'user_1' }, + user: { formatted: { id: 'user_id_1' } }, content: 'baz', }, ], @@ -105,7 +109,7 @@ describe('ChatManager', function () { }) }) - it('should only need to look up each user once', async function () { + it('should lookup all users in a single batch', async function () { await this.ChatManager.promises.injectUserInfoIntoThreads([ { messages: [ @@ -121,7 +125,7 @@ describe('ChatManager', function () { }, ]) - 
this.UserInfoManager.getPersonalInfo.calledOnce.should.equal(true) + this.UserGetter.promises.getUsers.should.have.been.calledOnce }) }) }) diff --git a/services/web/test/unit/src/Collaborators/CollaboratorsController.test.mjs b/services/web/test/unit/src/Collaborators/CollaboratorsController.test.mjs index 9bb9c4b3c0..1d8345a195 100644 --- a/services/web/test/unit/src/Collaborators/CollaboratorsController.test.mjs +++ b/services/web/test/unit/src/Collaborators/CollaboratorsController.test.mjs @@ -1,6 +1,5 @@ -import { vi } from 'vitest' +import { expect, vi } from 'vitest' import sinon from 'sinon' -import { expect } from 'chai' import mongodb from 'mongodb-legacy' import Errors from '../../../../app/src/Features/Errors/Errors.js' import MockRequest from '../helpers/MockRequest.js' diff --git a/services/web/test/unit/src/Collaborators/CollaboratorsGetterTests.js b/services/web/test/unit/src/Collaborators/CollaboratorsGetterTests.js index dda99e04f3..10542c4564 100644 --- a/services/web/test/unit/src/Collaborators/CollaboratorsGetterTests.js +++ b/services/web/test/unit/src/Collaborators/CollaboratorsGetterTests.js @@ -62,7 +62,7 @@ describe('CollaboratorsGetter', function () { }, } this.ProjectEditorHandler = { - buildOwnerAndMembersViews: sinon.stub(), + buildUserModelView: sinon.stub(), } this.CollaboratorsGetter = SandboxedModule.require(MODULE_PATH, { requires: { @@ -204,30 +204,6 @@ describe('CollaboratorsGetter', function () { }) }) - describe('getInvitedMembersWithPrivilegeLevels', function () { - beforeEach(function () { - this.UserGetter.promises.getUsers.resolves([ - { _id: this.readOnlyRef1 }, - { _id: this.readOnlyTokenRef }, - { _id: this.readWriteRef2 }, - { _id: this.readWriteTokenRef }, - { _id: this.reviewer1Ref }, - ]) - }) - - it('should return an array of invited members with their privilege levels', async function () { - const result = - await this.CollaboratorsGetter.promises.getInvitedMembersWithPrivilegeLevels( - this.project._id - ) - 
expect(result).to.have.deep.members([ - { user: { _id: this.readOnlyRef1 }, privilegeLevel: 'readOnly' }, - { user: { _id: this.readWriteRef2 }, privilegeLevel: 'readAndWrite' }, - { user: { _id: this.reviewer1Ref }, privilegeLevel: 'review' }, - ]) - }) - }) - describe('getMemberIdPrivilegeLevel', function () { it('should return the privilege level if it exists', async function () { const level = @@ -401,20 +377,21 @@ describe('CollaboratorsGetter', function () { { user: this.readWriteUser, privilegeLevel: 'readAndWrite' }, { user: this.reviewUser, privilegeLevel: 'review' }, ] - this.views = { - owner: this.owningUser, - ownerFeatures: this.owningUser.features, - members: [ - { _id: this.readWriteUser._id, email: this.readWriteUser.email }, - { _id: this.reviewUser._id, email: this.reviewUser.email }, - ], - } + this.memberViews = [ + { _id: this.readWriteUser._id, email: this.readWriteUser.email }, + { _id: this.reviewUser._id, email: this.reviewUser.email }, + ] this.UserGetter.promises.getUsers.resolves([ this.owningUser, this.readWriteUser, this.reviewUser, ]) - this.ProjectEditorHandler.buildOwnerAndMembersViews.returns(this.views) + this.ProjectEditorHandler.buildUserModelView + .withArgs(this.members[1]) + .returns(this.memberViews[0]) + this.ProjectEditorHandler.buildUserModelView + .withArgs(this.members[2]) + .returns(this.memberViews[1]) this.result = await this.CollaboratorsGetter.promises.getAllInvitedMembers( this.project._id @@ -422,15 +399,18 @@ describe('CollaboratorsGetter', function () { }) it('should produce a list of members', function () { - expect(this.result).to.deep.equal(this.views.members) + expect(this.result).to.deep.equal(this.memberViews) }) - it('should call ProjectEditorHandler.buildOwnerAndMembersViews', function () { - expect(this.ProjectEditorHandler.buildOwnerAndMembersViews).to.have.been - .calledOnce + it('should call ProjectEditorHandler.buildUserModelView', function () { + 
expect(this.ProjectEditorHandler.buildUserModelView).to.have.been + .calledTwice expect( - this.ProjectEditorHandler.buildOwnerAndMembersViews - ).to.have.been.calledWith(this.members) + this.ProjectEditorHandler.buildUserModelView + ).to.have.been.calledWith(this.members[1]) + expect( + this.ProjectEditorHandler.buildUserModelView + ).to.have.been.calledWith(this.members[2]) }) }) diff --git a/services/web/test/unit/src/Collaborators/CollaboratorsHandlerTests.js b/services/web/test/unit/src/Collaborators/CollaboratorsHandlerTests.js index 8542bd8355..73fb699772 100644 --- a/services/web/test/unit/src/Collaborators/CollaboratorsHandlerTests.js +++ b/services/web/test/unit/src/Collaborators/CollaboratorsHandlerTests.js @@ -447,6 +447,40 @@ describe('CollaboratorsHandler', function () { }) }) + describe('when user already exists as a reviewer', function () { + beforeEach(function () { + this.project.collaberator_refs = [] + this.project.reviewer_refs = [this.userId] + this.project.readOnly_refs = [] + }) + + it('should not add the user again', async function () { + await this.CollaboratorsHandler.promises.addUserIdToProject( + this.project._id, + this.addingUserId, + this.userId, + 'readAndWrite' + ) + }) + }) + + describe('when user already exists as a read-only user', function () { + beforeEach(function () { + this.project.collaberator_refs = [] + this.project.reviewer_refs = [] + this.project.readOnly_refs = [this.userId] + }) + + it('should not add the user again', async function () { + await this.CollaboratorsHandler.promises.addUserIdToProject( + this.project._id, + this.addingUserId, + this.userId, + 'readAndWrite' + ) + }) + }) + describe('with null addingUserId', function () { beforeEach(async function () { this.project.collaberator_refs = [] diff --git a/services/web/test/unit/src/Collaborators/CollaboratorsInviteController.test.mjs b/services/web/test/unit/src/Collaborators/CollaboratorsInviteController.test.mjs index d948e69ed4..edac9c6c92 100644 --- 
a/services/web/test/unit/src/Collaborators/CollaboratorsInviteController.test.mjs +++ b/services/web/test/unit/src/Collaborators/CollaboratorsInviteController.test.mjs @@ -1,6 +1,5 @@ -import { vi } from 'vitest' +import { expect, vi } from 'vitest' import sinon from 'sinon' -import { expect } from 'chai' import MockRequest from '../helpers/MockRequest.js' import MockResponse from '../helpers/MockResponse.js' import mongodb from 'mongodb-legacy' diff --git a/services/web/test/unit/src/Collaborators/CollaboratorsInviteHandler.test.mjs b/services/web/test/unit/src/Collaborators/CollaboratorsInviteHandler.test.mjs index ec8f453536..5d6690d7c0 100644 --- a/services/web/test/unit/src/Collaborators/CollaboratorsInviteHandler.test.mjs +++ b/services/web/test/unit/src/Collaborators/CollaboratorsInviteHandler.test.mjs @@ -1,6 +1,5 @@ -import { vi } from 'vitest' +import { expect, vi } from 'vitest' import sinon from 'sinon' -import { expect } from 'chai' import mongodb from 'mongodb-legacy' import Crypto from 'crypto' diff --git a/services/web/test/unit/src/Contact/ContactController.test.mjs b/services/web/test/unit/src/Contact/ContactController.test.mjs index 2defc2c3a7..13f70c81f6 100644 --- a/services/web/test/unit/src/Contact/ContactController.test.mjs +++ b/services/web/test/unit/src/Contact/ContactController.test.mjs @@ -1,6 +1,5 @@ -import { vi } from 'vitest' +import { expect, vi } from 'vitest' import sinon from 'sinon' -import { expect } from 'chai' import MockResponse from '../helpers/MockResponse.js' const modulePath = '../../../../app/src/Features/Contacts/ContactController.mjs' diff --git a/services/web/test/unit/src/Cooldown/CooldownMiddleware.test.mjs b/services/web/test/unit/src/Cooldown/CooldownMiddleware.test.mjs index 2bb1ed81dd..846a54d4ce 100644 --- a/services/web/test/unit/src/Cooldown/CooldownMiddleware.test.mjs +++ b/services/web/test/unit/src/Cooldown/CooldownMiddleware.test.mjs @@ -1,6 +1,5 @@ -import { vi } from 'vitest' +import { expect, vi } 
from 'vitest' import sinon from 'sinon' -import { expect } from 'chai' const modulePath = new URL( '../../../../app/src/Features/Cooldown/CooldownMiddleware.mjs', import.meta.url diff --git a/services/web/test/unit/src/DocumentUpdater/DocumentUpdaterController.test.mjs b/services/web/test/unit/src/DocumentUpdater/DocumentUpdaterController.test.mjs index 095e598d39..5a60903552 100644 --- a/services/web/test/unit/src/DocumentUpdater/DocumentUpdaterController.test.mjs +++ b/services/web/test/unit/src/DocumentUpdater/DocumentUpdaterController.test.mjs @@ -1,6 +1,5 @@ -import { vi } from 'vitest' +import { expect, vi } from 'vitest' import sinon from 'sinon' -import { expect } from 'chai' import MockResponse from '../helpers/MockResponse.js' const MODULE_PATH = diff --git a/services/web/test/unit/src/Editor/EditorHttpControllerTests.js b/services/web/test/unit/src/Editor/EditorHttpControllerTests.js index dffa2d21ff..7fc08c45d3 100644 --- a/services/web/test/unit/src/Editor/EditorHttpControllerTests.js +++ b/services/web/test/unit/src/Editor/EditorHttpControllerTests.js @@ -20,6 +20,12 @@ describe('EditorHttpController', function () { _id: new ObjectId(), projects: {}, } + this.members = [ + { user: { _id: 'owner', features: {} }, privilegeLevel: 'owner' }, + { user: { _id: 'one' }, privilegeLevel: 'readOnly' }, + ] + this.ownerMember = this.members[0] + this.invites = [{ _id: 'three' }, { _id: 'four' }] this.projectView = { _id: this.project._id, owner: { @@ -27,7 +33,10 @@ describe('EditorHttpController', function () { email: 'owner@example.com', other_property: true, }, - members: [{ one: 1 }, { two: 2 }], + members: [ + { _id: 'owner', privileges: 'owner' }, + { _id: 'one', privileges: 'readOnly' }, + ], invites: [{ three: 3 }, { four: 4 }], } this.reducedProjectView = { @@ -51,14 +60,32 @@ describe('EditorHttpController', function () { this.AuthorizationManager = { isRestrictedUser: sinon.stub().returns(false), promises: { - getPrivilegeLevelForProject: 
sinon.stub().resolves('owner'), + getPrivilegeLevelForProjectWithProjectAccess: sinon + .stub() + .resolves('owner'), }, } + const members = this.members + const ownerMember = this.ownerMember this.CollaboratorsGetter = { + ProjectAccess: class { + loadOwnerAndInvitedMembers() { + return { members, ownerMember } + } + + loadOwner() { + return ownerMember + } + + isUserTokenMember() { + return false + } + + isUserInvitedMember() { + return false + } + }, promises: { - getInvitedMembersWithPrivilegeLevels: sinon - .stub() - .resolves(['members', 'mock']), isUserInvitedMemberOfProject: sinon.stub().resolves(false), }, } @@ -67,22 +94,23 @@ describe('EditorHttpController', function () { userIsTokenMember: sinon.stub().resolves(false), }, } + this.invites = [ + { + _id: 'invite_one', + email: 'user-one@example.com', + privileges: 'readOnly', + projectId: this.project._id, + }, + { + _id: 'invite_two', + email: 'user-two@example.com', + privileges: 'readOnly', + projectId: this.project._id, + }, + ] this.CollaboratorsInviteGetter = { promises: { - getAllInvites: sinon.stub().resolves([ - { - _id: 'invite_one', - email: 'user-one@example.com', - privileges: 'readOnly', - projectId: this.project._id, - }, - { - _id: 'invite_two', - email: 'user-two@example.com', - privileges: 'readOnly', - projectId: this.project._id, - }, - ]), + getAllInvites: sinon.stub().resolves(this.invites), }, } this.EditorController = { @@ -170,13 +198,28 @@ describe('EditorHttpController', function () { describe('successfully', function () { beforeEach(function (done) { - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.resolves( - true - ) + sinon + .stub( + this.CollaboratorsGetter.ProjectAccess.prototype, + 'isUserInvitedMember' + ) + .returns(true) this.res.callback = done this.EditorHttpController.joinProject(this.req, this.res) }) + it('should request a full view', function () { + expect( + this.ProjectEditorHandler.buildProjectModelView + ).to.have.been.calledWith( + 
this.project, + this.ownerMember, + this.members, + this.invites, + false + ) + }) + it('should return the project and privilege level', function () { expect(this.res.json).to.have.been.calledWith({ project: this.projectView, @@ -213,14 +256,23 @@ describe('EditorHttpController', function () { describe('with a restricted user', function () { beforeEach(function (done) { + this.ProjectEditorHandler.buildProjectModelView.returns( + this.reducedProjectView + ) this.AuthorizationManager.isRestrictedUser.returns(true) - this.AuthorizationManager.promises.getPrivilegeLevelForProject.resolves( + this.AuthorizationManager.promises.getPrivilegeLevelForProjectWithProjectAccess.resolves( 'readOnly' ) this.res.callback = done this.EditorHttpController.joinProject(this.req, this.res) }) + it('should request a restricted view', function () { + expect( + this.ProjectEditorHandler.buildProjectModelView + ).to.have.been.calledWith(this.project, this.ownerMember, [], [], true) + }) + it('should mark the user as restricted, and hide details of owner', function () { expect(this.res.json).to.have.been.calledWith({ project: this.reducedProjectView, @@ -234,7 +286,7 @@ describe('EditorHttpController', function () { describe('when not authorized', function () { beforeEach(function (done) { - this.AuthorizationManager.promises.getPrivilegeLevelForProject.resolves( + this.AuthorizationManager.promises.getPrivilegeLevelForProjectWithProjectAccess.resolves( null ) this.res.callback = done @@ -250,6 +302,9 @@ describe('EditorHttpController', function () { beforeEach(function (done) { this.token = 'token' this.TokenAccessHandler.getRequestToken.returns(this.token) + this.ProjectEditorHandler.buildProjectModelView.returns( + this.reducedProjectView + ) this.req.body = { userId: 'anonymous-user', anonymousAccessToken: this.token, @@ -258,12 +313,18 @@ describe('EditorHttpController', function () { this.AuthorizationManager.isRestrictedUser .withArgs(null, 'readOnly', false, false) .returns(true) 
- this.AuthorizationManager.promises.getPrivilegeLevelForProject + this.AuthorizationManager.promises.getPrivilegeLevelForProjectWithProjectAccess .withArgs(null, this.project._id, this.token) .resolves('readOnly') this.EditorHttpController.joinProject(this.req, this.res) }) + it('should request a restricted view', function () { + expect( + this.ProjectEditorHandler.buildProjectModelView + ).to.have.been.calledWith(this.project, this.ownerMember, [], [], true) + }) + it('should mark the user as restricted', function () { expect(this.res.json).to.have.been.calledWith({ project: this.reducedProjectView, @@ -277,11 +338,19 @@ describe('EditorHttpController', function () { describe('with a token access user', function () { beforeEach(function (done) { - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.resolves( - false - ) - this.CollaboratorsHandler.promises.userIsTokenMember.resolves(true) - this.AuthorizationManager.promises.getPrivilegeLevelForProject.resolves( + sinon + .stub( + this.CollaboratorsGetter.ProjectAccess.prototype, + 'isUserInvitedMember' + ) + .returns(false) + sinon + .stub( + this.CollaboratorsGetter.ProjectAccess.prototype, + 'isUserTokenMember' + ) + .returns(true) + this.AuthorizationManager.promises.getPrivilegeLevelForProjectWithProjectAccess.resolves( 'readAndWrite' ) this.res.callback = done diff --git a/services/web/test/unit/src/Email/EmailBuilderTests.js b/services/web/test/unit/src/Email/EmailBuilderTests.js index a8a0dc1ad5..8cc83f0228 100644 --- a/services/web/test/unit/src/Email/EmailBuilderTests.js +++ b/services/web/test/unit/src/Email/EmailBuilderTests.js @@ -818,6 +818,43 @@ describe('EmailBuilder', function () { }) }) }) + + describe('removeGroupMember', function () { + beforeEach(function () { + this.passwordResetUrl = `${this.settings.siteUrl}/user/password/reset` + this.emailAddress = 'example@overleaf.com' + this.opts = { + to: this.emailAddress, + adminName: 'abcdef', + } + this.email = 
this.EmailBuilder.buildEmail( + 'removeGroupMember', + this.opts + ) + this.dom = cheerio.load(this.email.html) + }) + + it('should build the email', function () { + expect(this.email.html).to.exist + expect(this.email.text).to.exist + }) + + describe('HTML email', function () { + it('should include links', function () { + const resetPasswordLink = this.dom('a:contains("set a password")') + expect(resetPasswordLink.length).to.equal(1) + expect(resetPasswordLink.attr('href')).to.equal( + this.passwordResetUrl + ) + }) + }) + + describe('plain text email', function () { + it('should include URLs', function () { + expect(this.email.text).to.contain(this.passwordResetUrl) + }) + }) + }) }) }) }) diff --git a/services/web/test/unit/src/Exports/ExportsController.test.mjs b/services/web/test/unit/src/Exports/ExportsController.test.mjs index af9c1483fb..cd8f4ba7a9 100644 --- a/services/web/test/unit/src/Exports/ExportsController.test.mjs +++ b/services/web/test/unit/src/Exports/ExportsController.test.mjs @@ -1,5 +1,4 @@ -import { vi } from 'vitest' -import { expect } from 'chai' +import { expect, vi } from 'vitest' import sinon from 'sinon' const modulePath = new URL( '../../../../app/src/Features/Exports/ExportsController.mjs', diff --git a/services/web/test/unit/src/Exports/ExportsHandler.test.mjs b/services/web/test/unit/src/Exports/ExportsHandler.test.mjs index 0eb8a98e26..a7944beced 100644 --- a/services/web/test/unit/src/Exports/ExportsHandler.test.mjs +++ b/services/web/test/unit/src/Exports/ExportsHandler.test.mjs @@ -1,6 +1,5 @@ -import { vi } from 'vitest' +import { expect, vi } from 'vitest' import sinon from 'sinon' -import { expect } from 'chai' const modulePath = '../../../../app/src/Features/Exports/ExportsHandler.mjs' describe('ExportsHandler', function () { diff --git a/services/web/test/unit/src/FileStore/FileStoreController.test.mjs b/services/web/test/unit/src/FileStore/FileStoreController.test.mjs index 5c46e516a0..ba0670d49c 100644 --- 
a/services/web/test/unit/src/FileStore/FileStoreController.test.mjs +++ b/services/web/test/unit/src/FileStore/FileStoreController.test.mjs @@ -1,5 +1,4 @@ -import { vi } from 'vitest' -import { expect } from 'chai' +import { expect, vi } from 'vitest' import sinon from 'sinon' import Errors from '../../../../app/src/Features/Errors/Errors.js' import MockResponse from '../helpers/MockResponse.js' diff --git a/services/web/test/unit/src/History/RestoreManagerTests.js b/services/web/test/unit/src/History/RestoreManagerTests.js index 2474425bfb..f76ba506ad 100644 --- a/services/web/test/unit/src/History/RestoreManagerTests.js +++ b/services/web/test/unit/src/History/RestoreManagerTests.js @@ -9,6 +9,12 @@ const tk = require('timekeeper') const moment = require('moment') const { expect } = require('chai') +function nestedMapWithSetToObject(m) { + return Object.fromEntries( + Array.from(m.entries()).map(([key, set]) => [key, Array.from(set)]) + ) +} + describe('RestoreManager', function () { beforeEach(function () { tk.freeze(Date.now()) // freeze the time for these tests @@ -28,7 +34,7 @@ describe('RestoreManager', function () { promises: { flushProjectToMongo: sinon.stub().resolves() }, }), '../Docstore/DocstoreManager': (this.DocstoreManager = { - promises: {}, + promises: { getCommentThreadIds: sinon.stub().resolves({}) }, }), '../Chat/ChatApiHandler': (this.ChatApiHandler = { promises: {} }), '../Chat/ChatManager': (this.ChatManager = { promises: {} }), @@ -260,22 +266,33 @@ describe('RestoreManager', function () { beforeEach(function () { this.pathname = 'foo.tex' this.comments = [ - { op: { t: 'comment-in-other-doc', p: 0, c: 'foo' } }, - { op: { t: 'single-comment', p: 10, c: 'bar' } }, - { op: { t: 'deleted-comment', p: 20, c: 'baz' } }, + { + id: 'comment-in-other-doc', + op: { t: 'comment-in-other-doc', p: 0, c: 'foo' }, + }, + { + id: 'single-comment', + op: { t: 'single-comment', p: 10, c: 'bar' }, + }, + { + id: 'deleted-comment', + op: { t: 
'deleted-comment', p: 20, c: 'baz' }, + }, ] this.remappedComments = [ - { op: { t: 'duplicate-comment', p: 0, c: 'foo' } }, - { op: { t: 'single-comment', p: 10, c: 'bar' } }, + { + id: 'duplicate-comment', + op: { t: 'duplicate-comment', p: 0, c: 'foo' }, + }, + { + id: 'single-comment', + op: { t: 'single-comment', p: 10, c: 'bar' }, + }, ] this.ProjectLocator.promises.findElementByPath = sinon.stub().rejects() - this.DocstoreManager.promises.getAllRanges = sinon.stub().resolves([ - { - ranges: { - comments: this.comments.slice(0, 1), - }, - }, - ]) + this.DocstoreManager.promises.getCommentThreadIds = sinon + .stub() + .resolves({ 'other-doc': [this.comments[0].op.t] }) this.ChatApiHandler.promises.duplicateCommentThreads = sinon .stub() .resolves({ @@ -355,7 +372,7 @@ describe('RestoreManager', function () { expect( this.DocumentUpdaterHandler.promises.flushProjectToMongo ).to.have.been.calledBefore( - this.DocstoreManager.promises.getAllRanges + this.DocstoreManager.promises.getCommentThreadIds ) }) @@ -451,19 +468,11 @@ describe('RestoreManager', function () { ) }) - it('should delete the document before flushing', function () { - expect( - this.EditorController.promises.deleteEntity - ).to.have.been.calledBefore( - this.DocumentUpdaterHandler.promises.flushProjectToMongo - ) - }) - it('should flush the document before fetching ranges', function () { expect( this.DocumentUpdaterHandler.promises.flushProjectToMongo ).to.have.been.calledBefore( - this.DocstoreManager.promises.getAllRanges + this.DocstoreManager.promises.getCommentThreadIds ) }) @@ -499,6 +508,143 @@ describe('RestoreManager', function () { ) }) }) + + describe('with comments in same doc', function () { + // copy of the above, addition: inject and later inspect threadIds set + beforeEach(async function () { + this.ProjectLocator.promises.findElementByPath = sinon + .stub() + .resolves({ type: 'doc', element: { _id: 'mock-file-id' } }) + this.EditorController.promises.deleteEntity = 
sinon.stub().resolves() + this.ChatApiHandler.promises.generateThreadData = sinon + .stub() + .resolves( + (this.threadData = { + [this.comments[0].op.t]: { + messages: [ + { + content: 'message', + timestamp: '2024-01-01T00:00:00.000Z', + user_id: 'user-1', + }, + ], + }, + [this.comments[1].op.t]: { + messages: [ + { + content: 'other message', + timestamp: '2024-01-01T00:00:00.000Z', + user_id: 'user-1', + }, + ], + }, + }) + ) + + this.threadIds = new Map([ + [ + 'mock-file-id', + new Set([this.comments[0].op.t, this.comments[1].op.t]), + ], + ]) + // Comments are updated in-place. Look up threads before reverting. + this.afterThreadIds = { + // mock-file-id removed + [this.addedFile._id]: [ + this.comments[0].op.t, + this.comments[1].op.t, + ], + } + this.data = await this.RestoreManager.promises._revertSingleFile( + this.user_id, + this.project_id, + this.version, + this.pathname, + this.threadIds + ) + }) + + it('should import the file with original comments minus the deleted one', function () { + expect( + this.EditorController.promises.addDocWithRanges + ).to.have.been.calledWith( + this.project_id, + this.folder_id, + 'foo.tex', + ['foo', 'bar', 'baz'], + { + changes: this.tracked_changes, + comments: this.comments.slice(0, 2), + }, + { + kind: 'file-restore', + path: this.pathname, + version: this.version, + timestamp: new Date(this.endTs).toISOString(), + } + ) + }) + + it('should add the seen thread ids to the map', function () { + expect(nestedMapWithSetToObject(this.threadIds)).to.deep.equal( + this.afterThreadIds + ) + }) + }) + + describe('with remapped comments during revertProject', function () { + // copy of the above, addition: inject and later inspect threadIds set + beforeEach(async function () { + this.ProjectLocator.promises.findElementByPath = sinon + .stub() + .resolves({ type: 'doc', element: { _id: 'mock-file-id' } }) + this.EditorController.promises.deleteEntity = sinon.stub().resolves() + + this.threadIds = new Map([ + ['other-doc', 
new Set([this.comments[0].op.t])], + ]) + // Comments are updated in-place. Look up threads before reverting. + this.afterThreadIds = { + // mock-file-id removed + 'other-doc': [this.comments[0].op.t], + [this.addedFile._id]: [ + this.remappedComments[0].op.t, + this.remappedComments[1].op.t, + ], + } + this.data = await this.RestoreManager.promises._revertSingleFile( + this.user_id, + this.project_id, + this.version, + this.pathname, + this.threadIds + ) + }) + + it('should import the file', function () { + expect( + this.EditorController.promises.addDocWithRanges + ).to.have.been.calledWith( + this.project_id, + this.folder_id, + 'foo.tex', + ['foo', 'bar', 'baz'], + { changes: this.tracked_changes, comments: this.remappedComments }, + { + kind: 'file-restore', + path: this.pathname, + version: this.version, + timestamp: new Date(this.endTs).toISOString(), + } + ) + }) + + it('should add the seen thread ids to the map', function () { + expect(nestedMapWithSetToObject(this.threadIds)).to.deep.equal( + this.afterThreadIds + ) + }) + }) }) describe('reverting a file or document with metadata', function () { @@ -524,7 +670,9 @@ describe('RestoreManager', function () { .stub() .resolves((this.addedFile = { _id: 'mock-doc-id', type: 'doc' })) - this.DocstoreManager.promises.getAllRanges = sinon.stub().resolves([]) + this.DocstoreManager.promises.getCommentThreadIds = sinon + .stub() + .resolves({}) this.ChatApiHandler.promises.generateThreadData = sinon .stub() .resolves({}) @@ -741,7 +889,7 @@ describe('RestoreManager', function () { this.ProjectGetter.promises.getProject .withArgs(this.project_id) .resolves({ overleaf: { history: { rangesSupportEnabled: true } } }) - this.RestoreManager.promises.revertFile = sinon.stub().resolves() + this.RestoreManager.promises._revertSingleFile = sinon.stub().resolves() this.RestoreManager.promises._getProjectPathsAtVersion = sinon .stub() .resolves([]) @@ -832,21 +980,27 @@ describe('RestoreManager', function () { }) it('should 
revert the old files', function () { - expect(this.RestoreManager.promises.revertFile).to.have.been.calledWith( + expect( + this.RestoreManager.promises._revertSingleFile + ).to.have.been.calledWith( this.user_id, this.project_id, this.version, 'main.tex' ) - expect(this.RestoreManager.promises.revertFile).to.have.been.calledWith( + expect( + this.RestoreManager.promises._revertSingleFile + ).to.have.been.calledWith( this.user_id, this.project_id, this.version, 'figures/image.png' ) - expect(this.RestoreManager.promises.revertFile).to.have.been.calledWith( + expect( + this.RestoreManager.promises._revertSingleFile + ).to.have.been.calledWith( this.user_id, this.project_id, this.version, @@ -856,7 +1010,7 @@ describe('RestoreManager', function () { it('should not revert the current files', function () { expect( - this.RestoreManager.promises.revertFile + this.RestoreManager.promises._revertSingleFile ).to.not.have.been.calledWith( this.user_id, this.project_id, diff --git a/services/web/test/unit/src/LinkedFiles/LinkedFilesController.test.mjs b/services/web/test/unit/src/LinkedFiles/LinkedFilesController.test.mjs index b29d10bba4..e712d17198 100644 --- a/services/web/test/unit/src/LinkedFiles/LinkedFilesController.test.mjs +++ b/services/web/test/unit/src/LinkedFiles/LinkedFilesController.test.mjs @@ -1,5 +1,4 @@ -import { vi } from 'vitest' -import { expect } from 'chai' +import { expect, vi } from 'vitest' import sinon from 'sinon' const modulePath = '../../../../app/src/Features/LinkedFiles/LinkedFilesController.mjs' diff --git a/services/web/test/unit/src/Metadata/MetaController.test.mjs b/services/web/test/unit/src/Metadata/MetaController.test.mjs index 00b3568ae2..ee3488137a 100644 --- a/services/web/test/unit/src/Metadata/MetaController.test.mjs +++ b/services/web/test/unit/src/Metadata/MetaController.test.mjs @@ -1,5 +1,4 @@ -import { vi } from 'vitest' -import { expect } from 'chai' +import { expect, vi } from 'vitest' import sinon from 'sinon' import 
MockResponse from '../helpers/MockResponse.js' const modulePath = '../../../../app/src/Features/Metadata/MetaController.mjs' diff --git a/services/web/test/unit/src/Metadata/MetaHandler.test.mjs b/services/web/test/unit/src/Metadata/MetaHandler.test.mjs index c6009a2dd6..48d5cc51a4 100644 --- a/services/web/test/unit/src/Metadata/MetaHandler.test.mjs +++ b/services/web/test/unit/src/Metadata/MetaHandler.test.mjs @@ -1,5 +1,4 @@ -import { vi } from 'vitest' -import { expect } from 'chai' +import { expect, vi } from 'vitest' import sinon from 'sinon' const modulePath = '../../../../app/src/Features/Metadata/MetaHandler.mjs' diff --git a/services/web/test/unit/src/Notifications/NotificationsController.test.mjs b/services/web/test/unit/src/Notifications/NotificationsController.test.mjs index 6e1f9177c0..1bc5c51b31 100644 --- a/services/web/test/unit/src/Notifications/NotificationsController.test.mjs +++ b/services/web/test/unit/src/Notifications/NotificationsController.test.mjs @@ -14,6 +14,9 @@ describe('NotificationsController', function () { ctx.handler = { getUserNotifications: sinon.stub().callsArgWith(1), markAsRead: sinon.stub().callsArgWith(2), + promises: { + getUserNotifications: sinon.stub().callsArgWith(1), + }, } ctx.req = { params: { @@ -77,4 +80,22 @@ describe('NotificationsController', function () { }) }) }) + + it('should get a notification by notification id', function (ctx) { + return new Promise(resolve => { + const notification = { _id: notificationId, user_id: userId } + ctx.handler.getUserNotifications = sinon + .stub() + .callsArgWith(1, null, [notification]) + ctx.controller.getNotification(ctx.req, { + json: body => { + body.should.deep.equal(notification) + resolve() + }, + status: () => ({ + end: () => {}, + }), + }) + }) + }) }) diff --git a/services/web/test/unit/src/PasswordReset/PasswordResetController.test.mjs b/services/web/test/unit/src/PasswordReset/PasswordResetController.test.mjs index e4cf6e569f..05bbfdb433 100644 --- 
a/services/web/test/unit/src/PasswordReset/PasswordResetController.test.mjs +++ b/services/web/test/unit/src/PasswordReset/PasswordResetController.test.mjs @@ -1,6 +1,5 @@ -import { vi } from 'vitest' +import { expect, vi } from 'vitest' import sinon from 'sinon' -import { expect } from 'chai' import MockResponse from '../helpers/MockResponse.js' const MODULE_PATH = new URL( diff --git a/services/web/test/unit/src/PasswordReset/PasswordResetHandler.test.mjs b/services/web/test/unit/src/PasswordReset/PasswordResetHandler.test.mjs index 25d664b795..aab46ae2bf 100644 --- a/services/web/test/unit/src/PasswordReset/PasswordResetHandler.test.mjs +++ b/services/web/test/unit/src/PasswordReset/PasswordResetHandler.test.mjs @@ -1,6 +1,5 @@ -import { vi } from 'vitest' +import { expect, vi } from 'vitest' import sinon from 'sinon' -import { expect } from 'chai' const modulePath = new URL( '../../../../app/src/Features/PasswordReset/PasswordResetHandler', import.meta.url diff --git a/services/web/test/unit/src/Project/ProjectControllerTests.js b/services/web/test/unit/src/Project/ProjectControllerTests.js index 46427171da..0acd900b90 100644 --- a/services/web/test/unit/src/Project/ProjectControllerTests.js +++ b/services/web/test/unit/src/Project/ProjectControllerTests.js @@ -201,9 +201,6 @@ describe('ProjectController', function () { getCurrentAffiliations: sinon.stub().resolves([]), }, } - this.SubscriptionViewModelBuilder = { - getBestSubscription: sinon.stub().yields(null, { type: 'free' }), - } this.SurveyHandler = { getSurvey: sinon.stub().yields(null, {}), } @@ -303,6 +300,7 @@ describe('ProjectController', function () { translate() {}, }, ip: '192.170.18.1', + capabilitySet: new Set(['chat']), } this.res = { locals: { @@ -1088,34 +1086,12 @@ describe('ProjectController', function () { this.ProjectController.loadEditor(this.req, this.res) }) - describe('chatEnabled flag', function () { - it('should be set to false when the feature is disabled', function (done) { + 
describe('capabilitySet', function () { + it('should be passed as an array when loading the editor', function (done) { this.Features.hasFeature = sinon.stub().withArgs('chat').returns(false) this.res.render = (pageName, opts) => { - expect(opts.chatEnabled).to.be.false - done() - } - this.ProjectController.loadEditor(this.req, this.res) - }) - - it('should be set to false when the feature is enabled but the capability is not available', function (done) { - this.Features.hasFeature = sinon.stub().withArgs('chat').returns(false) - this.req.capabilitySet = new Set() - - this.res.render = (pageName, opts) => { - expect(opts.chatEnabled).to.be.false - done() - } - this.ProjectController.loadEditor(this.req, this.res) - }) - - it('should be set to true when the feature is enabled and the capability is available', function (done) { - this.Features.hasFeature = sinon.stub().withArgs('chat').returns(true) - this.req.capabilitySet = new Set(['chat']) - - this.res.render = (pageName, opts) => { - expect(opts.chatEnabled).to.be.true + expect(opts.capabilities).to.deep.equal(['chat']) done() } this.ProjectController.loadEditor(this.req, this.res) diff --git a/services/web/test/unit/src/Project/ProjectEditorHandlerTests.js b/services/web/test/unit/src/Project/ProjectEditorHandlerTests.js index 0fb5b5fce4..8456fe2227 100644 --- a/services/web/test/unit/src/Project/ProjectEditorHandlerTests.js +++ b/services/web/test/unit/src/Project/ProjectEditorHandlerTests.js @@ -8,6 +8,7 @@ describe('ProjectEditorHandler', function () { beforeEach(function () { this.project = { _id: 'project-id', + owner_ref: 'owner-id', name: 'Project Name', rootDoc_id: 'file-id', publicAccesLevel: 'private', @@ -43,16 +44,19 @@ describe('ProjectEditorHandler', function () { }, ], } + this.ownerMember = { + user: (this.owner = { + _id: 'owner-id', + first_name: 'Owner', + last_name: 'Overleaf', + email: 'owner@overleaf.com', + features: { + compileTimeout: 240, + }, + }), + privilegeLevel: 'owner', + } 
this.members = [ - { - user: (this.owner = { - _id: 'owner-id', - first_name: 'Owner', - last_name: 'Overleaf', - email: 'owner@overleaf.com', - }), - privilegeLevel: 'owner', - }, { user: { _id: 'read-only-id', @@ -96,8 +100,10 @@ describe('ProjectEditorHandler', function () { beforeEach(function () { this.result = this.handler.buildProjectModelView( this.project, + this.ownerMember, this.members, - this.invites + this.invites, + false ) }) @@ -206,6 +212,93 @@ describe('ProjectEditorHandler', function () { expect(invite.token).not.to.exist } }) + + it('should have the correct features', function () { + expect(this.result.features.compileTimeout).to.equal(240) + }) + }) + + describe('with a restricted user', function () { + beforeEach(function () { + this.result = this.handler.buildProjectModelView( + this.project, + this.ownerMember, + [], + [], + true + ) + }) + + it('should include the id', function () { + expect(this.result._id).to.exist + this.result._id.should.equal('project-id') + }) + + it('should include the name', function () { + expect(this.result.name).to.exist + this.result.name.should.equal('Project Name') + }) + + it('should include the root doc id', function () { + expect(this.result.rootDoc_id).to.exist + this.result.rootDoc_id.should.equal('file-id') + }) + + it('should include the public access level', function () { + expect(this.result.publicAccesLevel).to.exist + this.result.publicAccesLevel.should.equal('private') + }) + + it('should hide the owner', function () { + expect(this.result.owner).to.deep.equal({ _id: 'owner-id' }) + }) + + it('should hide members', function () { + this.result.members.length.should.equal(0) + }) + + it('should include folders in the project', function () { + this.result.rootFolder[0]._id.should.equal('root-folder-id') + this.result.rootFolder[0].name.should.equal('') + + this.result.rootFolder[0].folders[0]._id.should.equal('sub-folder-id') + this.result.rootFolder[0].folders[0].name.should.equal('folder') + }) + + 
it('should not duplicate folder contents', function () { + this.result.rootFolder[0].docs.length.should.equal(0) + this.result.rootFolder[0].fileRefs.length.should.equal(0) + }) + + it('should include files in the project', function () { + this.result.rootFolder[0].folders[0].fileRefs[0]._id.should.equal( + 'file-id' + ) + this.result.rootFolder[0].folders[0].fileRefs[0].name.should.equal( + 'image.png' + ) + this.result.rootFolder[0].folders[0].fileRefs[0].created.should.equal( + this.created + ) + expect(this.result.rootFolder[0].folders[0].fileRefs[0].size).not.to + .exist + }) + + it('should include docs in the project but not the lines', function () { + this.result.rootFolder[0].folders[0].docs[0]._id.should.equal('doc-id') + this.result.rootFolder[0].folders[0].docs[0].name.should.equal( + 'main.tex' + ) + expect(this.result.rootFolder[0].folders[0].docs[0].lines).not.to.exist + }) + + it('should hide invites', function () { + expect(this.result.invites).to.have.length(0) + }) + + it('should have the correct features', function () { + expect(this.result.features.compileTimeout).to.equal(240) + }) }) describe('deletedByExternalDataSource', function () { @@ -213,8 +306,10 @@ describe('ProjectEditorHandler', function () { delete this.project.deletedByExternalDataSource const result = this.handler.buildProjectModelView( this.project, + this.ownerMember, this.members, - [] + [], + false ) result.deletedByExternalDataSource.should.equal(false) }) @@ -222,8 +317,10 @@ describe('ProjectEditorHandler', function () { it('should set the deletedByExternalDataSource flag to false when it is false', function () { const result = this.handler.buildProjectModelView( this.project, + this.ownerMember, this.members, - [] + [], + false ) result.deletedByExternalDataSource.should.equal(false) }) @@ -232,8 +329,10 @@ describe('ProjectEditorHandler', function () { this.project.deletedByExternalDataSource = true const result = this.handler.buildProjectModelView( this.project, + 
this.ownerMember, this.members, - [] + [], + false ) result.deletedByExternalDataSource.should.equal(true) }) @@ -249,8 +348,10 @@ describe('ProjectEditorHandler', function () { } this.result = this.handler.buildProjectModelView( this.project, + this.ownerMember, this.members, - [] + [], + false ) }) @@ -278,8 +379,10 @@ describe('ProjectEditorHandler', function () { } this.result = this.handler.buildProjectModelView( this.project, + this.ownerMember, this.members, - [] + [], + false ) }) it('should not emit trackChangesState', function () { @@ -302,8 +405,10 @@ describe('ProjectEditorHandler', function () { this.project.track_changes = dbEntry this.result = this.handler.buildProjectModelView( this.project, + this.ownerMember, this.members, - [] + [], + false ) }) it(`should set trackChangesState=${expected}`, function () { @@ -322,66 +427,4 @@ describe('ProjectEditorHandler', function () { }) }) }) - - describe('buildOwnerAndMembersViews', function () { - beforeEach(function () { - this.owner.features = { - versioning: true, - collaborators: 3, - compileGroup: 'priority', - compileTimeout: 22, - } - this.result = this.handler.buildOwnerAndMembersViews(this.members) - }) - - it('should produce an object with the right keys', function () { - expect(this.result).to.have.all.keys([ - 'owner', - 'ownerFeatures', - 'members', - ]) - }) - - it('should separate the owner from the members', function () { - this.result.members.length.should.equal(this.members.length - 1) - expect(this.result.owner._id).to.equal(this.owner._id) - expect(this.result.owner.email).to.equal(this.owner.email) - expect( - this.result.members.filter(m => m._id === this.owner._id).length - ).to.equal(0) - }) - - it('should extract the ownerFeatures from the owner object', function () { - expect(this.result.ownerFeatures).to.deep.equal(this.owner.features) - }) - - describe('when there is no owner', function () { - beforeEach(function () { - // remove the owner from members list - 
this.membersWithoutOwner = this.members.filter( - m => m.user._id !== this.owner._id - ) - this.result = this.handler.buildOwnerAndMembersViews( - this.membersWithoutOwner - ) - }) - - it('should produce an object with the right keys', function () { - expect(this.result).to.have.all.keys([ - 'owner', - 'ownerFeatures', - 'members', - ]) - }) - - it('should not separate out an owner', function () { - this.result.members.length.should.equal(this.membersWithoutOwner.length) - expect(this.result.owner).to.equal(null) - }) - - it('should not extract the ownerFeatures from the owner object', function () { - expect(this.result.ownerFeatures).to.equal(null) - }) - }) - }) }) diff --git a/services/web/test/unit/src/Project/ProjectListController.test.mjs b/services/web/test/unit/src/Project/ProjectListController.test.mjs index a051382279..ae1bc72210 100644 --- a/services/web/test/unit/src/Project/ProjectListController.test.mjs +++ b/services/web/test/unit/src/Project/ProjectListController.test.mjs @@ -1,6 +1,5 @@ -import { vi } from 'vitest' +import { expect, vi } from 'vitest' import sinon from 'sinon' -import { expect } from 'chai' import mongodb from 'mongodb-legacy' import Errors from '../../../../app/src/Features/Errors/Errors.js' @@ -99,6 +98,7 @@ describe('ProjectListController', function () { ctx.SplitTestHandler = { promises: { getAssignment: sinon.stub().resolves({ variant: 'default' }), + hasUserBeenAssignedToVariant: sinon.stub().resolves(false), }, } ctx.SplitTestSessionHandler = { diff --git a/services/web/test/unit/src/Referal/ReferalHandler.test.mjs b/services/web/test/unit/src/Referal/ReferalHandler.test.mjs index 5174918bd7..5c042f2ef9 100644 --- a/services/web/test/unit/src/Referal/ReferalHandler.test.mjs +++ b/services/web/test/unit/src/Referal/ReferalHandler.test.mjs @@ -1,5 +1,4 @@ -import { vi } from 'vitest' -import { expect } from 'chai' +import { expect, vi } from 'vitest' import sinon from 'sinon' const modulePath = 
'../../../../app/src/Features/Referal/ReferalHandler.mjs' diff --git a/services/web/test/unit/src/References/ReferencesHandler.test.mjs b/services/web/test/unit/src/References/ReferencesHandler.test.mjs index ae7b86822a..92666e6bcc 100644 --- a/services/web/test/unit/src/References/ReferencesHandler.test.mjs +++ b/services/web/test/unit/src/References/ReferencesHandler.test.mjs @@ -1,6 +1,4 @@ -import { vi } from 'vitest' - -import { expect } from 'chai' +import { expect, vi } from 'vitest' import sinon from 'sinon' import Errors from '../../../../app/src/Features/Errors/Errors.js' const modulePath = diff --git a/services/web/test/unit/src/Subscription/PaymentProviderEntitiesTest.js b/services/web/test/unit/src/Subscription/PaymentProviderEntitiesTest.js index c6593da28d..07c401dfb8 100644 --- a/services/web/test/unit/src/Subscription/PaymentProviderEntitiesTest.js +++ b/services/web/test/unit/src/Subscription/PaymentProviderEntitiesTest.js @@ -11,6 +11,7 @@ const { PaymentProviderSubscription, PaymentProviderSubscriptionAddOnUpdate, } = require('../../../../app/src/Features/Subscription/PaymentProviderEntities') +const SubscriptionHelper = require('../../../../app/src/Features/Subscription/SubscriptionHelper') const MODULE_PATH = '../../../../app/src/Features/Subscription/PaymentProviderEntities' @@ -32,6 +33,7 @@ describe('PaymentProviderEntities', function () { requires: { '@overleaf/settings': this.Settings, './Errors': Errors, + './SubscriptionHelper': SubscriptionHelper, }, }) }) @@ -102,6 +104,23 @@ describe('PaymentProviderEntities', function () { ) }) + it('returns a change request for downgrades while on trial', function () { + const fiveDaysFromNow = new Date() + fiveDaysFromNow.setDate(fiveDaysFromNow.getDate() + 5) + this.subscription.trialPeriodEnd = fiveDaysFromNow + const { PaymentProviderSubscriptionChangeRequest } = + this.PaymentProviderEntities + const changeRequest = + this.subscription.getRequestForPlanChange('cheap-plan') + 
expect(changeRequest).to.deep.equal( + new PaymentProviderSubscriptionChangeRequest({ + subscription: this.subscription, + timeframe: 'now', + planCode: 'cheap-plan', + }) + ) + }) + it('preserves the AI add-on on upgrades', function () { const { PaymentProviderSubscriptionChangeRequest } = this.PaymentProviderEntities @@ -154,7 +173,7 @@ describe('PaymentProviderEntities', function () { expect(changeRequest).to.deep.equal( new PaymentProviderSubscriptionChangeRequest({ subscription: this.subscription, - timeframe: 'term_end', + timeframe: 'now', planCode: 'cheap-plan', addOnUpdates: [ new PaymentProviderSubscriptionAddOnUpdate({ @@ -280,6 +299,22 @@ describe('PaymentProviderEntities', function () { ) }) + it('returns a change request when in trial', function () { + const fiveDaysFromNow = new Date() + fiveDaysFromNow.setDate(fiveDaysFromNow.getDate() + 5) + this.subscription.trialPeriodEnd = fiveDaysFromNow + const changeRequest = this.subscription.getRequestForAddOnRemoval( + this.addOn.code + ) + expect(changeRequest).to.deep.equal( + new PaymentProviderSubscriptionChangeRequest({ + subscription: this.subscription, + timeframe: 'now', + addOnUpdates: [], + }) + ) + }) + it("throws an AddOnNotPresentError if the subscription doesn't have the add-on", function () { expect(() => this.subscription.getRequestForAddOnRemoval('another-add-on') diff --git a/services/web/test/unit/src/Subscription/PlansLocatorTests.js b/services/web/test/unit/src/Subscription/PlansLocatorTests.js index f705baa01c..bd15f5cfaa 100644 --- a/services/web/test/unit/src/Subscription/PlansLocatorTests.js +++ b/services/web/test/unit/src/Subscription/PlansLocatorTests.js @@ -29,6 +29,7 @@ const plans = [ describe('PlansLocator', function () { beforeEach(function () { this.settings = { plans } + this.AI_ADD_ON_CODE = 'assistant' this.PlansLocator = SandboxedModule.require(modulePath, { requires: { @@ -49,68 +50,139 @@ describe('PlansLocator', function () { }) }) - 
describe('mapRecurlyPlanCodeToStripeLookupKey', function () { + describe('buildStripeLookupKey', function () { it('should map "collaborator" plan code to stripe lookup keys', function () { const planCode = 'collaborator' - const lookupKey = - this.PlansLocator.mapRecurlyPlanCodeToStripeLookupKey(planCode) - expect(lookupKey).to.equal('standard_monthly') + const currency = 'eur' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + planCode, + currency + ) + expect(lookupKey).to.equal('standard_monthly_jun2025_eur') }) it('should map "collaborator_free_trial_7_days" plan code to stripe lookup keys', function () { const planCode = 'collaborator_free_trial_7_days' - const lookupKey = - this.PlansLocator.mapRecurlyPlanCodeToStripeLookupKey(planCode) - expect(lookupKey).to.equal('standard_monthly') + const currency = 'eur' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + planCode, + currency + ) + expect(lookupKey).to.equal('standard_monthly_jun2025_eur') }) it('should map "collaborator-annual" plan code to stripe lookup keys', function () { const planCode = 'collaborator-annual' - const lookupKey = - this.PlansLocator.mapRecurlyPlanCodeToStripeLookupKey(planCode) - expect(lookupKey).to.equal('standard_annual') + const currency = 'eur' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + planCode, + currency + ) + expect(lookupKey).to.equal('standard_annual_jun2025_eur') }) it('should map "professional" plan code to stripe lookup keys', function () { const planCode = 'professional' - const lookupKey = - this.PlansLocator.mapRecurlyPlanCodeToStripeLookupKey(planCode) - expect(lookupKey).to.equal('professional_monthly') + const currency = 'eur' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + planCode, + currency + ) + expect(lookupKey).to.equal('professional_monthly_jun2025_eur') }) it('should map "professional_free_trial_7_days" plan code to stripe lookup keys', function () { const planCode = 'professional_free_trial_7_days' - 
const lookupKey = - this.PlansLocator.mapRecurlyPlanCodeToStripeLookupKey(planCode) - expect(lookupKey).to.equal('professional_monthly') + const currency = 'eur' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + planCode, + currency + ) + expect(lookupKey).to.equal('professional_monthly_jun2025_eur') }) it('should map "professional-annual" plan code to stripe lookup keys', function () { const planCode = 'professional-annual' - const lookupKey = - this.PlansLocator.mapRecurlyPlanCodeToStripeLookupKey(planCode) - expect(lookupKey).to.equal('professional_annual') + const currency = 'eur' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + planCode, + currency + ) + expect(lookupKey).to.equal('professional_annual_jun2025_eur') }) it('should map "student" plan code to stripe lookup keys', function () { const planCode = 'student' - const lookupKey = - this.PlansLocator.mapRecurlyPlanCodeToStripeLookupKey(planCode) - expect(lookupKey).to.equal('student_monthly') + const currency = 'eur' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + planCode, + currency + ) + expect(lookupKey).to.equal('student_monthly_jun2025_eur') }) it('shoult map "student_free_trial_7_days" plan code to stripe lookup keys', function () { const planCode = 'student_free_trial_7_days' - const lookupKey = - this.PlansLocator.mapRecurlyPlanCodeToStripeLookupKey(planCode) - expect(lookupKey).to.equal('student_monthly') + const currency = 'eur' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + planCode, + currency + ) + expect(lookupKey).to.equal('student_monthly_jun2025_eur') }) it('should map "student-annual" plan code to stripe lookup keys', function () { const planCode = 'student-annual' - const lookupKey = - this.PlansLocator.mapRecurlyPlanCodeToStripeLookupKey(planCode) - expect(lookupKey).to.equal('student_annual') + const currency = 'eur' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + planCode, + currency + ) + 
expect(lookupKey).to.equal('student_annual_jun2025_eur') + }) + + it('should return null for unknown add-on codes', function () { + const billingCycleInterval = 'month' + const addOnCode = 'unknown_addon' + const currency = 'gbp' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + addOnCode, + currency, + billingCycleInterval + ) + expect(lookupKey).to.equal(null) + }) + + it('should handle missing input', function () { + const lookupKey = this.PlansLocator.buildStripeLookupKey( + undefined, + undefined + ) + expect(lookupKey).to.equal(null) + }) + + it('returns the key for a monthly AI assist add-on', function () { + const billingCycleInterval = 'month' + const addOnCode = this.AI_ADD_ON_CODE + const currency = 'gbp' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + addOnCode, + currency, + billingCycleInterval + ) + expect(lookupKey).to.equal('assistant_monthly_jun2025_gbp') + }) + + it('returns the key for an annual AI assist add-on', function () { + const billingCycleInterval = 'year' + const addOnCode = this.AI_ADD_ON_CODE + const currency = 'gbp' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + addOnCode, + currency, + billingCycleInterval + ) + expect(lookupKey).to.equal('assistant_annual_jun2025_gbp') }) }) diff --git a/services/web/test/unit/src/Subscription/RecurlyClientTests.js b/services/web/test/unit/src/Subscription/RecurlyClientTests.js index 97088e9944..6194e35a5f 100644 --- a/services/web/test/unit/src/Subscription/RecurlyClientTests.js +++ b/services/web/test/unit/src/Subscription/RecurlyClientTests.js @@ -692,4 +692,20 @@ describe('RecurlyClient', function () { ).to.be.rejectedWith(Error) }) }) + + describe('terminateSubscriptionByUuid', function () { + it('should attempt to terminate the subscription', async function () { + this.client.terminateSubscription = sinon + .stub() + .resolves(this.recurlySubscription) + const subscription = + await this.RecurlyClient.promises.terminateSubscriptionByUuid( + 
this.subscription.uuid + ) + expect(subscription).to.deep.equal(this.recurlySubscription) + expect(this.client.terminateSubscription).to.be.calledWith( + 'uuid-' + this.subscription.uuid + ) + }) + }) }) diff --git a/services/web/test/unit/src/Subscription/SubscriptionControllerTests.js b/services/web/test/unit/src/Subscription/SubscriptionControllerTests.js index b3ae6610e1..087df52815 100644 --- a/services/web/test/unit/src/Subscription/SubscriptionControllerTests.js +++ b/services/web/test/unit/src/Subscription/SubscriptionControllerTests.js @@ -6,6 +6,7 @@ const MockResponse = require('../helpers/MockResponse') const modulePath = '../../../../app/src/Features/Subscription/SubscriptionController' const SubscriptionErrors = require('../../../../app/src/Features/Subscription/Errors') +const SubscriptionHelper = require('../../../../app/src/Features/Subscription/SubscriptionHelper') const mockSubscriptions = { 'subscription-123-active': { @@ -77,7 +78,6 @@ describe('SubscriptionController', function () { buildPlansList: sinon.stub(), promises: { buildUsersSubscriptionViewModel: sinon.stub().resolves({}), - getBestSubscription: sinon.stub().resolves({}), }, buildPlansListForSubscriptionDash: sinon .stub() @@ -146,14 +146,16 @@ describe('SubscriptionController', function () { '../SplitTests/SplitTestHandler': this.SplitTestV2Hander, '../Authentication/SessionManager': this.SessionManager, './SubscriptionHandler': this.SubscriptionHandler, - './SubscriptionHelper': this.SubscriptionHelper, + './SubscriptionHelper': SubscriptionHelper, './SubscriptionViewModelBuilder': this.SubscriptionViewModelBuilder, './LimitationsManager': this.LimitationsManager, '../../infrastructure/GeoIpLookup': this.GeoIpLookup, '@overleaf/settings': this.settings, '../User/UserGetter': this.UserGetter, './RecurlyWrapper': (this.RecurlyWrapper = { - updateAccountEmailAddress: sinon.stub().yields(), + promises: { + updateAccountEmailAddress: sinon.stub().resolves(), + }, }), 
'./RecurlyEventHandler': { sendRecurlyAnalyticsEvent: sinon.stub().resolves(), @@ -309,31 +311,50 @@ describe('SubscriptionController', function () { }) describe('updateAccountEmailAddress via put', function () { - it('should send the user and subscriptionId to RecurlyWrapper', function () { - this.res.sendStatus = sinon.spy() - this.SubscriptionController.updateAccountEmailAddress(this.req, this.res) - this.RecurlyWrapper.updateAccountEmailAddress - .calledWith(this.user._id, this.user.email) - .should.equal(true) + beforeEach(function () { + this.req.body = { + account_email: 'current_account_email@overleaf.com', + } }) - it('should respond with 200', function () { + it('should send the user and subscriptionId to "updateAccountEmailAddress" hooks', async function () { this.res.sendStatus = sinon.spy() - this.SubscriptionController.updateAccountEmailAddress(this.req, this.res) + + await this.SubscriptionController.updateAccountEmailAddress( + this.req, + this.res + ) + + expect(this.Modules.promises.hooks.fire).to.have.been.calledWith( + 'updateAccountEmailAddress', + this.user._id, + this.user.email + ) + }) + + it('should respond with 200', async function () { + this.res.sendStatus = sinon.spy() + await this.SubscriptionController.updateAccountEmailAddress( + this.req, + this.res + ) this.res.sendStatus.calledWith(200).should.equal(true) }) - it('should send the error to the next handler when updating recurly account email fails', function (done) { - this.RecurlyWrapper.updateAccountEmailAddress.yields(new Error()) + it('should send the error to the next handler when updating recurly account email fails', async function () { + this.Modules.promises.hooks.fire + .withArgs('updateAccountEmailAddress', this.user._id, this.user.email) + .rejects(new Error()) + this.next = sinon.spy(error => { - expect(error).instanceOf(Error) - done() + expect(error).to.be.instanceOf(Error) }) - this.SubscriptionController.updateAccountEmailAddress( + await 
this.SubscriptionController.updateAccountEmailAddress( this.req, this.res, this.next ) + expect(this.next.calledOnce).to.be.true }) }) diff --git a/services/web/test/unit/src/Subscription/SubscriptionHandlerTests.js b/services/web/test/unit/src/Subscription/SubscriptionHandlerTests.js index ed5ed2f6d1..7bf23defd2 100644 --- a/services/web/test/unit/src/Subscription/SubscriptionHandlerTests.js +++ b/services/web/test/unit/src/Subscription/SubscriptionHandlerTests.js @@ -5,6 +5,7 @@ const { expect } = chai const { PaymentProviderSubscription, } = require('../../../../app/src/Features/Subscription/PaymentProviderEntities') +const SubscriptionHelper = require('../../../../app/src/Features/Subscription/SubscriptionHelper') const MODULE_PATH = '../../../../app/src/Features/Subscription/SubscriptionHandler' @@ -149,6 +150,7 @@ describe('SubscriptionHandler', function () { '../../models/User': { User: this.User, }, + './SubscriptionHelper': SubscriptionHelper, './SubscriptionUpdater': this.SubscriptionUpdater, './SubscriptionLocator': this.SubscriptionLocator, './LimitationsManager': this.LimitationsManager, diff --git a/services/web/test/unit/src/Subscription/SubscriptionHelperTests.js b/services/web/test/unit/src/Subscription/SubscriptionHelperTests.js index a6e1ffa089..fb667ca451 100644 --- a/services/web/test/unit/src/Subscription/SubscriptionHelperTests.js +++ b/services/web/test/unit/src/Subscription/SubscriptionHelperTests.js @@ -102,38 +102,65 @@ describe('SubscriptionHelper', function () { }) describe('shouldPlanChangeAtTermEnd', function () { - it('should return true if the new plan is less expensive', function () { + it('should return false if isInTrial is true', function () { + const isInTrial = true const changeAtTermEnd = this.SubscriptionHelper.shouldPlanChangeAtTermEnd( plans.expensive, - plans.cheaper + plans.cheaper, + isInTrial + ) + expect(changeAtTermEnd).to.be.false + }) + + it('should return true if the new plan is less expensive', function () { + 
const isInTrial = false + const changeAtTermEnd = this.SubscriptionHelper.shouldPlanChangeAtTermEnd( + plans.expensive, + plans.cheaper, + isInTrial ) expect(changeAtTermEnd).to.be.true }) + it('should return false if the new plan is more exepensive', function () { + const isInTrial = false const changeAtTermEnd = this.SubscriptionHelper.shouldPlanChangeAtTermEnd( plans.cheaper, - plans.expensive + plans.expensive, + isInTrial ) expect(changeAtTermEnd).to.be.false }) + it('should return false if the new plan is the same price', function () { + const isInTrial = false + const changeAtTermEnd = this.SubscriptionHelper.shouldPlanChangeAtTermEnd( plans.cheaper, - plans.alsoCheap + plans.alsoCheap, + isInTrial ) expect(changeAtTermEnd).to.be.false }) + it('should return false if the change is from an individual plan to a more expensive group plan', function () { + const isInTrial = false + const changeAtTermEnd = this.SubscriptionHelper.shouldPlanChangeAtTermEnd( plans.expensive, - plans.expensiveGroup + plans.expensiveGroup, + isInTrial ) expect(changeAtTermEnd).to.be.false }) + it('should return true if the change is from an individual plan to a cheaper group plan', function () { + const isInTrial = false + const changeAtTermEnd = this.SubscriptionHelper.shouldPlanChangeAtTermEnd( plans.expensive, - plans.cheapGroup + plans.cheapGroup, + isInTrial ) expect(changeAtTermEnd).to.be.true }) @@ -267,4 +294,229 @@ describe('SubscriptionHelper', function () { }) }) }) + + describe('isPaidSubscription', function () { + it('should return true for a subscription with a recurly subscription id', function () { + const result = this.SubscriptionHelper.isPaidSubscription({ + recurlySubscription_id: 'some-id', + }) + expect(result).to.be.true + }) + + it('should return true for a subscription with a stripe subscription id', function () { + const result = this.SubscriptionHelper.isPaidSubscription({ + paymentProvider: { subscriptionId: 'some-id' }, + }) + expect(result).to.be.true + 
}) + + it('should return false for a free subscription', function () { + const result = this.SubscriptionHelper.isPaidSubscription({}) + expect(result).to.be.false + }) + + it('should return false for a missing subscription', function () { + const result = this.SubscriptionHelper.isPaidSubscription() + expect(result).to.be.false + }) + }) + + describe('isIndividualActivePaidSubscription', function () { + it('should return true for an active recurly subscription', function () { + const result = this.SubscriptionHelper.isIndividualActivePaidSubscription( + { + groupPlan: false, + recurlyStatus: { state: 'active' }, + recurlySubscription_id: 'some-id', + } + ) + expect(result).to.be.true + }) + + it('should return true for an active stripe subscription', function () { + const result = this.SubscriptionHelper.isIndividualActivePaidSubscription( + { + groupPlan: false, + paymentProvider: { subscriptionId: 'sub_123', state: 'active' }, + } + ) + expect(result).to.be.true + }) + + it('should return false for a canceled recurly subscription', function () { + const result = this.SubscriptionHelper.isIndividualActivePaidSubscription( + { + groupPlan: false, + recurlyStatus: { state: 'canceled' }, + recurlySubscription_id: 'some-id', + } + ) + expect(result).to.be.false + }) + + it('should return false for a canceled stripe subscription', function () { + const result = this.SubscriptionHelper.isIndividualActivePaidSubscription( + { + groupPlan: false, + paymentProvider: { state: 'canceled', subscriptionId: 'sub_123' }, + } + ) + expect(result).to.be.false + }) + + it('should return false for a group plan subscription', function () { + const result = this.SubscriptionHelper.isIndividualActivePaidSubscription( + { + groupPlan: true, + recurlyStatus: { state: 'active' }, + recurlySubscription_id: 'some-id', + } + ) + expect(result).to.be.false + }) + + it('should return false for a free subscription', function () { + const result = 
this.SubscriptionHelper.isIndividualActivePaidSubscription( + {} + ) + expect(result).to.be.false + }) + + it('should return false for a subscription with an empty string for recurlySubscription_id', function () { + const result = this.SubscriptionHelper.isIndividualActivePaidSubscription( + { + groupPlan: false, + recurlySubscription_id: '', + recurlyStatus: { state: 'active' }, + } + ) + expect(result).to.be.false + }) + + it('should return false for a subscription with an empty string for paymentProvider.subscriptionId', function () { + const result = this.SubscriptionHelper.isIndividualActivePaidSubscription( + { + groupPlan: false, + paymentProvider: { state: 'active', subscriptionId: '' }, + } + ) + expect(result).to.be.false + }) + + it('should return false for a missing subscription', function () { + const result = this.SubscriptionHelper.isPaidSubscription() + expect(result).to.be.false + }) + }) + + describe('getPaymentProviderSubscriptionId', function () { + it('should return the recurly subscription id if it exists', function () { + const result = this.SubscriptionHelper.getPaymentProviderSubscriptionId({ + recurlySubscription_id: 'some-id', + }) + expect(result).to.equal('some-id') + }) + + it('should return the payment provider subscription id if it exists', function () { + const result = this.SubscriptionHelper.getPaymentProviderSubscriptionId({ + paymentProvider: { subscriptionId: 'sub_123' }, + }) + expect(result).to.equal('sub_123') + }) + + it('should return null if no subscription id exists', function () { + const result = this.SubscriptionHelper.getPaymentProviderSubscriptionId( + {} + ) + expect(result).to.be.null + }) + }) + + describe('getPaidSubscriptionState', function () { + it('should return the recurly state if it exists', function () { + const result = this.SubscriptionHelper.getPaidSubscriptionState({ + recurlyStatus: { state: 'active' }, + }) + expect(result).to.equal('active') + }) + + it('should return the payment provider state if 
it exists', function () { + const result = this.SubscriptionHelper.getPaidSubscriptionState({ + paymentProvider: { state: 'active' }, + }) + expect(result).to.equal('active') + }) + + it('should return null if no state exists', function () { + const result = this.SubscriptionHelper.getPaidSubscriptionState({}) + expect(result).to.be.null + }) + }) + + describe('getSubscriptionTrialStartedAt', function () { + it('should return the recurly trial start date if it exists', function () { + const result = this.SubscriptionHelper.getSubscriptionTrialStartedAt({ + recurlySubscription_id: 'some-id', + recurlyStatus: { trialStartedAt: new Date('2023-01-01') }, + }) + expect(result).to.deep.equal(new Date('2023-01-01')) + }) + + it('should return the payment provider trial start date if it exists', function () { + const result = this.SubscriptionHelper.getSubscriptionTrialStartedAt({ + recurlyStatus: {}, + paymentProvider: { trialStartedAt: new Date('2023-01-01') }, + }) + expect(result).to.deep.equal(new Date('2023-01-01')) + }) + + it('should return undefined if no trial start date exists', function () { + const result = this.SubscriptionHelper.getSubscriptionTrialStartedAt({}) + expect(result).to.be.undefined + }) + }) + + describe('getSubscriptionTrialEndsAt', function () { + it('should return the recurly trial end date if it exists', function () { + const result = this.SubscriptionHelper.getSubscriptionTrialEndsAt({ + recurlySubscription_id: 'some-id', + recurlyStatus: { trialEndsAt: new Date('2023-01-01') }, + }) + expect(result).to.deep.equal(new Date('2023-01-01')) + }) + + it('should return the payment provider trial end date if it exists', function () { + const result = this.SubscriptionHelper.getSubscriptionTrialEndsAt({ + recurlyStatus: {}, + paymentProvider: { trialEndsAt: new Date('2023-01-01') }, + }) + expect(result).to.deep.equal(new Date('2023-01-01')) + }) + + it('should return undefined if no trial end date exists', function () { + const result = 
this.SubscriptionHelper.getSubscriptionTrialEndsAt({}) + expect(result).to.be.undefined + }) + }) + + describe('isInTrial', function () { + it('should return false if trialEndsAt is null', function () { + const result = this.SubscriptionHelper.isInTrial(null) + expect(result).to.be.false + }) + + it('should return false if trialEndsAt is before now', function () { + const tenDaysAgo = new Date() + tenDaysAgo.setDate(tenDaysAgo.getDate() - 10) + const result = this.SubscriptionHelper.isInTrial(tenDaysAgo) + expect(result).to.be.false + }) + + it('should return true if trialEndsAt is after now', function () { + const tenDaysFromNow = new Date() + tenDaysFromNow.setDate(tenDaysFromNow.getDate() + 10) + const result = this.SubscriptionHelper.isInTrial(tenDaysFromNow) + expect(result).to.be.true + }) + }) }) diff --git a/services/web/test/unit/src/Subscription/SubscriptionLocatorTests.js b/services/web/test/unit/src/Subscription/SubscriptionLocatorTests.js index f66eda5b7f..e8202424fc 100644 --- a/services/web/test/unit/src/Subscription/SubscriptionLocatorTests.js +++ b/services/web/test/unit/src/Subscription/SubscriptionLocatorTests.js @@ -13,6 +13,11 @@ describe('Subscription Locator Tests', function () { exec: sinon.stub().resolves(), }), find: sinon.stub().returns({ + populate: sinon.stub().returns({ + populate: sinon.stub().returns({ + exec: sinon.stub().resolves([]), + }), + }), exec: sinon.stub().resolves(), }), } @@ -77,4 +82,110 @@ describe('Subscription Locator Tests', function () { subscription.should.equal(this.subscription) }) }) + + describe('getUserSubscriptionStatus', function () { + it('should return no active personal or group subscription when no user is passed', async function () { + const subscriptionStatus = + await this.SubscriptionLocator.promises.getUserSubscriptionStatus( + undefined + ) + expect(subscriptionStatus).to.deep.equal({ + personal: false, + group: false, + }) + }) + + it('should return no active personal or group subscription when 
the user has no subscription', async function () { + const subscriptionStatus = + await this.SubscriptionLocator.promises.getUserSubscriptionStatus( + this.user._id + ) + expect(subscriptionStatus).to.deep.equal({ + personal: false, + group: false, + }) + }) + + it('should return active personal subscription', async function () { + this.Subscription.findOne.returns({ + exec: sinon.stub().resolves({ + recurlyStatus: { + state: 'active', + }, + }), + }) + const subscriptionStatus = + await this.SubscriptionLocator.promises.getUserSubscriptionStatus( + this.user._id + ) + expect(subscriptionStatus).to.deep.equal({ personal: true, group: false }) + }) + + it('should return active group subscription when member of a group plan', async function () { + this.Subscription.find.returns({ + populate: sinon.stub().returns({ + populate: sinon.stub().returns({ + exec: sinon.stub().resolves([ + { + recurlyStatus: { + state: 'active', + }, + groupPlan: true, + }, + ]), + }), + }), + }) + const subscriptionStatus = + await this.SubscriptionLocator.promises.getUserSubscriptionStatus( + this.user._id + ) + expect(subscriptionStatus).to.deep.equal({ personal: false, group: true }) + }) + + it('should return active group subscription when owner of a group plan', async function () { + this.Subscription.findOne.returns({ + exec: sinon.stub().resolves({ + recurlyStatus: { + state: 'active', + }, + groupPlan: true, + }), + }) + const subscriptionStatus = + await this.SubscriptionLocator.promises.getUserSubscriptionStatus( + this.user._id + ) + expect(subscriptionStatus).to.deep.equal({ personal: false, group: true }) + }) + + it('should return active personal and group subscription when has personal subscription and member of a group', async function () { + this.Subscription.find.returns({ + populate: sinon.stub().returns({ + populate: sinon.stub().returns({ + exec: sinon.stub().resolves([ + { + recurlyStatus: { + state: 'active', + }, + groupPlan: true, + }, + ]), + }), + }), + }) + 
this.Subscription.findOne.returns({ + exec: sinon.stub().resolves({ + recurlyStatus: { + state: 'active', + }, + }), + }) + const subscriptionStatus = + await this.SubscriptionLocator.promises.getUserSubscriptionStatus( + this.user._id + ) + expect(subscriptionStatus).to.deep.equal({ personal: true, group: true }) + }) + }) }) diff --git a/services/web/test/unit/src/Subscription/SubscriptionUpdaterTests.js b/services/web/test/unit/src/Subscription/SubscriptionUpdaterTests.js index a122f0e4b2..d272ad51e4 100644 --- a/services/web/test/unit/src/Subscription/SubscriptionUpdaterTests.js +++ b/services/web/test/unit/src/Subscription/SubscriptionUpdaterTests.js @@ -70,6 +70,7 @@ describe('SubscriptionUpdater', function () { .stub() .returns({ exec: sinon.stub().resolves() }) this.SubscriptionModel.findOne = sinon.stub().resolves() + this.SubscriptionModel.findById = sinon.stub().resolves() this.SubscriptionModel.updateMany = sinon .stub() .returns({ exec: sinon.stub().resolves() }) @@ -173,6 +174,12 @@ describe('SubscriptionUpdater', function () { }, } + this.UserUpdater = { + promises: { + updateUser: sinon.stub().resolves(), + }, + } + this.SubscriptionUpdater = SandboxedModule.require(modulePath, { requires: { '../../models/Subscription': { @@ -193,6 +200,7 @@ describe('SubscriptionUpdater', function () { }), '../../infrastructure/Features': this.Features, '../User/UserAuditLogHandler': this.UserAuditLogHandler, + '../User/UserUpdater': this.UserUpdater, '../../infrastructure/Modules': (this.Modules = { promises: { hooks: { @@ -622,6 +630,9 @@ describe('SubscriptionUpdater', function () { }, ] this.SubscriptionModel.findOne.resolves(this.groupSubscription) + this.SubscriptionModel.findById = sinon + .stub() + .resolves(this.groupSubscription) this.SubscriptionLocator.promises.getMemberSubscriptions.resolves( this.fakeSubscriptions ) @@ -638,6 +649,28 @@ describe('SubscriptionUpdater', function () { .should.equal(true) }) + it('should remove user enrollment if the 
group is managed', async function () { + this.SubscriptionModel.findById.resolves({ + ...this.groupSubscription, + managedUsersEnabled: true, + }) + await this.SubscriptionUpdater.promises.removeUserFromGroup( + this.groupSubscription._id, + this.otherUserId + ) + this.UserUpdater.promises.updateUser + .calledWith( + { _id: this.otherUserId }, + { + $unset: { + 'enrollment.managedBy': 1, + 'enrollment.enrolledAt': 1, + }, + } + ) + .should.equal(true) + }) + it('should send a group-subscription-left event', async function () { await this.SubscriptionUpdater.promises.removeUserFromGroup( this.groupSubscription._id, diff --git a/services/web/test/unit/src/Subscription/SubscriptionViewModelBuilderTests.js b/services/web/test/unit/src/Subscription/SubscriptionViewModelBuilderTests.js index 0f666b888a..86eb51070e 100644 --- a/services/web/test/unit/src/Subscription/SubscriptionViewModelBuilderTests.js +++ b/services/web/test/unit/src/Subscription/SubscriptionViewModelBuilderTests.js @@ -7,6 +7,7 @@ const { PaymentProviderSubscriptionAddOn, PaymentProviderSubscriptionChange, } = require('../../../../app/src/Features/Subscription/PaymentProviderEntities') +const SubscriptionHelper = require('../../../../app/src/Features/Subscription/SubscriptionHelper') const modulePath = '../../../../app/src/Features/Subscription/SubscriptionViewModelBuilder' @@ -159,13 +160,14 @@ describe('SubscriptionViewModelBuilder', function () { './SubscriptionUpdater': this.SubscriptionUpdater, './PlansLocator': this.PlansLocator, '../../infrastructure/Modules': (this.Modules = { + promises: { hooks: { fire: sinon.stub().resolves([]) } }, hooks: { fire: sinon.stub().yields(null, []), }, }), './V1SubscriptionManager': {}, '../Publishers/PublishersGetter': this.PublishersGetter, - './SubscriptionHelper': {}, + './SubscriptionHelper': SubscriptionHelper, }, }) @@ -180,10 +182,10 @@ describe('SubscriptionViewModelBuilder', function () { .returns(this.commonsPlan) }) - describe('getBestSubscription', 
function () { + describe('getUsersSubscriptionDetails', function () { it('should return a free plan when user has no subscription or affiliation', async function () { - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) assert.deepEqual(usersBestSubscription, { type: 'free' }) @@ -195,8 +197,8 @@ describe('SubscriptionViewModelBuilder', function () { .withArgs(this.user) .resolves(this.individualCustomSubscription) - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) @@ -213,8 +215,8 @@ describe('SubscriptionViewModelBuilder', function () { .withArgs(this.user) .resolves(this.individualSubscription) - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) @@ -234,8 +236,8 @@ describe('SubscriptionViewModelBuilder', function () { .withArgs(this.user) .resolves(this.individualSubscription) - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) @@ -255,8 +257,8 @@ describe('SubscriptionViewModelBuilder', function () { .withArgs(this.user) .resolves(this.individualSubscription) - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await 
this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) @@ -268,8 +270,8 @@ describe('SubscriptionViewModelBuilder', function () { }) }) - it('should update subscription if recurly data is missing', async function () { - this.individualSubscriptionWithoutRecurly = { + it('should update subscription if recurly payment state is missing', async function () { + this.individualSubscriptionWithoutPaymentState = { planCode: this.planCode, plan: this.plan, recurlySubscription_id: this.recurlySubscription_id, @@ -280,37 +282,104 @@ describe('SubscriptionViewModelBuilder', function () { this.SubscriptionLocator.promises.getUsersSubscription .withArgs(this.user) .onCall(0) - .resolves(this.individualSubscriptionWithoutRecurly) + .resolves(this.individualSubscriptionWithoutPaymentState) .withArgs(this.user) .onCall(1) .resolves(this.individualSubscription) - this.RecurlyWrapper.promises.getSubscription - .withArgs(this.individualSubscription.recurlySubscription_id, { - includeAccount: true, - }) - .resolves(this.paymentRecord) + const payment = { + subscription: this.paymentRecord, + account: new PaymentProviderAccount({}), + coupons: [], + } - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + this.Modules.promises.hooks.fire + .withArgs( + 'getPaymentFromRecordPromise', + this.individualSubscriptionWithoutPaymentState + ) + .resolves([payment]) + this.Modules.promises.hooks.fire + .withArgs( + 'syncSubscription', + payment, + this.individualSubscriptionWithoutPaymentState + ) + .resolves([]) + + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) - sinon.assert.calledWith( - this.RecurlyWrapper.promises.getSubscription, - this.individualSubscriptionWithoutRecurly.recurlySubscription_id, - { includeAccount: true } - ) - sinon.assert.calledWith( - 
this.SubscriptionUpdater.promises.updateSubscriptionFromRecurly, - this.paymentRecord, - this.individualSubscriptionWithoutRecurly - ) assert.deepEqual(usersBestSubscription, { type: 'individual', subscription: this.individualSubscription, plan: this.plan, remainingTrialDays: -1, }) + assert.isTrue( + this.Modules.promises.hooks.fire.withArgs( + 'getPaymentFromRecordPromise', + this.individualSubscriptionWithoutPaymentState + ).calledOnce + ) + }) + + it('should update subscription if stripe payment state is missing', async function () { + this.individualSubscriptionWithoutPaymentState = { + planCode: this.planCode, + plan: this.plan, + paymentProvider: { + subscriptionId: this.recurlySubscription_id, + }, + } + this.paymentRecord = { + state: 'active', + } + this.SubscriptionLocator.promises.getUsersSubscription + .withArgs(this.user) + .onCall(0) + .resolves(this.individualSubscriptionWithoutPaymentState) + .withArgs(this.user) + .onCall(1) + .resolves(this.individualSubscription) + const payment = { + subscription: this.paymentRecord, + account: new PaymentProviderAccount({}), + coupons: [], + } + + this.Modules.promises.hooks.fire + .withArgs( + 'getPaymentFromRecordPromise', + this.individualSubscriptionWithoutPaymentState + ) + .resolves([payment]) + this.Modules.promises.hooks.fire + .withArgs( + 'syncSubscription', + payment, + this.individualSubscriptionWithoutPaymentState + ) + .resolves([]) + + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( + this.user + ) + + assert.deepEqual(usersBestSubscription, { + type: 'individual', + subscription: this.individualSubscription, + plan: this.plan, + remainingTrialDays: -1, + }) + assert.isTrue( + this.Modules.promises.hooks.fire.withArgs( + 'getPaymentFromRecordPromise', + this.individualSubscriptionWithoutPaymentState + ).calledOnce + ) }) }) @@ -318,8 +387,8 @@ describe('SubscriptionViewModelBuilder', function () { 
this.SubscriptionLocator.promises.getMemberSubscriptions .withArgs(this.user) .resolves([this.groupSubscription]) - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) assert.deepEqual(usersBestSubscription, { @@ -336,8 +405,8 @@ describe('SubscriptionViewModelBuilder', function () { .resolves([ Object.assign({}, this.groupSubscription, { teamName: 'test team' }), ]) - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) assert.deepEqual(usersBestSubscription, { @@ -353,8 +422,8 @@ describe('SubscriptionViewModelBuilder', function () { .withArgs(this.user._id) .resolves([this.commonsSubscription]) - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) @@ -385,8 +454,8 @@ describe('SubscriptionViewModelBuilder', function () { compileTimeout: 60, } - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) @@ -410,8 +479,8 @@ describe('SubscriptionViewModelBuilder', function () { compileTimeout: 60, } - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) @@ -440,8 +509,8 @@ 
describe('SubscriptionViewModelBuilder', function () { compileTimeout: 240, } - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) @@ -469,8 +538,8 @@ describe('SubscriptionViewModelBuilder', function () { compileTimeout: 240, } - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) @@ -499,8 +568,8 @@ describe('SubscriptionViewModelBuilder', function () { compileTimeout: 240, } - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) @@ -589,7 +658,7 @@ describe('SubscriptionViewModelBuilder', function () { describe('isEligibleForGroupPlan', function () { it('is false for Stripe subscriptions', async function () { - this.paymentRecord.service = 'stripe' + this.paymentRecord.service = 'stripe-us' const result = await this.SubscriptionViewModelBuilder.promises.buildUsersSubscriptionViewModel( this.user @@ -627,7 +696,7 @@ describe('SubscriptionViewModelBuilder', function () { describe('isEligibleForPause', function () { it('is false for Stripe subscriptions', async function () { - this.paymentRecord.service = 'stripe' + this.paymentRecord.service = 'stripe-us' const result = await this.SubscriptionViewModelBuilder.promises.buildUsersSubscriptionViewModel( this.user @@ -777,7 +846,7 @@ describe('SubscriptionViewModelBuilder', function () { this.paymentRecord.pausePeriodStart = null this.paymentRecord.remainingPauseCycles = null this.paymentRecord.trialPeriodEnd = null - 
this.paymentRecord.service = 'stripe' + this.paymentRecord.service = 'stripe-us' const result = await this.SubscriptionViewModelBuilder.promises.buildUsersSubscriptionViewModel( this.user @@ -847,7 +916,7 @@ describe('SubscriptionViewModelBuilder', function () { }) it('does not add a billing details link for a Stripe subscription', async function () { - this.paymentRecord.service = 'stripe' + this.paymentRecord.service = 'stripe-us' this.Modules.hooks.fire .withArgs('getPaymentFromRecord', this.individualSubscription) .yields(null, [ diff --git a/services/web/test/unit/src/Subscription/TeamInvitesController.test.mjs b/services/web/test/unit/src/Subscription/TeamInvitesController.test.mjs index b72a406ac0..be5fe26670 100644 --- a/services/web/test/unit/src/Subscription/TeamInvitesController.test.mjs +++ b/services/web/test/unit/src/Subscription/TeamInvitesController.test.mjs @@ -1,6 +1,6 @@ -import { vi } from 'vitest' +import { expect, vi } from 'vitest' import sinon from 'sinon' -import { expect } from 'chai' + const modulePath = '../../../../app/src/Features/Subscription/TeamInvitesController' @@ -175,7 +175,7 @@ describe('TeamInvitesController', function () { }, } - describe('hasIndividualRecurlySubscription', function () { + describe('hasIndividualPaidSubscription', function () { it('is true for personal subscription', function (ctx) { return new Promise(resolve => { ctx.SubscriptionLocator.promises.getUsersSubscription.resolves({ @@ -184,7 +184,7 @@ describe('TeamInvitesController', function () { }) const res = { render: (template, data) => { - expect(data.hasIndividualRecurlySubscription).to.be.true + expect(data.hasIndividualPaidSubscription).to.be.true resolve() }, } @@ -200,7 +200,7 @@ describe('TeamInvitesController', function () { }) const res = { render: (template, data) => { - expect(data.hasIndividualRecurlySubscription).to.be.false + expect(data.hasIndividualPaidSubscription).to.be.false resolve() }, } @@ -219,7 +219,7 @@ 
describe('TeamInvitesController', function () { }) const res = { render: (template, data) => { - expect(data.hasIndividualRecurlySubscription).to.be.false + expect(data.hasIndividualPaidSubscription).to.be.false resolve() }, } diff --git a/services/web/test/unit/src/Subscription/TeamInvitesHandlerTests.js b/services/web/test/unit/src/Subscription/TeamInvitesHandlerTests.js index fdd247bf96..b15232c822 100644 --- a/services/web/test/unit/src/Subscription/TeamInvitesHandlerTests.js +++ b/services/web/test/unit/src/Subscription/TeamInvitesHandlerTests.js @@ -29,6 +29,7 @@ describe('TeamInvitesHandler', function () { this.subscription = { id: '55153a8014829a865bbf700d', _id: new ObjectId('55153a8014829a865bbf700d'), + recurlySubscription_id: '1a2b3c4d5e6f7g', admin_id: this.manager._id, groupPlan: true, member_ids: [], @@ -54,6 +55,7 @@ describe('TeamInvitesHandler', function () { this.SubscriptionUpdater = { promises: { addUserToGroup: sinon.stub().resolves(), + deleteSubscription: sinon.stub().resolves(), }, } @@ -109,6 +111,12 @@ describe('TeamInvitesHandler', function () { this.Subscription.findOne.resolves(this.subscription) + this.RecurlyClient = { + promises: { + terminateSubscriptionByUuid: sinon.stub().resolves(), + }, + } + this.TeamInvitesHandler = SandboxedModule.require(modulePath, { requires: { 'mongodb-legacy': { ObjectId }, @@ -126,6 +134,7 @@ describe('TeamInvitesHandler', function () { '../../infrastructure/Modules': (this.Modules = { promises: { hooks: { fire: sinon.stub().resolves() } }, }), + './RecurlyClient': this.RecurlyClient, }, }) }) @@ -335,6 +344,8 @@ describe('TeamInvitesHandler', function () { email: 'tyrion@example.com', } + this.ipAddress = '127.0.0.1' + this.UserGetter.promises.getUserByAnyEmail .withArgs(this.user.email) .resolves(this.user) @@ -350,7 +361,8 @@ describe('TeamInvitesHandler', function () { it('adds the user to the team', async function () { await this.TeamInvitesHandler.promises.acceptInvite( 'dddddddd', - this.user.id 
+ this.user.id, + this.ipAddress ) this.SubscriptionUpdater.promises.addUserToGroup .calledWith(this.subscription._id, this.user.id) @@ -360,7 +372,8 @@ describe('TeamInvitesHandler', function () { it('removes the invite from the subscription', async function () { await this.TeamInvitesHandler.promises.acceptInvite( 'dddddddd', - this.user.id + this.user.id, + this.ipAddress ) this.Subscription.updateOne .calledWith( @@ -375,7 +388,8 @@ describe('TeamInvitesHandler', function () { await this.TeamInvitesHandler.promises.acceptInvite( 'dddddddd', - this.user.id + this.user.id, + this.ipAddress ) sinon.assert.called( this.NotificationsBuilder.promises.groupInvitation( @@ -389,7 +403,8 @@ describe('TeamInvitesHandler', function () { it('should not schedule an SSO invite reminder', async function () { await this.TeamInvitesHandler.promises.acceptInvite( 'dddddddd', - this.user.id + this.user.id, + this.ipAddress ) sinon.assert.notCalled(this.Modules.promises.hooks.fire) }) @@ -401,7 +416,17 @@ describe('TeamInvitesHandler', function () { await this.TeamInvitesHandler.promises.acceptInvite( 'dddddddd', - this.user.id + this.user.id, + this.ipAddress + ) + sinon.assert.calledWith( + this.SubscriptionUpdater.promises.deleteSubscription, + this.subscription, + { id: this.user.id, ip: this.ipAddress } + ) + sinon.assert.calledWith( + this.RecurlyClient.promises.terminateSubscriptionByUuid, + this.subscription.recurlySubscription_id ) sinon.assert.calledWith( this.Modules.promises.hooks.fire, @@ -421,7 +446,8 @@ describe('TeamInvitesHandler', function () { await this.TeamInvitesHandler.promises.acceptInvite( 'dddddddd', - this.user.id + this.user.id, + this.ipAddress ) sinon.assert.calledWith( this.Modules.promises.hooks.fire, diff --git a/services/web/test/unit/src/Tags/TagsController.test.mjs b/services/web/test/unit/src/Tags/TagsController.test.mjs index 927c6283a5..c8cb739d0e 100644 --- a/services/web/test/unit/src/Tags/TagsController.test.mjs +++ 
b/services/web/test/unit/src/Tags/TagsController.test.mjs @@ -1,6 +1,6 @@ -import { vi } from 'vitest' +import { assert, vi } from 'vitest' import sinon from 'sinon' -import { assert } from 'chai' + const modulePath = '../../../../app/src/Features/Tags/TagsController.mjs' describe('TagsController', function () { diff --git a/services/web/test/unit/src/ThirdPartyDataStore/TpdsController.test.mjs b/services/web/test/unit/src/ThirdPartyDataStore/TpdsController.test.mjs index 313f2d2456..29daa00efc 100644 --- a/services/web/test/unit/src/ThirdPartyDataStore/TpdsController.test.mjs +++ b/services/web/test/unit/src/ThirdPartyDataStore/TpdsController.test.mjs @@ -1,6 +1,5 @@ -import { vi } from 'vitest' +import { expect, vi } from 'vitest' import mongodb from 'mongodb-legacy' -import { expect } from 'chai' import sinon from 'sinon' import Errors from '../../../../app/src/Features/Errors/Errors.js' import MockResponse from '../helpers/MockResponse.js' diff --git a/services/web/test/unit/src/ThirdPartyDataStore/TpdsUpdateHandler.test.mjs b/services/web/test/unit/src/ThirdPartyDataStore/TpdsUpdateHandler.test.mjs index 96cc22279e..08a7dcf494 100644 --- a/services/web/test/unit/src/ThirdPartyDataStore/TpdsUpdateHandler.test.mjs +++ b/services/web/test/unit/src/ThirdPartyDataStore/TpdsUpdateHandler.test.mjs @@ -1,6 +1,5 @@ -import { vi } from 'vitest' +import { expect, vi } from 'vitest' import sinon from 'sinon' -import { expect } from 'chai' import mongodb from 'mongodb-legacy' import Errors from '../../../../app/src/Features/Errors/Errors.js' diff --git a/services/web/test/unit/src/TokenAccess/TokenAccessController.test.mjs b/services/web/test/unit/src/TokenAccess/TokenAccessController.test.mjs index 3408c3bb32..96d2d19b04 100644 --- a/services/web/test/unit/src/TokenAccess/TokenAccessController.test.mjs +++ b/services/web/test/unit/src/TokenAccess/TokenAccessController.test.mjs @@ -1,6 +1,5 @@ -import { vi } from 'vitest' +import { expect, vi } from 'vitest' import sinon 
from 'sinon' -import { expect } from 'chai' import mongodb from 'mongodb-legacy' import MockRequest from '../helpers/MockRequest.js' import MockResponse from '../helpers/MockResponse.js' diff --git a/services/web/test/unit/src/Uploads/ProjectUploadController.test.mjs b/services/web/test/unit/src/Uploads/ProjectUploadController.test.mjs index 1f6fd7adb9..443578f747 100644 --- a/services/web/test/unit/src/Uploads/ProjectUploadController.test.mjs +++ b/services/web/test/unit/src/Uploads/ProjectUploadController.test.mjs @@ -5,9 +5,8 @@ * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -import { vi } from 'vitest' +import { expect, vi } from 'vitest' import sinon from 'sinon' -import { expect } from 'chai' import MockRequest from '../helpers/MockRequest.js' import MockResponse from '../helpers/MockResponse.js' import ArchiveErrors from '../../../../app/src/Features/Uploads/ArchiveErrors.js' diff --git a/services/web/test/unit/src/User/UserGetterTests.js b/services/web/test/unit/src/User/UserGetterTests.js index 0e0c170fd6..315a8073d6 100644 --- a/services/web/test/unit/src/User/UserGetterTests.js +++ b/services/web/test/unit/src/User/UserGetterTests.js @@ -119,6 +119,17 @@ describe('UserGetter', function () { }) }) + it('should not call mongo with empty list', function (done) { + const query = [] + const projection = { email: 1 } + this.UserGetter.getUsers(query, projection, (error, users) => { + expect(error).to.not.exist + expect(users).to.deep.equal([]) + expect(this.find).to.not.have.been.called + done() + }) + }) + it('should not allow null query', function (done) { this.UserGetter.getUser(null, {}, error => { error.should.exist diff --git a/services/web/test/unit/src/User/UserPagesController.test.mjs b/services/web/test/unit/src/User/UserPagesController.test.mjs index 181c9513ae..1fa908d1be 100644 --- a/services/web/test/unit/src/User/UserPagesController.test.mjs +++ 
b/services/web/test/unit/src/User/UserPagesController.test.mjs @@ -1,7 +1,6 @@ -import { vi } from 'vitest' +import { expect, vi } from 'vitest' import assert from 'assert' import sinon from 'sinon' -import { expect } from 'chai' import MockResponse from '../helpers/MockResponse.js' import MockRequest from '../helpers/MockRequest.js' diff --git a/services/web/test/unit/src/User/UserUpdaterTests.js b/services/web/test/unit/src/User/UserUpdaterTests.js index 5832bc4656..2803e6d6f2 100644 --- a/services/web/test/unit/src/User/UserUpdaterTests.js +++ b/services/web/test/unit/src/User/UserUpdaterTests.js @@ -59,11 +59,6 @@ describe('UserUpdater', function () { changeEmail: sinon.stub().resolves(), }, } - this.RecurlyWrapper = { - promises: { - updateAccountEmailAddress: sinon.stub().resolves(), - }, - } this.AnalyticsManager = { recordEventForUserInBackground: sinon.stub(), } @@ -264,9 +259,11 @@ describe('UserUpdater', function () { expect( this.NewsletterManager.promises.changeEmail ).to.have.been.calledWith(this.user, this.newEmail) - expect( - this.RecurlyWrapper.promises.updateAccountEmailAddress - ).to.have.been.calledWith(this.user._id, this.newEmail) + expect(this.Modules.promises.hooks.fire).to.have.been.calledWith( + 'updateAccountEmailAddress', + this.user._id, + this.newEmail + ) }) it('validates email', async function () { @@ -615,9 +612,11 @@ describe('UserUpdater', function () { expect( this.NewsletterManager.promises.changeEmail ).to.have.been.calledWith(this.user, this.newEmail) - expect( - this.RecurlyWrapper.promises.updateAccountEmailAddress - ).to.have.been.calledWith(this.user._id, this.newEmail) + expect(this.Modules.promises.hooks.fire).to.have.been.calledWith( + 'updateAccountEmailAddress', + this.user._id, + this.newEmail + ) }) it('handles Mongo errors', async function () { diff --git a/services/web/test/unit/src/UserMembership/UserMembershipController.test.mjs b/services/web/test/unit/src/UserMembership/UserMembershipController.test.mjs index 
55bc62cd2d..18e2d8526b 100644 --- a/services/web/test/unit/src/UserMembership/UserMembershipController.test.mjs +++ b/services/web/test/unit/src/UserMembership/UserMembershipController.test.mjs @@ -1,6 +1,5 @@ -import { vi } from 'vitest' +import { expect, vi } from 'vitest' import sinon from 'sinon' -import { expect } from 'chai' import MockRequest from '../helpers/MockRequest.js' import MockResponse from '../helpers/MockResponse.js' import EntityConfigs from '../../../../app/src/Features/UserMembership/UserMembershipEntityConfigs.js' @@ -185,6 +184,7 @@ describe('UserMembershipController', function () { expect(viewParams.users).to.deep.equal(ctx.users) expect(viewParams.groupSize).to.equal(ctx.subscription.membersLimit) expect(viewParams.managedUsersActive).to.equal(true) + expect(viewParams.isUserGroupManager).to.equal(false) }, }) }) diff --git a/services/web/test/unit/src/infrastructure/ServeStaticWrapper.test.mjs b/services/web/test/unit/src/infrastructure/ServeStaticWrapper.test.mjs index 4d8479a9cb..619fe74a2b 100644 --- a/services/web/test/unit/src/infrastructure/ServeStaticWrapper.test.mjs +++ b/services/web/test/unit/src/infrastructure/ServeStaticWrapper.test.mjs @@ -1,5 +1,4 @@ -import { vi } from 'vitest' -import { expect } from 'chai' +import { expect, vi } from 'vitest' import Path from 'node:path' import sinon from 'sinon' import MockResponse from '../helpers/MockResponse.js' diff --git a/services/web/test/unit/vitest_bootstrap.mjs b/services/web/test/unit/vitest_bootstrap.mjs index 2244faefd3..5a39b2d587 100644 --- a/services/web/test/unit/vitest_bootstrap.mjs +++ b/services/web/test/unit/vitest_bootstrap.mjs @@ -1,8 +1,26 @@ -import { vi } from 'vitest' +import { chai, vi } from 'vitest' import './common_bootstrap.js' import sinon from 'sinon' import logger from '@overleaf/logger' +import sinonChai from 'sinon-chai' +import chaiAsPromised from 'chai-as-promised' +/* + * Chai configuration + */ + +// add chai.should() +chai.should() + +// Load 
sinon-chai assertions so expect(stubFn).to.have.been.calledWith('abc') +// has a nicer failure messages +chai.use(sinonChai) + +// Load promise support for chai +chai.use(chaiAsPromised) + +// Do not truncate assertion errors +chai.config.truncateThreshold = 0 vi.mock('@overleaf/logger', async () => { return { default: { diff --git a/services/web/types/admin/subscription.ts b/services/web/types/admin/subscription.ts index bbcdd3b953..811ebf54bf 100644 --- a/services/web/types/admin/subscription.ts +++ b/services/web/types/admin/subscription.ts @@ -1,7 +1,15 @@ -import { GroupPolicy } from '../subscription/dashboard/subscription' +import { + GroupPolicy, + PaymentProvider, +} from '../subscription/dashboard/subscription' import { SSOConfig } from '../subscription/sso' import { TeamInvite } from '../team-invite' +type RecurlyAdminClientPaymentProvider = Record +type StripeAdminClientPaymentProvider = PaymentProvider & { + service: 'stripe-us' | 'stripe-uk' +} + export type Subscription = { _id: string teamInvites: TeamInvite[] @@ -13,4 +21,8 @@ export type Subscription = { managedUsersEnabled: boolean v1_id: number salesforce_id: string + recurlySubscription_id?: string + paymentProvider: + | RecurlyAdminClientPaymentProvider + | StripeAdminClientPaymentProvider } diff --git a/services/web/types/onboarding.ts b/services/web/types/onboarding.ts new file mode 100644 index 0000000000..11ae3e51d0 --- /dev/null +++ b/services/web/types/onboarding.ts @@ -0,0 +1,25 @@ +export type UsedLatex = 'never' | 'occasionally' | 'often' +export type Occupation = + | 'university' + | 'company' + | 'nonprofitngo' + | 'government' + | 'other' + +export type OnboardingFormData = { + firstName: string + lastName: string + primaryOccupation: Occupation | null + usedLatex: UsedLatex | null + companyDivisionDepartment: string + companyJobTitle: string + governmentJobTitle: string + institutionName: string + otherJobTitle: string + nonprofitDivisionDepartment: string + nonprofitJobTitle: 
string + role: string + subjectArea: string + updatedAt?: Date + shouldReceiveUpdates?: boolean +} diff --git a/services/web/types/project.ts b/services/web/types/project.ts index 83a28533b3..0fc21533e2 100644 --- a/services/web/types/project.ts +++ b/services/web/types/project.ts @@ -25,7 +25,8 @@ export type Project = { owner: MongoUser members: ProjectMember[] invites: ProjectInvite[] - rootDoc_id?: string + // `rootDoc_id` in the backend; `rootDocId` in the frontend + rootDocId?: string rootFolder?: Folder[] deletedByExternalDataSource?: boolean } diff --git a/services/web/types/project/dashboard/subscription.ts b/services/web/types/project/dashboard/subscription.ts index e8b595c49f..c8f8835b34 100644 --- a/services/web/types/project/dashboard/subscription.ts +++ b/services/web/types/project/dashboard/subscription.ts @@ -1,4 +1,7 @@ -import { SubscriptionState } from '../../subscription/dashboard/subscription' +import { + SubscriptionState, + PaymentProvider, +} from '../../subscription/dashboard/subscription' type SubscriptionBase = { featuresPageURL: string @@ -22,6 +25,7 @@ type PaidSubscriptionBase = { teamName?: string name: string recurlyStatus?: RecurlyStatus + paymentProvider?: PaymentProvider } } & SubscriptionBase diff --git a/services/web/types/share-doc.ts b/services/web/types/share-doc.ts index d071c97f28..7c75e6d0de 100644 --- a/services/web/types/share-doc.ts +++ b/services/web/types/share-doc.ts @@ -1,9 +1,23 @@ import EventEmitter from 'events' +import { StringFileData } from 'overleaf-editor-core' // type for the Doc class in vendor/libs/sharejs.js -export interface ShareDoc extends EventEmitter { +export interface ShareLatexOTShareDoc extends EventEmitter { + otType: 'sharejs-text-ot' + snapshot: string detach_cm6?: () => void getText: () => string insert: (pos: number, insert: string, fromUndo: boolean) => void del: (pos: number, length: number, fromUndo: boolean) => void + submitOp(op: any[]): void } + +export interface HistoryOTShareDoc 
extends EventEmitter { + otType: 'history-ot' + snapshot: StringFileData + detach_cm6?: () => void + getText: () => string + submitOp(op: any[]): void +} + +export type ShareDoc = ShareLatexOTShareDoc | HistoryOTShareDoc diff --git a/services/web/types/subscription/currency.ts b/services/web/types/subscription/currency.ts index 8d6b88dc0b..d63d71be74 100644 --- a/services/web/types/subscription/currency.ts +++ b/services/web/types/subscription/currency.ts @@ -20,3 +20,4 @@ export const currencies = { type Currency = typeof currencies export type CurrencyCode = keyof Currency +export type StripeCurrencyCode = Lowercase diff --git a/services/web/types/subscription/dashboard/subscription.ts b/services/web/types/subscription/dashboard/subscription.ts index a1ee934423..db17b25684 100644 --- a/services/web/types/subscription/dashboard/subscription.ts +++ b/services/web/types/subscription/dashboard/subscription.ts @@ -64,7 +64,6 @@ export type Subscription = { membersLimit: number teamInvites: object[] planCode: string - recurlySubscription_id: string plan: Plan pendingPlan?: PendingPaymentProviderPlan addOns?: AddOn[] @@ -103,7 +102,7 @@ export type MemberGroupSubscription = Omit & { admin_id: User } -type PaymentProviderService = 'stripe' | 'recurly' +type PaymentProviderService = 'stripe-us' | 'stripe-uk' | 'recurly' export type PaymentProvider = { service: PaymentProviderService diff --git a/services/web/types/subscription/plan.ts b/services/web/types/subscription/plan.ts index c5e8f7e820..d6f3008a19 100644 --- a/services/web/types/subscription/plan.ts +++ b/services/web/types/subscription/plan.ts @@ -1,3 +1,5 @@ +import { StripeCurrencyCode } from './currency' + type Features = { collaborators: number compileGroup: string @@ -60,6 +62,7 @@ export type Plan = { name: string planCode: string price_in_cents: number + canUseFlexibleLicensing?: boolean } export type PriceForDisplayData = { @@ -85,15 +88,27 @@ export type RecurlyPlanCode = | 
'group_professional_educational' | 'group_collaborator' | 'group_collaborator_educational' + | 'assistant' + | 'assistant-annual' -export type StripeLookupKey = +export type RecurlyAddOnCode = 'assistant' + +export type StripeBaseLookupKey = | 'standard_monthly' | 'standard_annual' | 'professional_monthly' | 'professional_annual' | 'student_monthly' | 'student_annual' + | 'assistant_annual' + | 'assistant_monthly' + // TODO: change all group plans' lookup_keys to match the UK account after they have been added | 'group_standard_enterprise' | 'group_professional_enterprise' | 'group_standard_educational' | 'group_professional_educational' + +export type StripeLookupKeyVersion = 'jun2025' + +export type StripeLookupKey = + `${StripeBaseLookupKey}_${StripeLookupKeyVersion}_${StripeCurrencyCode}` diff --git a/services/web/types/subscription/sso.ts b/services/web/types/subscription/sso.ts index cf869ec741..6500817407 100644 --- a/services/web/types/subscription/sso.ts +++ b/services/web/types/subscription/sso.ts @@ -13,6 +13,7 @@ export type SSOConfig = { userLastNameAttribute?: string validated?: boolean enabled?: boolean + useSettingsUKAMF?: boolean } export type GroupSSOLinkingStatus = { diff --git a/services/web/types/user-settings.ts b/services/web/types/user-settings.ts index 3e748d937e..add460edfa 100644 --- a/services/web/types/user-settings.ts +++ b/services/web/types/user-settings.ts @@ -17,4 +17,5 @@ export type UserSettings = { mathPreview: boolean referencesSearchMode: 'advanced' | 'simple' enableNewEditor: boolean + breadcrumbs: boolean } diff --git a/services/web/types/user.ts b/services/web/types/user.ts index 8d00ea803f..2fce1ce46b 100644 --- a/services/web/types/user.ts +++ b/services/web/types/user.ts @@ -39,7 +39,7 @@ export type User = { isAdmin?: boolean email: string allowedFreeTrial?: boolean - hasRecurlySubscription?: boolean + hasPaidSubscription?: boolean first_name?: string last_name?: string alphaProgram?: boolean diff --git 
a/services/web/types/window.ts b/services/web/types/window.ts index 1150bf1e50..d2856e7179 100644 --- a/services/web/types/window.ts +++ b/services/web/types/window.ts @@ -1,20 +1,11 @@ import 'recurly__recurly-js' import { ScopeValueStore } from './ide/scope-value-store' import { MetaAttributesCache } from '@/utils/meta' -import { Socket } from '@/features/ide-react/connection/types/socket' declare global { // eslint-disable-next-line no-unused-vars interface Window { metaAttributesCache: MetaAttributesCache - _ide: Record & { - $scope: Record & { - pdf?: { - logEntryAnnotations: Record - } - } - socket: Socket - } MathJax: Record // For react-google-recaptcha recaptchaOptions?: {