diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE/bug_report.md similarity index 83% rename from .github/ISSUE_TEMPLATE.md rename to .github/ISSUE_TEMPLATE/bug_report.md index 3a375bcbe9..9c0577106e 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,10 +1,19 @@ +--- +name: Bug report +about: Report a bug +title: '' +labels: type:bug +assignees: '' + +--- + diff --git a/develop/docker-compose.yml b/develop/docker-compose.yml index 750e11ac87..7161e0686a 100644 --- a/develop/docker-compose.yml +++ b/develop/docker-compose.yml @@ -25,10 +25,10 @@ services: env_file: - dev.env environment: - - DOCKER_RUNNER=true - TEXLIVE_IMAGE=texlive-full # docker build texlive -t texlive-full - - COMPILES_HOST_DIR=${PWD}/compiles - - OUTPUT_HOST_DIR=${PWD}/output + - SANDBOXED_COMPILES=true + - SANDBOXED_COMPILES_HOST_DIR_COMPILES=${PWD}/compiles + - SANDBOXED_COMPILES_HOST_DIR_OUTPUT=${PWD}/output user: root volumes: - ${PWD}/compiles:/overleaf/services/clsi/compiles diff --git a/libraries/access-token-encryptor/buildscript.txt b/libraries/access-token-encryptor/buildscript.txt index 74c3bbbd24..8ce12073ea 100644 --- a/libraries/access-token-encryptor/buildscript.txt +++ b/libraries/access-token-encryptor/buildscript.txt @@ -1,6 +1,6 @@ access-token-encryptor --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/fetch-utils/buildscript.txt b/libraries/fetch-utils/buildscript.txt index 91548ff7c6..35e8eed85b 100644 --- a/libraries/fetch-utils/buildscript.txt +++ b/libraries/fetch-utils/buildscript.txt @@ -1,6 +1,6 @@ fetch-utils --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/logger/buildscript.txt b/libraries/logger/buildscript.txt index 9008707b0e..a3d1cc0646 100644 --- a/libraries/logger/buildscript.txt +++ b/libraries/logger/buildscript.txt @@ -1,6 +1,6 @@ logger --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/metrics/buildscript.txt b/libraries/metrics/buildscript.txt index 2c2e5d7531..58ff195d95 100644 --- a/libraries/metrics/buildscript.txt +++ b/libraries/metrics/buildscript.txt @@ -1,6 +1,6 @@ metrics --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/mongo-utils/batchedUpdate.js b/libraries/mongo-utils/batchedUpdate.js index 7e3ad677db..41af41f0d4 100644 --- a/libraries/mongo-utils/batchedUpdate.js +++ b/libraries/mongo-utils/batchedUpdate.js @@ -35,6 +35,7 @@ let BATCHED_UPDATE_RUNNING = false * @property {string} [BATCH_RANGE_START] * @property {string} [BATCH_SIZE] * @property {string} [VERBOSE_LOGGING] + * @property {(progress: string) => Promise} [trackProgress] */ /** @@ -210,7 +211,7 @@ async function batchedUpdate( update, projection, findOptions, - batchedUpdateOptions + batchedUpdateOptions = {} ) { // only a single batchedUpdate can run at a time due to global variables if (BATCHED_UPDATE_RUNNING) { @@ -226,6 +227,8 @@ async function batchedUpdate( return 0 } refreshGlobalOptionsForBatchedUpdate(batchedUpdateOptions) + const { trackProgress = async progress => 
console.warn(progress) } = + batchedUpdateOptions findOptions = findOptions || {} findOptions.readPreference = READ_PREFERENCE_SECONDARY @@ -255,9 +258,10 @@ async function batchedUpdate( nextBatch.map(entry => entry._id) )}` ) - } else { - console.error(`Running update on batch ending ${renderObjectId(end)}`) } + await trackProgress( + `Running update on batch ending ${renderObjectId(end)}` + ) if (typeof update === 'function') { await update(nextBatch) @@ -265,7 +269,7 @@ async function batchedUpdate( await performUpdate(collection, nextBatch, update) } } - console.error(`Completed batch ending ${renderObjectId(end)}`) + await trackProgress(`Completed batch ending ${renderObjectId(end)}`) start = end } return updated diff --git a/libraries/mongo-utils/buildscript.txt b/libraries/mongo-utils/buildscript.txt index bda8d4f734..35ca540bfb 100644 --- a/libraries/mongo-utils/buildscript.txt +++ b/libraries/mongo-utils/buildscript.txt @@ -1,6 +1,6 @@ mongo-utils --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/o-error/buildscript.txt b/libraries/o-error/buildscript.txt index a4134b4b60..c61679157e 100644 --- a/libraries/o-error/buildscript.txt +++ b/libraries/o-error/buildscript.txt @@ -1,6 +1,6 @@ o-error --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/object-persistor/buildscript.txt b/libraries/object-persistor/buildscript.txt index 75d2e09382..d5113ce910 100644 --- a/libraries/object-persistor/buildscript.txt +++ b/libraries/object-persistor/buildscript.txt @@ -1,6 +1,6 @@ object-persistor --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/overleaf-editor-core/buildscript.txt b/libraries/overleaf-editor-core/buildscript.txt index 9b6508663b..25a221232a 100644 --- a/libraries/overleaf-editor-core/buildscript.txt +++ b/libraries/overleaf-editor-core/buildscript.txt @@ -1,6 +1,6 @@ overleaf-editor-core --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/overleaf-editor-core/lib/file_data/clear_tracking_props.js b/libraries/overleaf-editor-core/lib/file_data/clear_tracking_props.js index ba7f0bf00b..b3ddbab7d8 100644 --- a/libraries/overleaf-editor-core/lib/file_data/clear_tracking_props.js +++ b/libraries/overleaf-editor-core/lib/file_data/clear_tracking_props.js @@ -1,7 +1,7 @@ // @ts-check /** - * @import { ClearTrackingPropsRawData } from '../types' + * @import { ClearTrackingPropsRawData, TrackingDirective } from '../types' */ class ClearTrackingProps { @@ -11,12 +11,27 @@ class ClearTrackingProps { /** * @param {any} other - * @returns {boolean} + * @returns {other is ClearTrackingProps} */ equals(other) { return other instanceof ClearTrackingProps } + /** + * @param {TrackingDirective} other + * @returns {other is ClearTrackingProps} + */ + canMergeWith(other) { + return other instanceof ClearTrackingProps + } + + /** + * @param {TrackingDirective} other + */ + mergeWith(other) { + return this + } + /** * @returns {ClearTrackingPropsRawData} */ diff --git 
a/libraries/overleaf-editor-core/lib/file_data/lazy_string_file_data.js b/libraries/overleaf-editor-core/lib/file_data/lazy_string_file_data.js index bc11b3e98d..abc720d10c 100644 --- a/libraries/overleaf-editor-core/lib/file_data/lazy_string_file_data.js +++ b/libraries/overleaf-editor-core/lib/file_data/lazy_string_file_data.js @@ -11,7 +11,7 @@ const EditOperation = require('../operation/edit_operation') const EditOperationBuilder = require('../operation/edit_operation_builder') /** - * @import { BlobStore, ReadonlyBlobStore, RangesBlob, RawFileData, RawLazyStringFileData } from '../types' + * @import { BlobStore, ReadonlyBlobStore, RangesBlob, RawHashFileData, RawLazyStringFileData } from '../types' */ class LazyStringFileData extends FileData { @@ -159,11 +159,11 @@ class LazyStringFileData extends FileData { /** @inheritdoc * @param {BlobStore} blobStore - * @return {Promise} + * @return {Promise} */ async store(blobStore) { if (this.operations.length === 0) { - /** @type RawFileData */ + /** @type RawHashFileData */ const raw = { hash: this.hash } if (this.rangesHash) { raw.rangesHash = this.rangesHash @@ -171,9 +171,11 @@ class LazyStringFileData extends FileData { return raw } const eager = await this.toEager(blobStore) + const raw = await eager.store(blobStore) + this.hash = raw.hash + this.rangesHash = raw.rangesHash this.operations.length = 0 - /** @type RawFileData */ - return await eager.store(blobStore) + return raw } } diff --git a/libraries/overleaf-editor-core/lib/file_data/string_file_data.js b/libraries/overleaf-editor-core/lib/file_data/string_file_data.js index 48df633461..c78c1e0414 100644 --- a/libraries/overleaf-editor-core/lib/file_data/string_file_data.js +++ b/libraries/overleaf-editor-core/lib/file_data/string_file_data.js @@ -8,7 +8,7 @@ const CommentList = require('./comment_list') const TrackedChangeList = require('./tracked_change_list') /** - * @import { StringFileRawData, RawFileData, BlobStore, CommentRawData } from "../types" + * @import { StringFileRawData, RawHashFileData, BlobStore, CommentRawData } from "../types" * @import { TrackedChangeRawData, RangesBlob } from "../types" * @import EditOperation from "../operation/edit_operation" */ @@ -139,7 +139,7 @@ class StringFileData extends FileData { /** * @inheritdoc * @param {BlobStore} blobStore - * @return {Promise} + * @return {Promise} */ async store(blobStore) { const blob = await blobStore.putString(this.content) diff --git a/libraries/overleaf-editor-core/lib/file_data/tracked_change.js b/libraries/overleaf-editor-core/lib/file_data/tracked_change.js index d0e6517d0f..e789a427b0 100644 --- a/libraries/overleaf-editor-core/lib/file_data/tracked_change.js +++ b/libraries/overleaf-editor-core/lib/file_data/tracked_change.js @@ -84,6 +84,21 @@ class TrackedChange { ) ) } + + /** + * Return an equivalent tracked change whose extent is limited to the given + * range + * + * @param {Range} range + * @returns {TrackedChange | null} - the result or null if the intersection is empty + */ + intersectRange(range) { + const intersection = this.range.intersect(range) + if (intersection == null) { + return null + } + return new TrackedChange(intersection, this.tracking) + } } module.exports = TrackedChange diff --git a/libraries/overleaf-editor-core/lib/file_data/tracked_change_list.js b/libraries/overleaf-editor-core/lib/file_data/tracked_change_list.js index 263b37ab50..b302865c70 100644 --- a/libraries/overleaf-editor-core/lib/file_data/tracked_change_list.js +++ 
b/libraries/overleaf-editor-core/lib/file_data/tracked_change_list.js @@ -2,9 +2,11 @@ const Range = require('../range') const TrackedChange = require('./tracked_change') const TrackingProps = require('../file_data/tracking_props') +const { InsertOp, RemoveOp, RetainOp } = require('../operation/scan_op') /** * @import { TrackingDirective, TrackedChangeRawData } from "../types" + * @import TextOperation from "../operation/text_operation" */ class TrackedChangeList { @@ -58,6 +60,22 @@ class TrackedChangeList { return this._trackedChanges.filter(change => range.contains(change.range)) } + /** + * Returns tracked changes that overlap with the given range + * @param {Range} range + * @returns {TrackedChange[]} + */ + intersectRange(range) { + const changes = [] + for (const change of this._trackedChanges) { + const intersection = change.intersectRange(range) + if (intersection != null) { + changes.push(intersection) + } + } + return changes + } + /** * Returns the tracking props for a given range. * @param {Range} range @@ -89,6 +107,8 @@ class TrackedChangeList { /** * Collapses consecutive (and compatible) ranges + * + * @private * @returns {void} */ _mergeRanges() { @@ -117,12 +137,28 @@ class TrackedChangeList { } /** + * Apply an insert operation * * @param {number} cursor * @param {string} insertedText * @param {{tracking?: TrackingProps}} opts */ applyInsert(cursor, insertedText, opts = {}) { + this._applyInsert(cursor, insertedText, opts) + this._mergeRanges() + } + + /** + * Apply an insert operation + * + * This method will not merge ranges at the end + * + * @private + * @param {number} cursor + * @param {string} insertedText + * @param {{tracking?: TrackingProps}} [opts] + */ + _applyInsert(cursor, insertedText, opts = {}) { const newTrackedChanges = [] for (const trackedChange of this._trackedChanges) { if ( @@ -171,15 +207,29 @@ class TrackedChangeList { newTrackedChanges.push(newTrackedChange) } this._trackedChanges = newTrackedChanges - this._mergeRanges() } /** + * Apply a delete operation to the list of tracked changes * * @param {number} cursor * @param {number} length */ applyDelete(cursor, length) { + this._applyDelete(cursor, length) + this._mergeRanges() + } + + /** + * Apply a delete operation to the list of tracked changes + * + * This method will not merge ranges at the end + * + * @private + * @param {number} cursor + * @param {number} length + */ + _applyDelete(cursor, length) { const newTrackedChanges = [] for (const trackedChange of this._trackedChanges) { const deletedRange = new Range(cursor, length) @@ -205,15 +255,31 @@ class TrackedChangeList { } } this._trackedChanges = newTrackedChanges + } + + /** + * Apply a retain operation to the list of tracked changes + * + * @param {number} cursor + * @param {number} length + * @param {{tracking?: TrackingDirective}} [opts] + */ + applyRetain(cursor, length, opts = {}) { + this._applyRetain(cursor, length, opts) this._mergeRanges() } /** + * Apply a retain operation to the list of tracked changes + * + * This method will not merge ranges at the end + * + * @private * @param {number} cursor * @param {number} length * @param {{tracking?: TrackingDirective}} opts */ - applyRetain(cursor, length, opts = {}) { + _applyRetain(cursor, length, opts = {}) { // If there's no tracking info, leave everything as-is if (!opts.tracking) { return @@ -269,6 +335,31 @@ class TrackedChangeList { newTrackedChanges.push(newTrackedChange) } this._trackedChanges = newTrackedChanges + } + + /** + * Apply a text operation to the list of 
tracked changes + * + * Ranges are merged only once at the end, for performance and to avoid + * problematic edge cases where intermediate ranges get incorrectly merged. + * + * @param {TextOperation} operation + */ + applyTextOperation(operation) { + // this cursor tracks the destination document that gets modified as + // operations are applied to it. + let cursor = 0 + for (const op of operation.ops) { + if (op instanceof InsertOp) { + this._applyInsert(cursor, op.insertion, { tracking: op.tracking }) + cursor += op.insertion.length + } else if (op instanceof RemoveOp) { + this._applyDelete(cursor, op.length) + } else if (op instanceof RetainOp) { + this._applyRetain(cursor, op.length, { tracking: op.tracking }) + cursor += op.length + } + } this._mergeRanges() } } diff --git a/libraries/overleaf-editor-core/lib/file_data/tracking_props.js b/libraries/overleaf-editor-core/lib/file_data/tracking_props.js index 75ec95c566..82d731a232 100644 --- a/libraries/overleaf-editor-core/lib/file_data/tracking_props.js +++ b/libraries/overleaf-editor-core/lib/file_data/tracking_props.js @@ -62,6 +62,35 @@ class TrackingProps { this.ts.getTime() === other.ts.getTime() ) } + + /** + * Are these tracking props compatible with the other tracking props for merging + * ranges? + * + * @param {TrackingDirective} other + * @returns {other is TrackingProps} + */ + canMergeWith(other) { + if (!(other instanceof TrackingProps)) { + return false + } + return this.type === other.type && this.userId === other.userId + } + + /** + * Merge two tracking props + * + * Assumes that `canMerge(other)` returns true + * + * @param {TrackingDirective} other + */ + mergeWith(other) { + if (!this.canMergeWith(other)) { + throw new Error('Cannot merge with incompatible tracking props') + } + const ts = this.ts <= other.ts ? 
this.ts : other.ts + return new TrackingProps(this.type, this.userId, ts) + } } module.exports = TrackingProps diff --git a/libraries/overleaf-editor-core/lib/operation/scan_op.js b/libraries/overleaf-editor-core/lib/operation/scan_op.js index 4f179f24b4..fd322459cc 100644 --- a/libraries/overleaf-editor-core/lib/operation/scan_op.js +++ b/libraries/overleaf-editor-core/lib/operation/scan_op.js @@ -175,7 +175,7 @@ class InsertOp extends ScanOp { return false } if (this.tracking) { - if (!this.tracking.equals(other.tracking)) { + if (!other.tracking || !this.tracking.canMergeWith(other.tracking)) { return false } } else if (other.tracking) { @@ -198,7 +198,10 @@ class InsertOp extends ScanOp { throw new Error('Cannot merge with incompatible operation') } this.insertion += other.insertion - // We already have the same tracking info and commentIds + if (this.tracking != null && other.tracking != null) { + this.tracking = this.tracking.mergeWith(other.tracking) + } + // We already have the same commentIds } /** @@ -306,9 +309,13 @@ class RetainOp extends ScanOp { return false } if (this.tracking) { - return this.tracking.equals(other.tracking) + if (!other.tracking || !this.tracking.canMergeWith(other.tracking)) { + return false + } + } else if (other.tracking) { + return false } - return !other.tracking + return true } /** @@ -319,6 +326,9 @@ class RetainOp extends ScanOp { throw new Error('Cannot merge with incompatible operation') } this.length += other.length + if (this.tracking != null && other.tracking != null) { + this.tracking = this.tracking.mergeWith(other.tracking) + } } /** diff --git a/libraries/overleaf-editor-core/lib/operation/text_operation.js b/libraries/overleaf-editor-core/lib/operation/text_operation.js index 148570fa42..61c7f124b4 100644 --- a/libraries/overleaf-editor-core/lib/operation/text_operation.js +++ b/libraries/overleaf-editor-core/lib/operation/text_operation.js @@ -314,25 +314,18 @@ class TextOperation extends EditOperation { str ) } - file.trackedChanges.applyRetain(result.length, op.length, { - tracking: op.tracking, - }) result += str.slice(inputCursor, inputCursor + op.length) inputCursor += op.length } else if (op instanceof InsertOp) { if (containsNonBmpChars(op.insertion)) { throw new InvalidInsertionError(str, op.toJSON()) } - file.trackedChanges.applyInsert(result.length, op.insertion, { - tracking: op.tracking, - }) file.comments.applyInsert( new Range(result.length, op.insertion.length), { commentIds: op.commentIds } ) result += op.insertion } else if (op instanceof RemoveOp) { - file.trackedChanges.applyDelete(result.length, op.length) file.comments.applyDelete(new Range(result.length, op.length)) inputCursor += op.length } else { @@ -352,6 +345,8 @@ class TextOperation extends EditOperation { throw new TextOperation.TooLongError(operation, result.length) } + file.trackedChanges.applyTextOperation(this) + file.content = result } @@ -400,44 +395,36 @@ class TextOperation extends EditOperation { for (let i = 0, l = ops.length; i < l; i++) { const op = ops[i] if (op instanceof RetainOp) { - // Where we need to end up after the retains - const target = strIndex + op.length - // A previous retain could have overriden some tracking info. Now we - // need to restore it. 
- const previousRanges = previousState.trackedChanges.inRange( - new Range(strIndex, op.length) - ) - - let removeTrackingInfoIfNeeded if (op.tracking) { - removeTrackingInfoIfNeeded = new ClearTrackingProps() - } + // Where we need to end up after the retains + const target = strIndex + op.length + // A previous retain could have overriden some tracking info. Now we + // need to restore it. + const previousChanges = previousState.trackedChanges.intersectRange( + new Range(strIndex, op.length) + ) - for (const trackedChange of previousRanges) { - if (strIndex < trackedChange.range.start) { - inverse.retain(trackedChange.range.start - strIndex, { - tracking: removeTrackingInfoIfNeeded, + for (const change of previousChanges) { + if (strIndex < change.range.start) { + inverse.retain(change.range.start - strIndex, { + tracking: new ClearTrackingProps(), + }) + strIndex = change.range.start + } + inverse.retain(change.range.length, { + tracking: change.tracking, }) - strIndex = trackedChange.range.start + strIndex += change.range.length } - if (trackedChange.range.end < strIndex + op.length) { - inverse.retain(trackedChange.range.length, { - tracking: trackedChange.tracking, + if (strIndex < target) { + inverse.retain(target - strIndex, { + tracking: new ClearTrackingProps(), }) - strIndex = trackedChange.range.end + strIndex = target } - if (trackedChange.range.end !== strIndex) { - // No need to split the range at the end - const [left] = trackedChange.range.splitAt(strIndex) - inverse.retain(left.length, { tracking: trackedChange.tracking }) - strIndex = left.end - } - } - if (strIndex < target) { - inverse.retain(target - strIndex, { - tracking: removeTrackingInfoIfNeeded, - }) - strIndex = target + } else { + inverse.retain(op.length) + strIndex += op.length } } else if (op instanceof InsertOp) { inverse.remove(op.insertion.length) diff --git a/libraries/overleaf-editor-core/lib/range.js b/libraries/overleaf-editor-core/lib/range.js index bc47632f92..b3fb2bd78b 100644 --- a/libraries/overleaf-editor-core/lib/range.js +++ b/libraries/overleaf-editor-core/lib/range.js @@ -86,10 +86,32 @@ class Range { } /** - * @param {Range} range + * Does this range overlap another range? + * + * Overlapping means that the two ranges have at least one character in common + * + * @param {Range} other - the other range */ - overlaps(range) { - return this.start < range.end && this.end > range.start + overlaps(other) { + return this.start < other.end && this.end > other.start + } + + /** + * Does this range overlap the start of another range? + * + * @param {Range} other - the other range + */ + overlapsStart(other) { + return this.start <= other.start && this.end > other.start + } + + /** + * Does this range overlap the end of another range? 
+ * + * @param {Range} other - the other range + */ + overlapsEnd(other) { + return this.start < other.end && this.end >= other.end } /** @@ -227,6 +249,26 @@ class Range { ) return [rangeUpToCursor, rangeAfterCursor] } + + /** + * Returns the intersection of this range with another range + * + * @param {Range} other - the other range + * @return {Range | null} the intersection or null if the intersection is empty + */ + intersect(other) { + if (this.contains(other)) { + return other + } else if (other.contains(this)) { + return this + } else if (other.overlapsStart(this)) { + return new Range(this.pos, other.end - this.start) + } else if (other.overlapsEnd(this)) { + return new Range(other.pos, this.end - other.start) + } else { + return null + } + } } module.exports = Range diff --git a/libraries/overleaf-editor-core/test/lazy_string_file_data.test.js b/libraries/overleaf-editor-core/test/lazy_string_file_data.test.js index 4c9f4aa497..946e6cd5d1 100644 --- a/libraries/overleaf-editor-core/test/lazy_string_file_data.test.js +++ b/libraries/overleaf-editor-core/test/lazy_string_file_data.test.js @@ -193,4 +193,13 @@ describe('LazyStringFileData', function () { expect(fileData.getStringLength()).to.equal(longString.length) expect(fileData.getOperations()).to.have.length(1) }) + + it('truncates its operations after being stored', async function () { + const testHash = File.EMPTY_FILE_HASH + const fileData = new LazyStringFileData(testHash, undefined, 0) + fileData.edit(new TextOperation().insert('abc')) + const stored = await fileData.store(this.blobStore) + expect(fileData.hash).to.equal(stored.hash) + expect(fileData.operations).to.deep.equal([]) + }) }) diff --git a/libraries/overleaf-editor-core/test/range.test.js b/libraries/overleaf-editor-core/test/range.test.js index daad8fd6ed..9a048d5c03 100644 --- a/libraries/overleaf-editor-core/test/range.test.js +++ b/libraries/overleaf-editor-core/test/range.test.js @@ -1,4 +1,3 @@ -// @ts-check 'use strict' const { expect } = require('chai') @@ -449,4 +448,44 @@ describe('Range', function () { expect(() => range.insertAt(16, 3)).to.throw() }) }) + + describe('intersect', function () { + it('should handle partially overlapping ranges', function () { + const range1 = new Range(5, 10) + const range2 = new Range(3, 6) + const intersection1 = range1.intersect(range2) + expect(intersection1.pos).to.equal(5) + expect(intersection1.length).to.equal(4) + const intersection2 = range2.intersect(range1) + expect(intersection2.pos).to.equal(5) + expect(intersection2.length).to.equal(4) + }) + + it('should intersect with itself', function () { + const range = new Range(5, 10) + const intersection = range.intersect(range) + expect(intersection.pos).to.equal(5) + expect(intersection.length).to.equal(10) + }) + + it('should handle nested ranges', function () { + const range1 = new Range(5, 10) + const range2 = new Range(7, 2) + const intersection1 = range1.intersect(range2) + expect(intersection1.pos).to.equal(7) + expect(intersection1.length).to.equal(2) + const intersection2 = range2.intersect(range1) + expect(intersection2.pos).to.equal(7) + expect(intersection2.length).to.equal(2) + }) + + it('should handle disconnected ranges', function () { + const range1 = new Range(5, 10) + const range2 = new Range(20, 30) + const intersection1 = range1.intersect(range2) + expect(intersection1).to.be.null + const intersection2 = range2.intersect(range1) + expect(intersection2).to.be.null + }) + }) }) diff --git a/libraries/overleaf-editor-core/test/scan_op.test.js 
b/libraries/overleaf-editor-core/test/scan_op.test.js index 80ab69114e..98f4834d48 100644 --- a/libraries/overleaf-editor-core/test/scan_op.test.js +++ b/libraries/overleaf-editor-core/test/scan_op.test.js @@ -107,7 +107,7 @@ describe('RetainOp', function () { expect(op1.equals(new RetainOp(3))).to.be.true }) - it('cannot merge with another RetainOp if tracking info is different', function () { + it('cannot merge with another RetainOp if the tracking user is different', function () { const op1 = new RetainOp( 4, new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) @@ -120,14 +120,14 @@ describe('RetainOp', function () { expect(() => op1.mergeWith(op2)).to.throw(Error) }) - it('can merge with another RetainOp if tracking info is the same', function () { + it('can merge with another RetainOp if the tracking user is the same', function () { const op1 = new RetainOp( 4, new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) ) const op2 = new RetainOp( 4, - new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) + new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:01.000Z')) ) op1.mergeWith(op2) expect( @@ -310,7 +310,7 @@ describe('InsertOp', function () { expect(() => op1.mergeWith(op2)).to.throw(Error) }) - it('cannot merge with another InsertOp if tracking info is different', function () { + it('cannot merge with another InsertOp if tracking user is different', function () { const op1 = new InsertOp( 'a', new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) @@ -323,7 +323,7 @@ describe('InsertOp', function () { expect(() => op1.mergeWith(op2)).to.throw(Error) }) - it('can merge with another InsertOp if tracking and comment info is the same', function () { + it('can merge with another InsertOp if tracking user and comment info is the same', function () { const op1 = new InsertOp( 'a', new TrackingProps( @@ -338,7 +338,7 @@ describe('InsertOp', function () { new TrackingProps( 'insert', 'user1', - new Date('2024-01-01T00:00:00.000Z') + new Date('2024-01-01T00:00:01.000Z') ), ['1', '2'] ) diff --git a/libraries/overleaf-editor-core/test/text_operation.test.js b/libraries/overleaf-editor-core/test/text_operation.test.js index fa9bc62dc3..43b8c707a6 100644 --- a/libraries/overleaf-editor-core/test/text_operation.test.js +++ b/libraries/overleaf-editor-core/test/text_operation.test.js @@ -322,6 +322,47 @@ describe('TextOperation', function () { new TextOperation().retain(4).remove(4).retain(3) ) }) + + it('undoing a tracked delete restores the tracked changes', function () { + expectInverseToLeadToInitialState( + new StringFileData( + 'the quick brown fox jumps over the lazy dog', + undefined, + [ + { + range: { pos: 5, length: 5 }, + tracking: { + ts: '2023-01-01T00:00:00.000Z', + type: 'insert', + userId: 'user1', + }, + }, + { + range: { pos: 12, length: 3 }, + tracking: { + ts: '2023-01-01T00:00:00.000Z', + type: 'delete', + userId: 'user1', + }, + }, + { + range: { pos: 18, length: 5 }, + tracking: { + ts: '2023-01-01T00:00:00.000Z', + type: 'insert', + userId: 'user1', + }, + }, + ] + ), + new TextOperation() + .retain(7) + .retain(13, { + tracking: new TrackingProps('delete', 'user1', new Date()), + }) + .retain(23) + ) + }) }) describe('compose', function () { diff --git a/libraries/promise-utils/buildscript.txt b/libraries/promise-utils/buildscript.txt index 73dec381c1..32c9fc8793 100644 --- a/libraries/promise-utils/buildscript.txt +++ b/libraries/promise-utils/buildscript.txt @@ -1,6 +1,6 @@ 
promise-utils --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/ranges-tracker/buildscript.txt b/libraries/ranges-tracker/buildscript.txt index 6276182679..be28fc1d80 100644 --- a/libraries/ranges-tracker/buildscript.txt +++ b/libraries/ranges-tracker/buildscript.txt @@ -1,6 +1,6 @@ ranges-tracker --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/redis-wrapper/buildscript.txt b/libraries/redis-wrapper/buildscript.txt index 1e4489a655..395bc706ac 100644 --- a/libraries/redis-wrapper/buildscript.txt +++ b/libraries/redis-wrapper/buildscript.txt @@ -1,6 +1,6 @@ redis-wrapper --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/settings/buildscript.txt b/libraries/settings/buildscript.txt index 925234f561..d4daff96d5 100644 --- a/libraries/settings/buildscript.txt +++ b/libraries/settings/buildscript.txt @@ -1,6 +1,6 @@ settings --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/libraries/stream-utils/buildscript.txt b/libraries/stream-utils/buildscript.txt index a04310e77f..1da6bdade9 100644 --- a/libraries/stream-utils/buildscript.txt +++ b/libraries/stream-utils/buildscript.txt @@ -1,6 +1,6 @@ stream-utils --dependencies=None ---docker-repos=gcr.io/overleaf-ops +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker --env-add= --env-pass-through= --esmock-loader=False diff --git a/package-lock.json b/package-lock.json index 4a14efb544..2a3bb7696d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5943,15 +5943,16 @@ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/@grpc/grpc-js": { - "version": "1.8.22", - "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.8.22.tgz", - "integrity": "sha512-oAjDdN7fzbUi+4hZjKG96MR6KTEubAeMpQEb+77qy+3r0Ua5xTFuie6JOLr4ZZgl5g+W5/uRTS2M1V8mVAFPuA==", + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.13.4.tgz", + "integrity": "sha512-GsFaMXCkMqkKIvwCQjCrwH+GHbPKBjhwo/8ZuUkWHqbI73Kky9I+pQltrlT0+MWpedCoosda53lgjYfyEPgxBg==", + "license": "Apache-2.0", "dependencies": { - "@grpc/proto-loader": "^0.7.0", - "@types/node": ">=12.12.47" + "@grpc/proto-loader": "^0.7.13", + "@js-sdsl/ordered-map": "^4.4.2" }, "engines": { - "node": "^8.13.0 || >=10.10.0" + "node": ">=12.10.0" } }, "node_modules/@grpc/proto-loader": { @@ -6989,6 +6990,18 @@ "dev": true, "optional": true }, + "node_modules/@noble/hashes": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.8.0.tgz", + "integrity": "sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==", + "license": "MIT", + "engines": { + "node": "^14.21.3 || >=16" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, "node_modules/@node-oauth/formats": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/@node-oauth/formats/-/formats-1.0.0.tgz", @@ -8643,6 +8656,15 @@ "resolved": "services/web", "link": true }, + 
"node_modules/@paralleldrive/cuid2": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/@paralleldrive/cuid2/-/cuid2-2.2.2.tgz", + "integrity": "sha512-ZOBkgDwEdoYVlSeRbYYXs0S9MejQofiVYoTbKzy/6GQa39/q5tQU2IX46+shYnUkpEl3wc+J6wRlar7r2EK2xA==", + "license": "MIT", + "dependencies": { + "@noble/hashes": "^1.1.5" + } + }, "node_modules/@phosphor-icons/react": { "version": "2.1.7", "resolved": "https://registry.npmjs.org/@phosphor-icons/react/-/react-2.1.7.tgz", @@ -11575,29 +11597,6 @@ "storybook": "^8.2.0 || ^8.3.0-0 || ^8.4.0-0 || ^8.5.0-0 || ^8.6.0-0" } }, - "node_modules/@stripe/react-stripe-js": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/@stripe/react-stripe-js/-/react-stripe-js-3.5.0.tgz", - "integrity": "sha512-oo5J2SNbuAUjE9XmQv/SOD7vgZCa1Y9OcZyRAfvQPkyrDrru35sg5c64ANdHEmOWUibism3+25rKdARSw3HOfA==", - "license": "MIT", - "dependencies": { - "prop-types": "^15.7.2" - }, - "peerDependencies": { - "@stripe/stripe-js": ">=1.44.1 <7.0.0", - "react": ">=16.8.0 <20.0.0", - "react-dom": ">=16.8.0 <20.0.0" - } - }, - "node_modules/@stripe/stripe-js": { - "version": "5.10.0", - "resolved": "https://registry.npmjs.org/@stripe/stripe-js/-/stripe-js-5.10.0.tgz", - "integrity": "sha512-PTigkxMdMUP6B5ISS7jMqJAKhgrhZwjprDqR1eATtFfh0OpKVNp110xiH+goeVdrJ29/4LeZJR4FaHHWstsu0A==", - "license": "MIT", - "engines": { - "node": ">=12.16" - } - }, "node_modules/@swc/helpers": { "version": "0.5.17", "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.17.tgz", @@ -15252,13 +15251,13 @@ } }, "node_modules/array-buffer-byte-length": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", - "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz", + "integrity": "sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.5", - "is-array-buffer": "^3.0.4" + "call-bound": "^1.0.3", + "is-array-buffer": "^3.0.5" }, "engines": { "node": ">= 0.4" @@ -15374,19 +15373,18 @@ } }, "node_modules/arraybuffer.prototype.slice": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", - "integrity": "sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz", + "integrity": "sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==", "license": "MIT", "dependencies": { "array-buffer-byte-length": "^1.0.1", - "call-bind": "^1.0.5", + "call-bind": "^1.0.8", "define-properties": "^1.2.1", - "es-abstract": "^1.22.3", - "es-errors": "^1.2.1", - "get-intrinsic": "^1.2.3", - "is-array-buffer": "^3.0.4", - "is-shared-array-buffer": "^1.0.2" + "es-abstract": "^1.23.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "is-array-buffer": "^3.0.4" }, "engines": { "node": ">= 0.4" @@ -15480,6 +15478,15 @@ "resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz", "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==" }, + "node_modules/async-function": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/async-function/-/async-function-1.0.0.tgz", + "integrity": "sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/async-lock": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/async-lock/-/async-lock-1.4.1.tgz", @@ -16049,24 +16056,32 @@ "optional": true }, "node_modules/bare-fs": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-4.0.1.tgz", - "integrity": "sha512-ilQs4fm/l9eMfWY2dY0WCIUplSUp7U0CT1vrqMg1MUdeZl4fypu5UP0XcDBK5WBQPJAKP1b7XEodISmekH/CEg==", + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-4.1.5.tgz", + "integrity": "sha512-1zccWBMypln0jEE05LzZt+V/8y8AQsQQqxtklqaIyg5nu6OAYFhZxPXinJTSG+kU5qyNmeLgcn9AW7eHiCHVLA==", "license": "Apache-2.0", "optional": true, "dependencies": { - "bare-events": "^2.0.0", + "bare-events": "^2.5.4", "bare-path": "^3.0.0", - "bare-stream": "^2.0.0" + "bare-stream": "^2.6.4" }, "engines": { - "bare": ">=1.7.0" + "bare": ">=1.16.0" + }, + "peerDependencies": { + "bare-buffer": "*" + }, + "peerDependenciesMeta": { + "bare-buffer": { + "optional": true + } } }, "node_modules/bare-os": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-3.6.0.tgz", - "integrity": "sha512-BUrFS5TqSBdA0LwHop4OjPJwisqxGy6JsWVqV6qaFoe965qqtaKfDzHY5T2YA1gUL0ZeeQeA+4BBc1FJTcHiPw==", + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-3.6.1.tgz", + "integrity": "sha512-uaIjxokhFidJP+bmmvKSgiMzj2sV5GPHaZVAIktcxcpCyBFFWO+YlikVAdhmUo2vYFvFhOXIAlldqV29L8126g==", "license": "Apache-2.0", "optional": true, "engines": { @@ -16948,15 +16963,44 @@ } }, "node_modules/call-bind": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", - "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", + "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", + "license": "MIT", "dependencies": { + "call-bind-apply-helpers": "^1.0.0", "es-define-property": "^1.0.0", - "es-errors": "^1.3.0", - "function-bind": "^1.1.2", "get-intrinsic": "^1.2.4", - "set-function-length": "^1.2.1" + "set-function-length": "^1.2.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" }, "engines": { "node": ">= 0.4" @@ -17445,7 +17489,8 @@ "node_modules/chownr": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", - "integrity": 
"sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", + "license": "ISC" }, "node_modules/chrome-trace-event": { "version": "1.0.3", @@ -17803,12 +17848,10 @@ "license": "MIT" }, "node_modules/commander": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", - "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", - "engines": { - "node": ">= 6" - } + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.11.0.tgz", + "integrity": "sha512-b0553uYA5YAEGgyYIGYROzKQ7X5RAqedkfjiZxwi0kL1g3bOaBNNZfYkzt/CL0umgD5wc9Jec2FbB98CjkMRvQ==", + "license": "MIT" }, "node_modules/common-path-prefix": { "version": "3.0.0", @@ -17923,46 +17966,20 @@ "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" }, "node_modules/concat-stream": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", - "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-2.0.0.tgz", + "integrity": "sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==", "engines": [ - "node >= 0.8" + "node >= 6.0" ], + "license": "MIT", "dependencies": { "buffer-from": "^1.0.0", "inherits": "^2.0.3", - "readable-stream": "^2.2.2", + "readable-stream": "^3.0.2", "typedarray": "^0.0.6" } }, - "node_modules/concat-stream/node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" - }, - "node_modules/concat-stream/node_modules/readable-stream": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", - "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/concat-stream/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, "node_modules/confbox": { "version": "0.1.8", "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.8.tgz", @@ -18408,6 +18425,20 @@ "node": ">=10" } }, + "node_modules/cpu-features": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", + "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", + "hasInstallScript": true, + "optional": true, + "dependencies": { + "buildcheck": "~0.0.6", + "nan": "^2.19.0" + }, + "engines": { + "node": ">=10.0.0" + } + }, "node_modules/crc-32": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", @@ -19453,14 +19484,14 @@ } }, "node_modules/data-view-buffer": { 
- "version": "1.0.1", - "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz", - "integrity": "sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz", + "integrity": "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.6", + "call-bound": "^1.0.3", "es-errors": "^1.3.0", - "is-data-view": "^1.0.1" + "is-data-view": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -19470,29 +19501,29 @@ } }, "node_modules/data-view-byte-length": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz", - "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz", + "integrity": "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", + "call-bound": "^1.0.3", "es-errors": "^1.3.0", - "is-data-view": "^1.0.1" + "is-data-view": "^1.0.2" }, "engines": { "node": ">= 0.4" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "url": "https://github.com/sponsors/inspect-js" } }, "node_modules/data-view-byte-offset": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz", - "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz", + "integrity": "sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.6", + "call-bound": "^1.0.2", "es-errors": "^1.3.0", "is-data-view": "^1.0.1" }, @@ -19903,7 +19934,6 @@ "version": "1.0.4", "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", "integrity": "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==", - "dev": true, "dependencies": { "asap": "^2.0.0", "wrappy": "1" @@ -19975,6 +20005,88 @@ "node": ">=6" } }, + "node_modules/docker-modem": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.6.tgz", + "integrity": "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", + "license": "Apache-2.0", + "dependencies": { + "debug": "^4.1.1", + "readable-stream": "^3.5.0", + "split-ca": "^1.0.1", + "ssh2": "^1.15.0" + }, + "engines": { + "node": ">= 8.0" + } + }, + "node_modules/dockerode": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.7.tgz", + "integrity": "sha512-R+rgrSRTRdU5mH14PZTCPZtW/zw3HDWNTS/1ZAQpL/5Upe/ye5K9WQkIysu4wBoiMwKynsz0a8qWuGsHgEvSAA==", + "license": "Apache-2.0", + "dependencies": { + "@balena/dockerignore": "^1.0.2", + "@grpc/grpc-js": "^1.11.1", + "@grpc/proto-loader": "^0.7.13", + "docker-modem": "^5.0.6", + "protobufjs": "^7.3.2", + "tar-fs": "~2.1.2", + "uuid": "^10.0.0" + }, + "engines": { + "node": ">= 8.0" + } + }, + "node_modules/dockerode/node_modules/protobufjs": { + "version": "7.5.3", + 
"resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.5.3.tgz", + "integrity": "sha512-sildjKwVqOI2kmFDiXQ6aEB0fjYTafpEvIBs8tOR8qI4spuL9OPROLVu2qZqi/xgCfsHIwVqlaF8JBjWFHnKbw==", + "hasInstallScript": true, + "license": "BSD-3-Clause", + "dependencies": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/node": ">=13.7.0", + "long": "^5.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/dockerode/node_modules/tar-fs": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.3.tgz", + "integrity": "sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg==", + "license": "MIT", + "dependencies": { + "chownr": "^1.1.1", + "mkdirp-classic": "^0.5.2", + "pump": "^3.0.0", + "tar-stream": "^2.1.4" + } + }, + "node_modules/dockerode/node_modules/uuid": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, "node_modules/doctrine": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", @@ -20202,6 +20314,20 @@ "node": ">=0.10" } }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/duplexify": { "version": "4.1.3", "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz", @@ -20533,57 +20659,65 @@ } }, "node_modules/es-abstract": { - "version": "1.23.3", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.3.tgz", - "integrity": "sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==", + "version": "1.24.0", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz", + "integrity": "sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg==", "license": "MIT", "dependencies": { - "array-buffer-byte-length": "^1.0.1", - "arraybuffer.prototype.slice": "^1.0.3", + "array-buffer-byte-length": "^1.0.2", + "arraybuffer.prototype.slice": "^1.0.4", "available-typed-arrays": "^1.0.7", - "call-bind": "^1.0.7", - "data-view-buffer": "^1.0.1", - "data-view-byte-length": "^1.0.1", - "data-view-byte-offset": "^1.0.0", - "es-define-property": "^1.0.0", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "data-view-buffer": "^1.0.2", + "data-view-byte-length": "^1.0.2", + "data-view-byte-offset": "^1.0.1", + "es-define-property": "^1.0.1", "es-errors": "^1.3.0", - "es-object-atoms": "^1.0.0", - "es-set-tostringtag": "^2.0.3", - "es-to-primitive": "^1.2.1", - "function.prototype.name": "^1.1.6", - "get-intrinsic": "^1.2.4", - "get-symbol-description": "^1.0.2", - 
"globalthis": "^1.0.3", - "gopd": "^1.0.1", + "es-object-atoms": "^1.1.1", + "es-set-tostringtag": "^2.1.0", + "es-to-primitive": "^1.3.0", + "function.prototype.name": "^1.1.8", + "get-intrinsic": "^1.3.0", + "get-proto": "^1.0.1", + "get-symbol-description": "^1.1.0", + "globalthis": "^1.0.4", + "gopd": "^1.2.0", "has-property-descriptors": "^1.0.2", - "has-proto": "^1.0.3", - "has-symbols": "^1.0.3", + "has-proto": "^1.2.0", + "has-symbols": "^1.1.0", "hasown": "^2.0.2", - "internal-slot": "^1.0.7", - "is-array-buffer": "^3.0.4", + "internal-slot": "^1.1.0", + "is-array-buffer": "^3.0.5", "is-callable": "^1.2.7", - "is-data-view": "^1.0.1", + "is-data-view": "^1.0.2", "is-negative-zero": "^2.0.3", - "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.3", - "is-string": "^1.0.7", - "is-typed-array": "^1.1.13", - "is-weakref": "^1.0.2", - "object-inspect": "^1.13.1", + "is-regex": "^1.2.1", + "is-set": "^2.0.3", + "is-shared-array-buffer": "^1.0.4", + "is-string": "^1.1.1", + "is-typed-array": "^1.1.15", + "is-weakref": "^1.1.1", + "math-intrinsics": "^1.1.0", + "object-inspect": "^1.13.4", "object-keys": "^1.1.1", - "object.assign": "^4.1.5", - "regexp.prototype.flags": "^1.5.2", - "safe-array-concat": "^1.1.2", - "safe-regex-test": "^1.0.3", - "string.prototype.trim": "^1.2.9", - "string.prototype.trimend": "^1.0.8", + "object.assign": "^4.1.7", + "own-keys": "^1.0.1", + "regexp.prototype.flags": "^1.5.4", + "safe-array-concat": "^1.1.3", + "safe-push-apply": "^1.0.0", + "safe-regex-test": "^1.1.0", + "set-proto": "^1.0.0", + "stop-iteration-iterator": "^1.1.0", + "string.prototype.trim": "^1.2.10", + "string.prototype.trimend": "^1.0.9", "string.prototype.trimstart": "^1.0.8", - "typed-array-buffer": "^1.0.2", - "typed-array-byte-length": "^1.0.1", - "typed-array-byte-offset": "^1.0.2", - "typed-array-length": "^1.0.6", - "unbox-primitive": "^1.0.2", - "which-typed-array": "^1.1.15" + "typed-array-buffer": "^1.0.3", + "typed-array-byte-length": "^1.0.3", + "typed-array-byte-offset": "^1.0.4", + "typed-array-length": "^1.0.7", + "unbox-primitive": "^1.1.0", + "which-typed-array": "^1.1.19" }, "engines": { "node": ">= 0.4" @@ -20593,12 +20727,10 @@ } }, "node_modules/es-define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", - "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", - "dependencies": { - "get-intrinsic": "^1.2.4" - }, + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", "engines": { "node": ">= 0.4" } @@ -20639,9 +20771,9 @@ "license": "MIT" }, "node_modules/es-object-atoms": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", - "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", "license": "MIT", "dependencies": { "es-errors": "^1.3.0" @@ -20651,14 +20783,15 @@ } }, "node_modules/es-set-tostringtag": { - "version": "2.0.3", - "resolved": 
"https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", - "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", "license": "MIT", "dependencies": { - "get-intrinsic": "^1.2.4", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", "has-tostringtag": "^1.0.2", - "hasown": "^2.0.1" + "hasown": "^2.0.2" }, "engines": { "node": ">= 0.4" @@ -20674,13 +20807,14 @@ } }, "node_modules/es-to-primitive": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", - "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.3.0.tgz", + "integrity": "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==", + "license": "MIT", "dependencies": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" + "is-callable": "^1.2.7", + "is-date-object": "^1.0.5", + "is-symbol": "^1.0.4" }, "engines": { "node": ">= 0.4" @@ -22835,8 +22969,7 @@ "node_modules/fast-safe-stringify": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", - "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", - "dev": true + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" }, "node_modules/fast-text-encoding": { "version": "1.0.3", @@ -23331,11 +23464,18 @@ } }, "node_modules/for-each": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", - "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", + "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", + "license": "MIT", "dependencies": { - "is-callable": "^1.1.3" + "is-callable": "^1.2.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/for-in": { @@ -23497,6 +23637,7 @@ "resolved": "https://registry.npmjs.org/formidable/-/formidable-1.2.6.tgz", "integrity": "sha512-KcpbcpuLNOwrEjnbpMC0gS+X8ciDoZE1kkqzat4a8vrprf+s9pKNQ/QIwWfbfs4ltgmFl3MD177SNTkve3BwGQ==", "deprecated": "Please upgrade to latest, formidable@v2 or formidable@v3! 
Check these notes: https://bit.ly/2ZEqIau", + "license": "MIT", "funding": { "url": "https://ko-fi.com/tunnckoCore/commissions" } @@ -23672,14 +23813,17 @@ } }, "node_modules/function.prototype.name": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz", - "integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==", + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.8.tgz", + "integrity": "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==", + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "functions-have-names": "^1.2.3" + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "functions-have-names": "^1.2.3", + "hasown": "^2.0.2", + "is-callable": "^1.2.7" }, "engines": { "node": ">= 0.4" @@ -23791,15 +23935,21 @@ } }, "node_modules/get-intrinsic": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", - "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", - "has-proto": "^1.0.1", - "has-symbols": "^1.0.3", - "hasown": "^2.0.0" + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" }, "engines": { "node": ">= 0.4" @@ -23827,6 +23977,19 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/get-stream": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", @@ -23843,14 +24006,14 @@ } }, "node_modules/get-symbol-description": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", - "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.1.0.tgz", + "integrity": "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.5", + "call-bound": "^1.0.3", "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.4" + "get-intrinsic": "^1.2.6" }, "engines": { "node": ">= 0.4" @@ -24070,11 +24233,13 @@ } }, "node_modules/globalthis": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", - "integrity": 
"sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", + "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", + "license": "MIT", "dependencies": { - "define-properties": "^1.1.3" + "define-properties": "^1.2.1", + "gopd": "^1.0.1" }, "engines": { "node": ">= 0.4" @@ -24621,11 +24786,12 @@ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/gopd": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", - "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", - "dependencies": { - "get-intrinsic": "^1.1.3" + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -24645,6 +24811,7 @@ "version": "2.1.8", "resolved": "https://registry.npmjs.org/graphlib/-/graphlib-2.1.8.tgz", "integrity": "sha512-jcLLfkpoVGmH7/InMC/1hIvOPSUh38oJtGhvrOFGzioE1DZ+0YW16RgmOJhHiuWTvGiJQ9Z1Ik43JvkRPRvE+A==", + "license": "MIT", "dependencies": { "lodash": "^4.17.15" } @@ -24865,10 +25032,13 @@ } }, "node_modules/has-proto": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", - "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.2.0.tgz", + "integrity": "sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==", "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.0" + }, "engines": { "node": ">= 0.4" }, @@ -24877,9 +25047,10 @@ } }, "node_modules/has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -25837,14 +26008,14 @@ } }, "node_modules/internal-slot": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", - "integrity": "sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", + "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==", "license": "MIT", "dependencies": { "es-errors": "^1.3.0", - "hasown": "^2.0.0", - "side-channel": "^1.0.4" + "hasown": "^2.0.2", + "side-channel": "^1.1.0" }, "engines": { "node": ">= 0.4" @@ -26022,13 +26193,14 @@ } }, "node_modules/is-array-buffer": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", - "integrity": 
"sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz", + "integrity": "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.1" + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "get-intrinsic": "^1.2.6" }, "engines": { "node": ">= 0.4" @@ -26043,12 +26215,35 @@ "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", "dev": true }, - "node_modules/is-bigint": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", - "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", + "node_modules/is-async-function": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.1.tgz", + "integrity": "sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==", + "license": "MIT", "dependencies": { - "has-bigints": "^1.0.1" + "async-function": "^1.0.0", + "call-bound": "^1.0.3", + "get-proto": "^1.0.1", + "has-tostringtag": "^1.0.2", + "safe-regex-test": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-bigint": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.1.0.tgz", + "integrity": "sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==", + "license": "MIT", + "dependencies": { + "has-bigints": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -26067,12 +26262,13 @@ } }, "node_modules/is-boolean-object": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", - "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.2.tgz", + "integrity": "sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==", + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -26137,11 +26333,13 @@ } }, "node_modules/is-data-view": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz", - "integrity": "sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.2.tgz", + "integrity": "sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==", "license": "MIT", "dependencies": { + "call-bound": "^1.0.2", + "get-intrinsic": "^1.2.6", "is-typed-array": "^1.1.13" }, "engines": { @@ -26152,11 +26350,13 @@ } }, "node_modules/is-date-object": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", - "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/is-date-object/-/is-date-object-1.1.0.tgz", + "integrity": "sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==", + "license": "MIT", "dependencies": { - "has-tostringtag": "^1.0.0" + "call-bound": "^1.0.2", + "has-tostringtag": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -26221,6 +26421,21 @@ "node": ">=0.10.0" } }, + "node_modules/is-finalizationregistry": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz", + "integrity": "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -26318,10 +26533,13 @@ } }, "node_modules/is-map": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.2.tgz", - "integrity": "sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg==", - "dev": true, + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", + "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -26383,11 +26601,13 @@ } }, "node_modules/is-number-object": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", - "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz", + "integrity": "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==", + "license": "MIT", "dependencies": { - "has-tostringtag": "^1.0.0" + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -26446,12 +26666,15 @@ "integrity": "sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ==" }, "node_modules/is-regex": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", - "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", + "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" + "call-bound": "^1.0.2", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" }, "engines": { "node": ">= 0.4" @@ -26461,10 +26684,13 @@ } }, "node_modules/is-set": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.2.tgz", - "integrity": "sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g==", - "dev": true, + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", + "integrity": 
"sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -26477,12 +26703,12 @@ "license": "MIT" }, "node_modules/is-shared-array-buffer": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", - "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz", + "integrity": "sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.7" + "call-bound": "^1.0.3" }, "engines": { "node": ">= 0.4" @@ -26503,11 +26729,13 @@ } }, "node_modules/is-string": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", - "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.1.1.tgz", + "integrity": "sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==", + "license": "MIT", "dependencies": { - "has-tostringtag": "^1.0.0" + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -26517,11 +26745,14 @@ } }, "node_modules/is-symbol": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", - "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.1.1.tgz", + "integrity": "sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==", + "license": "MIT", "dependencies": { - "has-symbols": "^1.0.2" + "call-bound": "^1.0.2", + "has-symbols": "^1.1.0", + "safe-regex-test": "^1.1.0" }, "engines": { "node": ">= 0.4" @@ -26531,12 +26762,12 @@ } }, "node_modules/is-typed-array": { - "version": "1.1.13", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", - "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", + "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", "license": "MIT", "dependencies": { - "which-typed-array": "^1.1.14" + "which-typed-array": "^1.1.16" }, "engines": { "node": ">= 0.4" @@ -26577,33 +26808,43 @@ "integrity": "sha512-X/kiF3Xndj6WI7l/yLyzR7V1IbQd6L4S4cewSL0fRciemPmHbaXIKR2qtf+zseH+lbMG0vFp4HvCUe7amGZVhw==" }, "node_modules/is-weakmap": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.1.tgz", - "integrity": "sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA==", - "dev": true, + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", + "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, "funding": { "url": 
"https://github.com/sponsors/ljharb" } }, "node_modules/is-weakref": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", - "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.1.1.tgz", + "integrity": "sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==", + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2" + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-weakset": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.2.tgz", - "integrity": "sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg==", - "dev": true, + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.4.tgz", + "integrity": "sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==", + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" + "call-bound": "^1.0.3", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -27324,6 +27565,7 @@ "version": "3.0.15", "resolved": "https://registry.npmjs.org/json-refs/-/json-refs-3.0.15.tgz", "integrity": "sha512-0vOQd9eLNBL18EGl5yYaO44GhixmImes2wiYn9Z3sag3QnehWrYWlB9AFtMxCL2Bj3fyxgDYkxGFEU/chlYssw==", + "license": "MIT", "dependencies": { "commander": "~4.1.1", "graphlib": "^2.1.8", @@ -27345,14 +27587,25 @@ "version": "1.0.10", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "license": "MIT", "dependencies": { "sprintf-js": "~1.0.2" } }, + "node_modules/json-refs/node_modules/commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, "node_modules/json-refs/node_modules/js-yaml": { "version": "3.14.1", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "license": "MIT", "dependencies": { "argparse": "^1.0.7", "esprima": "^4.0.0" @@ -27365,6 +27618,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "license": "MIT", "engines": { "node": ">=8" } @@ -28130,12 +28384,14 @@ "node_modules/lodash._arraypool": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._arraypool/-/lodash._arraypool-2.4.1.tgz", - "integrity": "sha1-6I7suS4ruEyQZWEv2VigcZzUf5Q=" + "integrity": "sha512-tC2aLC7bbkDXKNrjDu9OLiVx9pFIvjinID2eD9PzNdAQGZScWUd/h8faqOw5d6oLsOvFRCRbz1ASoB+deyMVUw==", + "license": "MIT" }, "node_modules/lodash._basebind": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._basebind/-/lodash._basebind-2.4.1.tgz", - "integrity": "sha1-6UC5690nwyfgqNqxtVkWxTQelXU=", + "integrity": 
"sha512-VGHm6DH+1UiuafQdE/DNMqxOcSyhRu0xO9+jPDq7xITRn5YOorGrHVQmavMVXCYmTm80YRTZZCn/jTW7MokwLg==", + "license": "MIT", "dependencies": { "lodash._basecreate": "~2.4.1", "lodash._setbinddata": "~2.4.1", @@ -28146,7 +28402,8 @@ "node_modules/lodash._baseclone": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._baseclone/-/lodash._baseclone-2.4.1.tgz", - "integrity": "sha1-MPgj5X4X43NdODvWK2Czh1Q7QYY=", + "integrity": "sha512-+zJVXs0VxC/Au+/7foiKzw8UaWvfSfPh20XhqK/6HFQiUeclL5fz05zY7G9yDAFItAKKZwB4cgpzGvxiwuG1wQ==", + "license": "MIT", "dependencies": { "lodash._getarray": "~2.4.1", "lodash._releasearray": "~2.4.1", @@ -28161,7 +28418,8 @@ "node_modules/lodash._basecreate": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._basecreate/-/lodash._basecreate-2.4.1.tgz", - "integrity": "sha1-+Ob1tXip405UEXm1a47uv0oofgg=", + "integrity": "sha512-8JJ3FnMPm54t3BwPLk8q8mPyQKQXm/rt9df+awr4NGtyJrtcCXM3Of1I86S6jVy1b4yAyFBb8wbKPEauuqzRmQ==", + "license": "MIT", "dependencies": { "lodash._isnative": "~2.4.1", "lodash.isobject": "~2.4.1", @@ -28171,7 +28429,8 @@ "node_modules/lodash._basecreatecallback": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._basecreatecallback/-/lodash._basecreatecallback-2.4.1.tgz", - "integrity": "sha1-fQsmdknLKeehOdAQO3wR+uhOSFE=", + "integrity": "sha512-SLczhg860fGW7AKlYcuOFstDtJuQhaANlJ4Y/jrOoRxhmVtK41vbJDH3OefVRSRkSCQo4HI82QVkAVsoGa5gSw==", + "license": "MIT", "dependencies": { "lodash._setbinddata": "~2.4.1", "lodash.bind": "~2.4.1", @@ -28182,7 +28441,8 @@ "node_modules/lodash._basecreatewrapper": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._basecreatewrapper/-/lodash._basecreatewrapper-2.4.1.tgz", - "integrity": "sha1-TTHy595+E0+/KAN2K4FQsyUZZm8=", + "integrity": "sha512-x2ja1fa/qmzbizuXgVM4QAP9svtMbdxjG8Anl9bCeDAwLOVQ1vLrA0hLb/NkpbGi9evjtkl0aWLTEoOlUdBPQA==", + "license": "MIT", "dependencies": { "lodash._basecreate": "~2.4.1", "lodash._setbinddata": "~2.4.1", @@ -28193,7 +28453,8 @@ "node_modules/lodash._createwrapper": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._createwrapper/-/lodash._createwrapper-2.4.1.tgz", - "integrity": "sha1-UdaVeXPaTtVW43KQ2MGhjFPeFgc=", + "integrity": "sha512-5TCfLt1haQpsa7bgLYRKNNE4yqhO4ZxIayN1btQmazMchO6Q8JYFRMqbJ3W+uNmMm4R0Jw7KGkZX5YfDDnywuw==", + "license": "MIT", "dependencies": { "lodash._basebind": "~2.4.1", "lodash._basecreatewrapper": "~2.4.1", @@ -28204,7 +28465,8 @@ "node_modules/lodash._getarray": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._getarray/-/lodash._getarray-2.4.1.tgz", - "integrity": "sha1-+vH3+BD6mFolHCGHQESBCUg55e4=", + "integrity": "sha512-iIrScwY3atGvLVbQL/+CNUznaPwBJg78S/JO4cTUFXRkRsZgEBhscB27cVoT4tsIOUyFu/5M/0umfHNGJ6wYwg==", + "license": "MIT", "dependencies": { "lodash._arraypool": "~2.4.1" } @@ -28212,22 +28474,26 @@ "node_modules/lodash._isnative": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._isnative/-/lodash._isnative-2.4.1.tgz", - "integrity": "sha1-PqZAS3hKe+g2x7V1gOHN95sUgyw=" + "integrity": "sha512-BOlKGKNHhCHswGOWtmVb5zBygyxN7EmTuzVOSQI6QSoGhG+kvv71gICFS1TBpnqvT1n53txK8CDK3u5D2/GZxQ==", + "license": "MIT" }, "node_modules/lodash._maxpoolsize": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._maxpoolsize/-/lodash._maxpoolsize-2.4.1.tgz", - "integrity": "sha1-nUgvRjuOZq++WcLBTtsRcGAXIzQ=" + "integrity": "sha512-xKDem1BxoIfcCtaJHotjtyfdIvZO9qrF+mv3G1+ngQmaI3MJt3Qm46i9HLk/CbzABbavUrr1/EomQT8KxtsrYA==", + "license": "MIT" }, 
"node_modules/lodash._objecttypes": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._objecttypes/-/lodash._objecttypes-2.4.1.tgz", - "integrity": "sha1-fAt/admKH3ZSn4kLDNsbTf7BHBE=" + "integrity": "sha512-XpqGh1e7hhkOzftBfWE7zt+Yn9mVHFkDhicVttvKLsoCMLVVL+xTQjfjB4X4vtznauxv0QZ5ZAeqjvat0dh62Q==", + "license": "MIT" }, "node_modules/lodash._releasearray": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._releasearray/-/lodash._releasearray-2.4.1.tgz", - "integrity": "sha1-phOWMNdtFTawfdyAliiJsIL2pkE=", + "integrity": "sha512-wwCwWX8PK/mYR5VZjcU5JFl6py/qrfLGMxzpKOfSqgA1PaZ6Z625CZLCxH1KsqyxSkOFmNm+mEYjeDpXlM4hrg==", + "license": "MIT", "dependencies": { "lodash._arraypool": "~2.4.1", "lodash._maxpoolsize": "~2.4.1" @@ -28236,7 +28502,8 @@ "node_modules/lodash._setbinddata": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._setbinddata/-/lodash._setbinddata-2.4.1.tgz", - "integrity": "sha1-98IAzRuS7yNrOZ7s9zxkjReqlNI=", + "integrity": "sha512-Vx0XKzpg2DFbQw4wrp1xSWd2sfl3W/BG6bucSRZmftS1AzbWRemCmBQDxyQTNhlLNec428PXkuuja+VNBZgu2A==", + "license": "MIT", "dependencies": { "lodash._isnative": "~2.4.1", "lodash.noop": "~2.4.1" @@ -28245,7 +28512,8 @@ "node_modules/lodash._shimkeys": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._shimkeys/-/lodash._shimkeys-2.4.1.tgz", - "integrity": "sha1-bpzJZm/wgfC1psl4uD4kLmlJ0gM=", + "integrity": "sha512-lBrglYxLD/6KAJ8IEa5Lg+YHgNAL7FyKqXg4XOUI+Du/vtniLs1ZqS+yHNKPkK54waAgkdUnDOYaWf+rv4B+AA==", + "license": "MIT", "dependencies": { "lodash._objecttypes": "~2.4.1" } @@ -28253,12 +28521,14 @@ "node_modules/lodash._slice": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._slice/-/lodash._slice-2.4.1.tgz", - "integrity": "sha1-dFz0GlNZexj2iImFREBe+isG2Q8=" + "integrity": "sha512-+odPJa4PE2UgYnQgJgkLs0UD03QU78R2ivhrFnG9GdtYOZdE6ObxOj7KiUEUlqOOgatFT+ZqSypFjDSduTigKg==", + "license": "MIT" }, "node_modules/lodash.assign": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.assign/-/lodash.assign-2.4.1.tgz", - "integrity": "sha1-hMOVlt1xGBqXsGUpE6fJZ15Jsao=", + "integrity": "sha512-AqQ4AJz5buSx9ELXWt5dONwJyVPd4NTADMKhoVYWCugjoVf172/LpvVhwmSJn4g8/Dc0S8hxTe8rt5Dob3X9KQ==", + "license": "MIT", "dependencies": { "lodash._basecreatecallback": "~2.4.1", "lodash._objecttypes": "~2.4.1", @@ -28268,7 +28538,8 @@ "node_modules/lodash.bind": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.bind/-/lodash.bind-2.4.1.tgz", - "integrity": "sha1-XRn6AFyMTSNvr0dCx7eh/Kvikmc=", + "integrity": "sha512-hn2VWYZ+N9aYncRad4jORvlGgpFrn+axnPIWRvFxjk6CWcZH5b5alI8EymYsHITI23Z9wrW/+ORq+azrVFpOfw==", + "license": "MIT", "dependencies": { "lodash._createwrapper": "~2.4.1", "lodash._slice": "~2.4.1" @@ -28282,7 +28553,8 @@ "node_modules/lodash.clonedeep": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-2.4.1.tgz", - "integrity": "sha1-8pIDtAsS/uCkXTYxZIJZvrq8eGg=", + "integrity": "sha512-zj5vReFLkR+lJOBKP1wyteZ13zut/KSmXtdCBgxcy/m4UTitcBxpeVZT7gwk8BQrztPI5dIgO4bhBppXV4rpTQ==", + "license": "MIT", "dependencies": { "lodash._baseclone": "~2.4.1", "lodash._basecreatecallback": "~2.4.1" @@ -28312,7 +28584,8 @@ "node_modules/lodash.foreach": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.foreach/-/lodash.foreach-2.4.1.tgz", - "integrity": "sha1-/j/Do0yGyUyrb5UiVgKCdB4BYwk=", + "integrity": "sha512-AvOobAkE7qBtIiHU5QHQIfveWH5Usr9pIcFIzBv7u4S6bvb3FWpFrh9ltqBY7UeL5lw6e8d+SggiUXQVyh+FpA==", + 
"license": "MIT", "dependencies": { "lodash._basecreatecallback": "~2.4.1", "lodash.forown": "~2.4.1" @@ -28321,7 +28594,8 @@ "node_modules/lodash.forown": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.forown/-/lodash.forown-2.4.1.tgz", - "integrity": "sha1-eLQer+FAX6lmRZ6kGT/VAtCEUks=", + "integrity": "sha512-VC+CKm/zSs5t3i/MHv71HZoQphuqOvez1xhjWBwHU5zAbsCYrqwHr+MyQyMk14HzA3hSRNA5lCqDMSw5G2Qscg==", + "license": "MIT", "dependencies": { "lodash._basecreatecallback": "~2.4.1", "lodash._objecttypes": "~2.4.1", @@ -28342,7 +28616,8 @@ "node_modules/lodash.identity": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.identity/-/lodash.identity-2.4.1.tgz", - "integrity": "sha1-ZpTP+mX++TH3wxzobHRZfPVg9PE=" + "integrity": "sha512-VRYX+8XipeLjorag5bz3YBBRJ+5kj8hVBzfnaHgXPZAVTYowBdY5l0M5ZnOmlAMCOXBFabQtm7f5VqjMKEji0w==", + "license": "MIT" }, "node_modules/lodash.includes": { "version": "4.3.0", @@ -28357,7 +28632,8 @@ "node_modules/lodash.isarray": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.isarray/-/lodash.isarray-2.4.1.tgz", - "integrity": "sha1-tSoybB9i9tfac6MdVAHfbvRPD6E=", + "integrity": "sha512-yRDd0z+APziDqbk0MqR6Qfwj/Qn3jLxFJbI9U8MuvdTnqIXdZ5YXyGLnwuzCpZmjr26F1GNOjKLMMZ10i/wy6A==", + "license": "MIT", "dependencies": { "lodash._isnative": "~2.4.1" } @@ -28370,12 +28646,15 @@ "node_modules/lodash.isequal": { "version": "4.5.0", "resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", - "integrity": "sha1-QVxEePK8wwEgwizhDtMib30+GOA=" + "integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==", + "deprecated": "This package is deprecated. Use require('node:util').isDeepStrictEqual instead.", + "license": "MIT" }, "node_modules/lodash.isfunction": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.isfunction/-/lodash.isfunction-2.4.1.tgz", - "integrity": "sha1-LP1XXHPkmKtX4xm3f6Aq3vE6lNE=" + "integrity": "sha512-6XcAB3izeQxPOQQNAJbbdjXbvWEt2Pn9ezPrjr4CwoLwmqsLVbsiEXD19cmmt4mbzOCOCdHzOQiUivUOJLra7w==", + "license": "MIT" }, "node_modules/lodash.isinteger": { "version": "4.0.4", @@ -28390,7 +28669,8 @@ "node_modules/lodash.isobject": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.isobject/-/lodash.isobject-2.4.1.tgz", - "integrity": "sha1-Wi5H/mmVPx7mMafrof5k0tBlWPU=", + "integrity": "sha512-sTebg2a1PoicYEZXD5PBdQcTlIJ6hUslrlWr7iV0O7n+i4596s2NQ9I5CaZ5FbXSfya/9WQsrYLANUJv9paYVA==", + "license": "MIT", "dependencies": { "lodash._objecttypes": "~2.4.1" } @@ -28408,7 +28688,8 @@ "node_modules/lodash.keys": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.keys/-/lodash.keys-2.4.1.tgz", - "integrity": "sha1-SN6kbfj/djKxDXBrissmWR4rNyc=", + "integrity": "sha512-ZpJhwvUXHSNL5wYd1RM6CUa2ZuqorG9ngoJ9Ix5Cce+uX7I5O/E06FCJdhSZ33b5dVyeQDnIlWH7B2s5uByZ7g==", + "license": "MIT", "dependencies": { "lodash._isnative": "~2.4.1", "lodash._shimkeys": "~2.4.1", @@ -28429,7 +28710,8 @@ "node_modules/lodash.noop": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.noop/-/lodash.noop-2.4.1.tgz", - "integrity": "sha1-T7VPgWZS5a4Q6PcvcXo4jHMmU4o=" + "integrity": "sha512-uNcV98/blRhInPUGQEnj9ekXXfG+q+rfoNSFZgl/eBfog9yBDW9gfUv2AHX/rAF7zZRlzWhbslGhbGQFZlCkZA==", + "license": "MIT" }, "node_modules/lodash.once": { "version": "4.1.1", @@ -28445,7 +28727,8 @@ "node_modules/lodash.support": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.support/-/lodash.support-2.4.1.tgz", - 
"integrity": "sha1-Mg4LZwMWc8KNeiu12eAzGkUkBRU=", + "integrity": "sha512-6SwqWwGFHhTXEiqB/yQgu8FYd//tm786d49y7kizHVCJH7zdzs191UQn3ES3tkkDbUddNRfkCRYqJFHtbLnbCw==", + "license": "MIT", "dependencies": { "lodash._isnative": "~2.4.1" } @@ -28835,6 +29118,15 @@ "dev": true, "license": "ISC" }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/mathjax": { "version": "3.2.2", "resolved": "https://registry.npmjs.org/mathjax/-/mathjax-3.2.2.tgz", @@ -29448,7 +29740,6 @@ "version": "2.6.0", "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", - "dev": true, "bin": { "mime": "cli.js" }, @@ -29457,9 +29748,10 @@ } }, "node_modules/mime-db": { - "version": "1.51.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", - "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", "engines": { "node": ">= 0.6" } @@ -29475,11 +29767,12 @@ } }, "node_modules/mime-types": { - "version": "2.1.34", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", - "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", "dependencies": { - "mime-db": "1.51.0" + "mime-db": "1.52.0" }, "engines": { "node": ">= 0.6" @@ -29716,7 +30009,8 @@ "node_modules/mkdirp-classic": { "version": "0.5.3", "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", - "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" + "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", + "license": "MIT" }, "node_modules/mlly": { "version": "1.7.4", @@ -30181,18 +30475,18 @@ } }, "node_modules/multer": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/multer/-/multer-2.0.0.tgz", - "integrity": "sha512-bS8rPZurbAuHGAnApbM9d4h1wSoYqrOqkE+6a64KLMK9yWU7gJXBDDVklKQ3TPi9DRb85cRs6yXaC0+cjxRtRg==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/multer/-/multer-2.0.1.tgz", + "integrity": "sha512-Ug8bXeTIUlxurg8xLTEskKShvcKDZALo1THEX5E41pYCD2sCVub5/kIRIGqWNoqV6szyLyQKV6mD4QUrWE5GCQ==", "license": "MIT", "dependencies": { "append-field": "^1.0.0", - "busboy": "^1.0.0", - "concat-stream": "^1.5.2", - "mkdirp": "^0.5.4", + "busboy": "^1.6.0", + "concat-stream": "^2.0.0", + "mkdirp": "^0.5.6", "object-assign": "^4.1.1", - "type-is": "^1.6.4", - "xtend": "^4.0.0" + "type-is": "^1.6.18", + "xtend": "^4.0.2" }, "engines": { "node": ">= 10.16.0" @@ -30322,7 +30616,8 @@ "node_modules/native-promise-only": { "version": "0.8.1", "resolved": 
"https://registry.npmjs.org/native-promise-only/-/native-promise-only-0.8.1.tgz", - "integrity": "sha1-IKMYwwy0X3H+et+/eyHJnBRy7xE=" + "integrity": "sha512-zkVhZUA3y8mbz652WrL5x0fB0ehrBkulWT3TomAQ9iDtyXZvzKeEA6GPxAItBYeNYl5yngKRX612qHOhvMkDeg==", + "license": "MIT" }, "node_modules/native-request": { "version": "1.1.0", @@ -30876,9 +31171,13 @@ } }, "node_modules/object-inspect": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", - "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -30920,14 +31219,16 @@ } }, "node_modules/object.assign": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", - "integrity": "sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==", + "version": "4.1.7", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz", + "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.5", + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", "define-properties": "^1.2.1", - "has-symbols": "^1.0.3", + "es-object-atoms": "^1.0.0", + "has-symbols": "^1.1.0", "object-keys": "^1.1.1" }, "engines": { @@ -31178,6 +31479,23 @@ "resolved": "libraries/overleaf-editor-core", "link": true }, + "node_modules/own-keys": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/own-keys/-/own-keys-1.0.1.tgz", + "integrity": "sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==", + "license": "MIT", + "dependencies": { + "get-intrinsic": "^1.2.6", + "object-keys": "^1.1.1", + "safe-push-apply": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/p-event": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/p-event/-/p-event-4.2.0.tgz", @@ -31737,12 +32055,80 @@ } }, "node_modules/path-loader": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/path-loader/-/path-loader-1.0.10.tgz", - "integrity": "sha512-CMP0v6S6z8PHeJ6NFVyVJm6WyJjIwFvyz2b0n2/4bKdS/0uZa/9sKUlYZzubrn3zuDRU0zIuEDX9DZYQ2ZI8TA==", + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/path-loader/-/path-loader-1.0.12.tgz", + "integrity": "sha512-n7oDG8B+k/p818uweWrOixY9/Dsr89o2TkCm6tOTex3fpdo2+BFDgR+KpB37mGKBRsBAlR8CIJMFN0OEy/7hIQ==", + "license": "MIT", "dependencies": { "native-promise-only": "^0.8.1", - "superagent": "^3.8.3" + "superagent": "^7.1.6" + } + }, + "node_modules/path-loader/node_modules/debug": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/path-loader/node_modules/formidable": { + "version": "2.1.5", + 
"resolved": "https://registry.npmjs.org/formidable/-/formidable-2.1.5.tgz", + "integrity": "sha512-Oz5Hwvwak/DCaXVVUtPn4oLMLLy1CdclLKO1LFgU7XzDpVMUU5UjlSLpGMocyQNNk8F6IJW9M/YdooSn2MRI+Q==", + "license": "MIT", + "dependencies": { + "@paralleldrive/cuid2": "^2.2.2", + "dezalgo": "^1.0.4", + "once": "^1.4.0", + "qs": "^6.11.0" + }, + "funding": { + "url": "https://ko-fi.com/tunnckoCore/commissions" + } + }, + "node_modules/path-loader/node_modules/semver": { + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/path-loader/node_modules/superagent": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-7.1.6.tgz", + "integrity": "sha512-gZkVCQR1gy/oUXr+kxJMLDjla434KmSOKbx5iGD30Ql+AkJQ/YlPKECJy2nhqOsHLjGHzoDTXNSjhnvWhzKk7g==", + "deprecated": "Please upgrade to v9.0.0+ as we have fixed a public vulnerability with formidable dependency. Note that v9.0.0+ requires Node.js v14.18.0+. See https://github.com/ladjs/superagent/pull/1800 for insight. This project is supported and maintained by the team at Forward Email @ https://forwardemail.net", + "license": "MIT", + "dependencies": { + "component-emitter": "^1.3.0", + "cookiejar": "^2.1.3", + "debug": "^4.3.4", + "fast-safe-stringify": "^2.1.1", + "form-data": "^4.0.0", + "formidable": "^2.0.1", + "methods": "^1.1.2", + "mime": "2.6.0", + "qs": "^6.10.3", + "readable-stream": "^3.6.0", + "semver": "^7.3.7" + }, + "engines": { + "node": ">=6.4.0 <13 || >=14" } }, "node_modules/path-parse": { @@ -35079,6 +35465,28 @@ "node": ">=4.0.0" } }, + "node_modules/reflect.getprototypeof": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", + "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==", + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.9", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.7", + "get-proto": "^1.0.1", + "which-builtin-type": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/regenerate": { "version": "1.4.2", "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz", @@ -35143,15 +35551,17 @@ } }, "node_modules/regexp.prototype.flags": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", - "integrity": "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz", + "integrity": "sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.6", + "call-bind": "^1.0.8", "define-properties": "^1.2.1", "es-errors": "^1.3.0", - "set-function-name": "^2.0.1" + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "set-function-name": "^2.0.2" }, "engines": { "node": ">= 0.4" @@ -35666,14 +36076,15 @@ } }, "node_modules/safe-array-concat": { - "version": "1.1.2", - "resolved": 
"https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz", - "integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.3.tgz", + "integrity": "sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", - "get-intrinsic": "^1.2.4", - "has-symbols": "^1.0.3", + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "get-intrinsic": "^1.2.6", + "has-symbols": "^1.1.0", "isarray": "^2.0.5" }, "engines": { @@ -35694,6 +36105,22 @@ "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==", "optional": true }, + "node_modules/safe-push-apply": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz", + "integrity": "sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "isarray": "^2.0.5" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/safe-regex": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", @@ -35704,14 +36131,14 @@ } }, "node_modules/safe-regex-test": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", - "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz", + "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.6", + "call-bound": "^1.0.2", "es-errors": "^1.3.0", - "is-regex": "^1.1.4" + "is-regex": "^1.2.1" }, "engines": { "node": ">= 0.4" @@ -36429,13 +36856,29 @@ } }, "node_modules/set-function-name": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", - "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", + "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", + "license": "MIT", "dependencies": { - "define-data-property": "^1.0.1", + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", "functions-have-names": "^1.2.3", - "has-property-descriptors": "^1.0.0" + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-proto": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/set-proto/-/set-proto-1.0.0.tgz", + "integrity": "sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -36546,14 +36989,69 @@ "integrity": "sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==" }, "node_modules/side-channel": { - "version": "1.0.6", - "resolved": 
"https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", - "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.4", - "object-inspect": "^1.13.1" + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" }, "engines": { "node": ">= 0.4" @@ -37132,7 +37630,8 @@ "node_modules/spark-md5": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/spark-md5/-/spark-md5-3.0.2.tgz", - "integrity": "sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw==" + "integrity": "sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw==", + "license": "(WTFPL OR MIT)" }, "node_modules/sparse-bitfield": { "version": "3.0.3", @@ -37221,7 +37720,8 @@ "node_modules/split-ca": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/split-ca/-/split-ca-1.0.1.tgz", - "integrity": "sha1-bIOv82kvphJW4M0ZfgXp3hV2kaY=" + "integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==", + "license": "ISC" }, "node_modules/split-string": { "version": "3.1.0", @@ -37256,6 +37756,23 @@ "es5-ext": "^0.10.53" } }, + "node_modules/ssh2": { + "version": "1.16.0", + "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz", + "integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==", + "hasInstallScript": true, + "dependencies": { + "asn1": "^0.2.6", + "bcrypt-pbkdf": "^1.0.2" + }, + "engines": { + "node": ">=10.16.0" + }, + 
"optionalDependencies": { + "cpu-features": "~0.0.10", + "nan": "^2.20.0" + } + }, "node_modules/sshpk": { "version": "1.17.0", "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz", @@ -37370,12 +37887,13 @@ "license": "MIT" }, "node_modules/stop-iteration-iterator": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz", - "integrity": "sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==", - "dev": true, + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz", + "integrity": "sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==", + "license": "MIT", "dependencies": { - "internal-slot": "^1.0.4" + "es-errors": "^1.3.0", + "internal-slot": "^1.1.0" }, "engines": { "node": ">= 0.4" @@ -37548,15 +38066,18 @@ } }, "node_modules/string.prototype.trim": { - "version": "1.2.9", - "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", - "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==", + "version": "1.2.10", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.10.tgz", + "integrity": "sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "define-data-property": "^1.1.4", "define-properties": "^1.2.1", - "es-abstract": "^1.23.0", - "es-object-atoms": "^1.0.0" + "es-abstract": "^1.23.5", + "es-object-atoms": "^1.0.0", + "has-property-descriptors": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -37566,15 +38087,19 @@ } }, "node_modules/string.prototype.trimend": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz", - "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==", + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.9.tgz", + "integrity": "sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", "define-properties": "^1.2.1", "es-object-atoms": "^1.0.0" }, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -38063,7 +38588,8 @@ "version": "3.8.3", "resolved": "https://registry.npmjs.org/superagent/-/superagent-3.8.3.tgz", "integrity": "sha512-GLQtLMCoEIK4eDv6OGtkOoSMt3D+oq0y3dsxMuYuDvaNUvuT8eFBuLmfR0iYYzHC1e8hpzC6ZsxbuP6DIalMFA==", - "deprecated": "Please upgrade to v7.0.2+ of superagent. We have fixed numerous issues with streams, form-data, attach(), filesystem errors not bubbling up (ENOENT on attach()), and all tests are now passing. See the releases tab for more information at . Thanks to @shadowgate15, @spence-s, and @niftylettuce. Superagent is sponsored by Forward Email at .", + "deprecated": "Please upgrade to v9.0.0+ as we have fixed a public vulnerability with formidable dependency. Note that v9.0.0+ requires Node.js v14.18.0+. See https://github.com/ladjs/superagent/pull/1800 for insight. 
This project is supported and maintained by the team at Forward Email @ https://forwardemail.net", + "license": "MIT", "dependencies": { "component-emitter": "^1.2.0", "cookiejar": "^2.1.0", @@ -38084,32 +38610,58 @@ "version": "3.2.7", "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "license": "MIT", "dependencies": { "ms": "^2.1.1" } }, "node_modules/superagent/node_modules/form-data": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", - "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "version": "2.5.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.3.tgz", + "integrity": "sha512-XHIrMD0NpDrNM/Ckf7XJiBbLl57KEhT3+i3yY+eWm+cqYZJQTZrKo8Y8AWKnuV5GT4scfuUGt9LzNoIx3dU1nQ==", + "license": "MIT", "dependencies": { "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "mime-types": "^2.1.35", + "safe-buffer": "^5.2.1" }, "engines": { "node": ">= 0.12" } }, + "node_modules/superagent/node_modules/form-data/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, "node_modules/superagent/node_modules/isarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "license": "MIT" }, "node_modules/superagent/node_modules/mime": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "license": "MIT", "bin": { "mime": "cli.js" }, @@ -38118,9 +38670,10 @@ } }, "node_modules/superagent/node_modules/readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "license": "MIT", "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", @@ -38135,6 +38688,7 @@ "version": "1.1.1", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "license": "MIT", "dependencies": { "safe-buffer": "~5.1.0" } @@ -38375,7 +38929,8 @@ "node_modules/swagger-converter": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/swagger-converter/-/swagger-converter-0.1.7.tgz", - "integrity": "sha1-oJdRnG8e5N1n4wjZtT3cnCslf5c=", + 
"integrity": "sha512-O2hZbWqq8x6j0uZ4qWj5dw45WPoAxKsJLJZqOgTqRtPNi8IqA+rDkDV/48S8qanS3KGv1QcVoPNLivMbyHHdAQ==", + "license": "MIT", "dependencies": { "lodash.clonedeep": "^2.4.1" } @@ -38426,12 +38981,6 @@ "lodash": "^4.17.14" } }, - "node_modules/swagger-tools/node_modules/commander": { - "version": "2.11.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.11.0.tgz", - "integrity": "sha512-b0553uYA5YAEGgyYIGYROzKQ7X5RAqedkfjiZxwi0kL1g3bOaBNNZfYkzt/CL0umgD5wc9Jec2FbB98CjkMRvQ==", - "license": "MIT" - }, "node_modules/swagger-tools/node_modules/debug": { "version": "3.2.7", "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", @@ -38530,9 +39079,9 @@ } }, "node_modules/tar-fs": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.8.tgz", - "integrity": "sha512-ZoROL70jptorGAlgAYiLoBLItEKw/fUxg9BSYK/dF/GAGYFJOJJJMvjPAKDJraCXFwadD456FCuvLWgfhMsPwg==", + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.9.tgz", + "integrity": "sha512-XF4w9Xp+ZQgifKakjZYmFdkLoSWd34VGKcsTCwlNWM7QG3ZbaxnTsaBwnjFZqHRf/rROxaR8rXnbtwdvaDI+lA==", "license": "MIT", "dependencies": { "pump": "^3.0.0", @@ -39356,14 +39905,14 @@ } }, "node_modules/traverse": { - "version": "0.6.9", - "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.9.tgz", - "integrity": "sha512-7bBrcF+/LQzSgFmT0X5YclVqQxtv7TDJ1f8Wj7ibBu/U6BMLeOpUxuZjV7rMc44UtKxlnMFigdhFAIszSX1DMg==", + "version": "0.6.11", + "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.11.tgz", + "integrity": "sha512-vxXDZg8/+p3gblxB6BhhG5yWVn1kGRlaL8O78UDXc3wRnPizB5g83dcvWV1jpDMIPnjZjOFuxlMmE82XJ4407w==", "license": "MIT", "dependencies": { - "gopd": "^1.0.1", - "typedarray.prototype.slice": "^1.0.3", - "which-typed-array": "^1.1.15" + "gopd": "^1.2.0", + "typedarray.prototype.slice": "^1.0.5", + "which-typed-array": "^1.1.18" }, "engines": { "node": ">= 0.4" @@ -39520,30 +40069,30 @@ } }, "node_modules/typed-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz", - "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", + "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", + "call-bound": "^1.0.3", "es-errors": "^1.3.0", - "is-typed-array": "^1.1.13" + "is-typed-array": "^1.1.14" }, "engines": { "node": ">= 0.4" } }, "node_modules/typed-array-byte-length": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", - "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.3.tgz", + "integrity": "sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", + "call-bind": "^1.0.8", "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-proto": "^1.0.3", - "is-typed-array": "^1.1.13" + "gopd": "^1.2.0", + "has-proto": "^1.2.0", + "is-typed-array": "^1.1.14" }, "engines": { "node": ">= 0.4" @@ -39553,17 +40102,18 @@ } }, "node_modules/typed-array-byte-offset": { - 
"version": "1.0.2", - "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz", - "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.4.tgz", + "integrity": "sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ==", "license": "MIT", "dependencies": { "available-typed-arrays": "^1.0.7", - "call-bind": "^1.0.7", + "call-bind": "^1.0.8", "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-proto": "^1.0.3", - "is-typed-array": "^1.1.13" + "gopd": "^1.2.0", + "has-proto": "^1.2.0", + "is-typed-array": "^1.1.15", + "reflect.getprototypeof": "^1.0.9" }, "engines": { "node": ">= 0.4" @@ -39573,17 +40123,17 @@ } }, "node_modules/typed-array-length": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz", - "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.7.tgz", + "integrity": "sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==", "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "for-each": "^0.3.3", "gopd": "^1.0.1", - "has-proto": "^1.0.3", "is-typed-array": "^1.1.13", - "possible-typed-array-names": "^1.0.0" + "possible-typed-array-names": "^1.0.0", + "reflect.getprototypeof": "^1.0.6" }, "engines": { "node": ">= 0.4" @@ -39598,17 +40148,19 @@ "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=" }, "node_modules/typedarray.prototype.slice": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/typedarray.prototype.slice/-/typedarray.prototype.slice-1.0.3.tgz", - "integrity": "sha512-8WbVAQAUlENo1q3c3zZYuy5k9VzBQvp8AX9WOtbvyWlLM1v5JaSRmjubLjzHF4JFtptjH/5c/i95yaElvcjC0A==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/typedarray.prototype.slice/-/typedarray.prototype.slice-1.0.5.tgz", + "integrity": "sha512-q7QNVDGTdl702bVFiI5eY4l/HkgCM6at9KhcFbgUAzezHFbOVy4+0O/lCjsABEQwbZPravVfBIiBVGo89yzHFg==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", + "call-bind": "^1.0.8", "define-properties": "^1.2.1", - "es-abstract": "^1.23.0", + "es-abstract": "^1.23.9", "es-errors": "^1.3.0", - "typed-array-buffer": "^1.0.2", - "typed-array-byte-offset": "^1.0.2" + "get-proto": "^1.0.1", + "math-intrinsics": "^1.1.0", + "typed-array-buffer": "^1.0.3", + "typed-array-byte-offset": "^1.0.4" }, "engines": { "node": ">= 0.4" @@ -39696,14 +40248,18 @@ } }, "node_modules/unbox-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", - "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz", + "integrity": "sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==", + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", + "call-bound": "^1.0.3", "has-bigints": "^1.0.2", - "has-symbols": "^1.0.3", - "which-boxed-primitive": "^1.0.2" + "has-symbols": "^1.1.0", + "which-boxed-primitive": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": 
"https://github.com/sponsors/ljharb" @@ -41388,30 +41944,64 @@ } }, "node_modules/which-boxed-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", - "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.1.1.tgz", + "integrity": "sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==", + "license": "MIT", "dependencies": { - "is-bigint": "^1.0.1", - "is-boolean-object": "^1.1.0", - "is-number-object": "^1.0.4", - "is-string": "^1.0.5", - "is-symbol": "^1.0.3" + "is-bigint": "^1.1.0", + "is-boolean-object": "^1.2.1", + "is-number-object": "^1.1.1", + "is-string": "^1.1.1", + "is-symbol": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-builtin-type": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.2.1.tgz", + "integrity": "sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "function.prototype.name": "^1.1.6", + "has-tostringtag": "^1.0.2", + "is-async-function": "^2.0.0", + "is-date-object": "^1.1.0", + "is-finalizationregistry": "^1.1.0", + "is-generator-function": "^1.0.10", + "is-regex": "^1.2.1", + "is-weakref": "^1.0.2", + "isarray": "^2.0.5", + "which-boxed-primitive": "^1.1.0", + "which-collection": "^1.0.2", + "which-typed-array": "^1.1.16" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/which-collection": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.1.tgz", - "integrity": "sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A==", - "dev": true, + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz", + "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==", + "license": "MIT", "dependencies": { - "is-map": "^2.0.1", - "is-set": "^2.0.1", - "is-weakmap": "^2.0.1", - "is-weakset": "^2.0.1" + "is-map": "^2.0.3", + "is-set": "^2.0.3", + "is-weakmap": "^2.0.2", + "is-weakset": "^2.0.3" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -41424,15 +42014,17 @@ "dev": true }, "node_modules/which-typed-array": { - "version": "1.1.15", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", - "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", + "version": "1.1.19", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", + "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", "license": "MIT", "dependencies": { "available-typed-arrays": "^1.0.7", - "call-bind": "^1.0.7", - "for-each": "^0.3.3", - "gopd": "^1.0.1", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "for-each": "^0.3.5", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", "has-tostringtag": "^1.0.2" }, "engines": { @@ -41949,6 +42541,7 @@ "version": "3.25.1", 
"resolved": "https://registry.npmjs.org/z-schema/-/z-schema-3.25.1.tgz", "integrity": "sha512-7tDlwhrBG+oYFdXNOjILSurpfQyuVgkRe3hB2q8TEssamDHB7BbLWYkYO98nTn0FibfdFroFKDjndbgufAgS/Q==", + "license": "MIT", "dependencies": { "core-js": "^2.5.7", "lodash.get": "^4.0.0", @@ -41962,23 +42555,19 @@ "commander": "^2.7.1" } }, - "node_modules/z-schema/node_modules/commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", - "optional": true - }, "node_modules/z-schema/node_modules/core-js": { "version": "2.6.12", "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.12.tgz", "integrity": "sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ==", - "deprecated": "core-js@<3.4 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Please, upgrade your dependencies to the actual version of core-js.", - "hasInstallScript": true + "deprecated": "core-js@<3.23.3 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Some versions have web compatibility issues. Please, upgrade your dependencies to the actual version of core-js.", + "hasInstallScript": true, + "license": "MIT" }, "node_modules/z-schema/node_modules/validator": { "version": "10.11.0", "resolved": "https://registry.npmjs.org/validator/-/validator-10.11.0.tgz", "integrity": "sha512-X/p3UZerAIsbBfN/IwahhYaBbY68EN/UQBWHtsbXGT5bfrH/p4NQzUCG1kF/rtKaNpnJ7jAu6NGTdSNtyNIXMw==", + "license": "MIT", "engines": { "node": ">= 0.10" } @@ -42102,13 +42691,13 @@ "async": "^3.2.5", "body-parser": "^1.20.3", "bunyan": "^1.8.15", - "dockerode": "^4.0.5", + "dockerode": "^4.0.7", "express": "^4.21.2", "lodash": "^4.17.21", "p-limit": "^3.1.0", "request": "^2.88.2", "send": "^0.19.0", - "tar-fs": "^3.0.4", + "tar-fs": "^3.0.9", "workerpool": "^6.1.5" }, "devDependencies": { @@ -42175,33 +42764,6 @@ "node": ">= 0.6" } }, - "services/clsi/node_modules/@grpc/grpc-js": { - "version": "1.13.2", - "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.13.2.tgz", - "integrity": "sha512-nnR5nmL6lxF8YBqb6gWvEgLdLh/Fn+kvAdX5hUOnt48sNSb0riz/93ASd2E5gvanPA41X6Yp25bIfGRp1SMb2g==", - "license": "Apache-2.0", - "dependencies": { - "@grpc/proto-loader": "^0.7.13", - "@js-sdsl/ordered-map": "^4.4.2" - }, - "engines": { - "node": ">=12.10.0" - } - }, - "services/clsi/node_modules/cpu-features": { - "version": "0.0.10", - "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", - "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", - "hasInstallScript": true, - "optional": true, - "dependencies": { - "buildcheck": "~0.0.6", - "nan": "^2.19.0" - }, - "engines": { - "node": ">=10.0.0" - } - }, "services/clsi/node_modules/diff": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", @@ -42211,75 +42773,6 @@ "node": ">=0.3.1" } }, - "services/clsi/node_modules/docker-modem": { - "version": "5.0.6", - "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.6.tgz", - "integrity": 
"sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", - "license": "Apache-2.0", - "dependencies": { - "debug": "^4.1.1", - "readable-stream": "^3.5.0", - "split-ca": "^1.0.1", - "ssh2": "^1.15.0" - }, - "engines": { - "node": ">= 8.0" - } - }, - "services/clsi/node_modules/dockerode": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.5.tgz", - "integrity": "sha512-ZPmKSr1k1571Mrh7oIBS/j0AqAccoecY2yH420ni5j1KyNMgnoTh4Nu4FWunh0HZIJmRSmSysJjBIpa/zyWUEA==", - "license": "Apache-2.0", - "dependencies": { - "@balena/dockerignore": "^1.0.2", - "@grpc/grpc-js": "^1.11.1", - "@grpc/proto-loader": "^0.7.13", - "docker-modem": "^5.0.6", - "protobufjs": "^7.3.2", - "tar-fs": "~2.1.2", - "uuid": "^10.0.0" - }, - "engines": { - "node": ">= 8.0" - } - }, - "services/clsi/node_modules/dockerode/node_modules/tar-fs": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.2.tgz", - "integrity": "sha512-EsaAXwxmx8UB7FRKqeozqEPop69DXcmYwTQwXvyAPF352HJsPdkVhvTaDPYqfNgruveJIJy3TA2l+2zj8LJIJA==", - "license": "MIT", - "dependencies": { - "chownr": "^1.1.1", - "mkdirp-classic": "^0.5.2", - "pump": "^3.0.0", - "tar-stream": "^2.1.4" - } - }, - "services/clsi/node_modules/protobufjs": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", - "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==", - "hasInstallScript": true, - "license": "BSD-3-Clause", - "dependencies": { - "@protobufjs/aspromise": "^1.1.2", - "@protobufjs/base64": "^1.1.2", - "@protobufjs/codegen": "^2.0.4", - "@protobufjs/eventemitter": "^1.1.0", - "@protobufjs/fetch": "^1.1.0", - "@protobufjs/float": "^1.0.2", - "@protobufjs/inquire": "^1.1.0", - "@protobufjs/path": "^1.1.2", - "@protobufjs/pool": "^1.1.0", - "@protobufjs/utf8": "^1.1.0", - "@types/node": ">=13.7.0", - "long": "^5.0.0" - }, - "engines": { - "node": ">=12.0.0" - } - }, "services/clsi/node_modules/sinon": { "version": "9.0.3", "resolved": "https://registry.npmjs.org/sinon/-/sinon-9.0.3.tgz", @@ -42299,23 +42792,6 @@ "url": "https://opencollective.com/sinon" } }, - "services/clsi/node_modules/ssh2": { - "version": "1.16.0", - "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz", - "integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==", - "hasInstallScript": true, - "dependencies": { - "asn1": "^0.2.6", - "bcrypt-pbkdf": "^1.0.2" - }, - "engines": { - "node": ">=10.16.0" - }, - "optionalDependencies": { - "cpu-features": "~0.0.10", - "nan": "^2.20.0" - } - }, "services/clsi/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -42328,19 +42804,6 @@ "node": ">=8" } }, - "services/clsi/node_modules/uuid": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", - "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" - ], - "license": "MIT", - "bin": { - "uuid": "dist/bin/uuid" - } - }, "services/contacts": { "name": "@overleaf/contacts", "dependencies": { @@ -42408,6 +42871,7 @@ "services/docstore": { "name": "@overleaf/docstore", "dependencies": { + "@overleaf/fetch-utils": "*", "@overleaf/logger": "*", "@overleaf/metrics": "*", 
"@overleaf/o-error": "*", @@ -42718,6 +43182,7 @@ "license": "Proprietary", "dependencies": { "@google-cloud/secret-manager": "^5.6.0", + "@overleaf/fetch-utils": "*", "@overleaf/logger": "*", "@overleaf/metrics": "*", "@overleaf/mongo-utils": "*", @@ -42747,6 +43212,7 @@ "mongodb": "6.12.0", "overleaf-editor-core": "*", "p-limit": "^6.2.0", + "p-queue": "^8.1.0", "pg": "^8.7.1", "pg-query-stream": "^4.2.4", "swagger-tools": "^0.10.4", @@ -44228,6 +44694,7 @@ "@overleaf/promise-utils": "*", "@overleaf/redis-wrapper": "*", "@overleaf/settings": "*", + "@overleaf/stream-utils": "*", "async": "^3.2.5", "aws-sdk": "^2.650.0", "body-parser": "^1.20.3", @@ -44687,8 +45154,7 @@ "@overleaf/settings": "*", "@phosphor-icons/react": "^2.1.7", "@slack/webhook": "^7.0.2", - "@stripe/react-stripe-js": "^3.1.1", - "@stripe/stripe-js": "^5.6.0", + "@stripe/stripe-js": "^7.3.0", "@xmldom/xmldom": "^0.7.13", "accepts": "^1.3.7", "ajv": "^8.12.0", @@ -44740,7 +45206,7 @@ "moment": "^2.29.4", "mongodb-legacy": "6.1.3", "mongoose": "8.9.5", - "multer": "overleaf/multer#199c5ff05bd375c508f4074498237baead7f5148", + "multer": "overleaf/multer#4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23", "nocache": "^2.1.0", "node-fetch": "^2.7.0", "nodemailer": "^6.7.0", @@ -44850,6 +45316,7 @@ "@uppy/react": "^3.2.1", "@uppy/utils": "^5.7.0", "@uppy/xhr-upload": "^3.6.0", + "@vitest/eslint-plugin": "1.1.44", "5to6-codemod": "^1.8.0", "abort-controller": "^3.0.0", "acorn": "^7.1.1", @@ -44891,7 +45358,6 @@ "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-testing-library": "^7.1.1", "eslint-plugin-unicorn": "^56.0.0", - "esmock": "^2.6.7", "events": "^3.3.0", "fake-indexeddb": "^6.0.0", "fetch-mock": "^12.5.2", @@ -44956,6 +45422,7 @@ "tty-browserify": "^0.0.1", "typescript": "^5.0.4", "uuid": "^9.0.1", + "vitest": "^3.1.2", "w3c-keyname": "^2.2.8", "webpack": "^5.98.0", "webpack-assets-manifest": "^5.2.1", @@ -44965,6 +45432,26 @@ "yup": "^0.32.11" } }, + "services/web/node_modules/@eslint-community/eslint-utils": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.7.0.tgz", + "integrity": "sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, "services/web/node_modules/@google-cloud/bigquery": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/@google-cloud/bigquery/-/bigquery-6.0.3.tgz", @@ -45154,6 +45641,15 @@ "lodash": "^4.17.15" } }, + "services/web/node_modules/@stripe/stripe-js": { + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/@stripe/stripe-js/-/stripe-js-7.3.0.tgz", + "integrity": "sha512-xnCyFIEI5SQnQrKkCxVj7nS5fWTZap+zuIGzmmxLMdlmgahFJaihK4zogqE8YyKKTLtrp/EldkEijSgtXsRVDg==", + "license": "MIT", + "engines": { + "node": ">=12.16" + } + }, "services/web/node_modules/@transloadit/prettier-bytes": { "version": "0.0.9", "resolved": "https://registry.npmjs.org/@transloadit/prettier-bytes/-/prettier-bytes-0.0.9.tgz", @@ -45161,6 +45657,18 @@ "dev": true, "license": "MIT" }, + "services/web/node_modules/@types/debug": { + "version": "4.1.12", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", + "integrity": 
"sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==", + "dev": true, + "license": "MIT", + "optional": true, + "peer": true, + "dependencies": { + "@types/ms": "*" + } + }, "services/web/node_modules/@types/express": { "version": "4.17.21", "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz", @@ -45179,6 +45687,143 @@ "integrity": "sha512-Z61JK7DKDtdKTWwLeElSEBcWGRLY8g95ic5FoQqI9CMx0ns/Ghep3B4DfcEimiKMvtamNVULVNKEsiwV3aQmXw==", "dev": true }, + "services/web/node_modules/@typescript-eslint/scope-manager": { + "version": "8.32.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.32.1.tgz", + "integrity": "sha512-7IsIaIDeZn7kffk7qXC3o6Z4UblZJKV3UBpkvRNpr5NSyLji7tvTcvmnMNYuYLyh26mN8W723xpo3i4MlD33vA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@typescript-eslint/types": "8.32.1", + "@typescript-eslint/visitor-keys": "8.32.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "services/web/node_modules/@typescript-eslint/types": { + "version": "8.32.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.32.1.tgz", + "integrity": "sha512-YmybwXUJcgGqgAp6bEsgpPXEg6dcCyPyCSr0CAAueacR/CCBi25G3V8gGQ2kRzQRBNol7VQknxMs9HvVa9Rvfg==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "services/web/node_modules/@typescript-eslint/typescript-estree": { + "version": "8.32.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.32.1.tgz", + "integrity": "sha512-Y3AP9EIfYwBb4kWGb+simvPaqQoT5oJuzzj9m0i6FCY6SPvlomY2Ei4UEMm7+FXtlNJbor80ximyslzaQF6xhg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@typescript-eslint/types": "8.32.1", + "@typescript-eslint/visitor-keys": "8.32.1", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <5.9.0" + } + }, + "services/web/node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "peer": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "services/web/node_modules/@typescript-eslint/utils": { + "version": "8.32.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.32.1.tgz", + "integrity": "sha512-DsSFNIgLSrc89gpq1LJB7Hm1YpuhK086DRDJSNrewcGvYloWW1vZLHBTIvarKZDcAORIy/uWNx8Gad+4oMpkSA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.7.0", + "@typescript-eslint/scope-manager": "8.32.1", + "@typescript-eslint/types": "8.32.1", + 
"@typescript-eslint/typescript-estree": "8.32.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "services/web/node_modules/@typescript-eslint/visitor-keys": { + "version": "8.32.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.32.1.tgz", + "integrity": "sha512-ar0tjQfObzhSaW3C3QNmTc5ofj0hDoNQ5XWrCy6zDyabdr0TWhCkClp+rywGNj/odAFBVzzJrK4tEq5M4Hmu4w==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@typescript-eslint/types": "8.32.1", + "eslint-visitor-keys": "^4.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "services/web/node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", + "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", + "dev": true, + "license": "Apache-2.0", + "peer": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, "services/web/node_modules/@uppy/core": { "version": "3.8.0", "resolved": "https://registry.npmjs.org/@uppy/core/-/core-3.8.0.tgz", @@ -45332,6 +45977,130 @@ "@uppy/core": "^3.8.0" } }, + "services/web/node_modules/@vitest/eslint-plugin": { + "version": "1.1.44", + "resolved": "https://registry.npmjs.org/@vitest/eslint-plugin/-/eslint-plugin-1.1.44.tgz", + "integrity": "sha512-m4XeohMT+Dj2RZfxnbiFR+Cv5dEC0H7C6TlxRQT7GK2556solm99kxgzJp/trKrZvanZcOFyw7aABykUTfWyrg==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "@typescript-eslint/utils": ">= 8.24.0", + "eslint": ">= 8.57.0", + "typescript": ">= 5.0.0", + "vitest": "*" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + }, + "vitest": { + "optional": true + } + } + }, + "services/web/node_modules/@vitest/expect": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.1.4.tgz", + "integrity": "sha512-xkD/ljeliyaClDYqHPNCiJ0plY5YIcM0OlRiZizLhlPmpXWpxnGMyTZXOHFhFeG7w9P5PBeL4IdtJ/HeQwTbQA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "3.1.4", + "@vitest/utils": "3.1.4", + "chai": "^5.2.0", + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "services/web/node_modules/@vitest/expect/node_modules/chai": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/chai/-/chai-5.2.0.tgz", + "integrity": "sha512-mCuXncKXk5iCLhfhwTc0izo0gtEmpz5CtG2y8GiOINBlMVS6v8TMRc5TaLWKS6692m9+dVVfzgeVxR5UxWHTYw==", + "dev": true, + "license": "MIT", + "dependencies": { + "assertion-error": "^2.0.1", + "check-error": "^2.1.1", + "deep-eql": "^5.0.1", + "loupe": "^3.1.0", + "pathval": "^2.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "services/web/node_modules/@vitest/pretty-format": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.1.4.tgz", + "integrity": "sha512-cqv9H9GvAEoTaoq+cYqUTCGscUjKqlJZC7PRwY5FMySVj5J+xOm1KQcCiYHJOEzOKRUhLH4R2pTwvFlWCEScsg==", + "dev": true, + "license": "MIT", + "dependencies": { 
+ "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "services/web/node_modules/@vitest/runner": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.1.4.tgz", + "integrity": "sha512-djTeF1/vt985I/wpKVFBMWUlk/I7mb5hmD5oP8K9ACRmVXgKTae3TUOtXAEBfslNKPzUQvnKhNd34nnRSYgLNQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/utils": "3.1.4", + "pathe": "^2.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "services/web/node_modules/@vitest/snapshot": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.1.4.tgz", + "integrity": "sha512-JPHf68DvuO7vilmvwdPr9TS0SuuIzHvxeaCkxYcCD4jTk67XwL45ZhEHFKIuCm8CYstgI6LZ4XbwD6ANrwMpFg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "3.1.4", + "magic-string": "^0.30.17", + "pathe": "^2.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "services/web/node_modules/@vitest/spy": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.1.4.tgz", + "integrity": "sha512-Xg1bXhu+vtPXIodYN369M86K8shGLouNjoVI78g8iAq2rFoHFdajNvJJ5A/9bPMFcfQqdaCpOgWKEoMQg/s0Yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyspy": "^3.0.2" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "services/web/node_modules/@vitest/utils": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.1.4.tgz", + "integrity": "sha512-yriMuO1cfFhmiGc8ataN51+9ooHRuURdfAZfwFd3usWynjzpLslZdYnRegTv32qdgtJTsj15FoeZe2g15fY1gg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "3.1.4", + "loupe": "^3.1.3", + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, "services/web/node_modules/agent-base": { "version": "7.1.3", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", @@ -45368,6 +46137,16 @@ "ajv": "^8.8.2" } }, + "services/web/node_modules/assertion-error": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + } + }, "services/web/node_modules/base-x": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/base-x/-/base-x-4.0.1.tgz", @@ -45405,6 +46184,16 @@ "ieee754": "^1.2.1" } }, + "services/web/node_modules/check-error": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.1.tgz", + "integrity": "sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 16" + } + }, "services/web/node_modules/csv": { "version": "6.2.5", "resolved": "https://registry.npmjs.org/csv/-/csv-6.2.5.tgz", @@ -45451,6 +46240,16 @@ } } }, + "services/web/node_modules/deep-eql": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.2.tgz", + "integrity": "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "services/web/node_modules/duplexify": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.2.tgz", 
@@ -45462,13 +46261,18 @@ "stream-shift": "^1.0.0" } }, - "services/web/node_modules/esmock": { - "version": "2.6.7", - "resolved": "https://registry.npmjs.org/esmock/-/esmock-2.6.7.tgz", - "integrity": "sha512-4DmjZ0qQIG+NQV1njHvWrua/cZEuJq56A3pSELT2BjNuol1aads7BluofCbLErdO41Ic1XCd2UMepVLpjL64YQ==", + "services/web/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", "dev": true, + "license": "Apache-2.0", + "peer": true, "engines": { - "node": ">=14.16.0" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, "services/web/node_modules/events": { @@ -45479,6 +46283,21 @@ "node": ">=0.8.x" } }, + "services/web/node_modules/fdir": { + "version": "6.4.4", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz", + "integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, "services/web/node_modules/fetch-mock": { "version": "12.5.2", "resolved": "https://registry.npmjs.org/fetch-mock/-/fetch-mock-12.5.2.tgz", @@ -45598,6 +46417,18 @@ "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==", "dev": true }, + "services/web/node_modules/jiti": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.4.2.tgz", + "integrity": "sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A==", + "dev": true, + "license": "MIT", + "optional": true, + "peer": true, + "bin": { + "jiti": "lib/jiti-cli.mjs" + } + }, "services/web/node_modules/json-schema-traverse": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", @@ -45609,6 +46440,13 @@ "integrity": "sha512-gKO5uExCXvSm6zbF562EvM+rd1kQDnB9AZBbiQVzf1ZmdDpxUSvpnAaVOP83N/31mRK8Ml8/VE8DMvsAZQ+7wg==", "dev": true }, + "services/web/node_modules/loupe": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.3.tgz", + "integrity": "sha512-kkIp7XSkP78ZxJEsSxW3712C6teJVoeHHwgo9zJ380de7IYyJ2ISlxojcH2pC5OFLewESmnRi/+XCDIEEVyoug==", + "dev": true, + "license": "MIT" + }, "services/web/node_modules/lru-cache": { "version": "7.10.1", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.10.1.tgz", @@ -45659,18 +46497,18 @@ } }, "services/web/node_modules/multer": { - "version": "2.0.0", - "resolved": "git+ssh://git@github.com/overleaf/multer.git#199c5ff05bd375c508f4074498237baead7f5148", - "integrity": "sha512-S5MlIoOgrDr+a2jLS8z7jQlbzvZ0m30U2tRwdyLrxhnnMUQZYEzkVysEv10Dw41RTpM5bQQDs563Vzl1LLhxhQ==", + "version": "2.0.1", + "resolved": "git+ssh://git@github.com/overleaf/multer.git#4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23", + "integrity": "sha512-kkvPK48OQibR5vIoTQBbZp1uWVCvT9MrW3Y0mqdhFYJP/HVJujb4eSCEU0yj+hyf0Y+H/BKCmPdM4fJnzqAO4w==", "license": "MIT", "dependencies": { "append-field": "^1.0.0", - "busboy": "^1.0.0", - "concat-stream": "^1.5.2", - "mkdirp": "^0.5.4", + "busboy": "^1.6.0", + "concat-stream": "^2.0.0", + "mkdirp": "^0.5.6", "object-assign": "^4.1.1", - "type-is": "^1.6.4", - "xtend": "^4.0.0" + "type-is": "^1.6.18", + "xtend": "^4.0.2" }, 
"engines": { "node": ">= 10.16.0" @@ -45735,6 +46573,29 @@ "isarray": "0.0.1" } }, + "services/web/node_modules/pathval": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.0.tgz", + "integrity": "sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14.16" + } + }, + "services/web/node_modules/picomatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "services/web/node_modules/retry-request": { "version": "5.0.2", "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-5.0.2.tgz", @@ -45778,6 +46639,20 @@ "url": "https://opencollective.com/webpack" } }, + "services/web/node_modules/semver": { + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "dev": true, + "license": "ISC", + "peer": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, "services/web/node_modules/sinon": { "version": "7.5.0", "resolved": "https://registry.npmjs.org/sinon/-/sinon-7.5.0.tgz", @@ -45941,6 +46816,30 @@ } } }, + "services/web/node_modules/tinyrainbow": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-2.0.0.tgz", + "integrity": "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "services/web/node_modules/ts-api-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz", + "integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=18.12" + }, + "peerDependencies": { + "typescript": ">=4.8.4" + } + }, "services/web/node_modules/uuid": { "version": "9.0.1", "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", @@ -45953,6 +46852,294 @@ "uuid": "dist/bin/uuid" } }, + "services/web/node_modules/vite-node": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.1.4.tgz", + "integrity": "sha512-6enNwYnpyDo4hEgytbmc6mYWHXDHYEn0D1/rw4Q+tnHUGtKTJsn8T1YkX6Q18wI5LCrS8CTYlBaiCqxOy2kvUA==", + "dev": true, + "license": "MIT", + "dependencies": { + "cac": "^6.7.14", + "debug": "^4.4.0", + "es-module-lexer": "^1.7.0", + "pathe": "^2.0.3", + "vite": "^5.0.0 || ^6.0.0" + }, + "bin": { + "vite-node": "vite-node.mjs" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "services/web/node_modules/vite-node/node_modules/vite": { + "version": "6.3.5", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.5.tgz", + "integrity": "sha512-cZn6NDFE7wdTpINgs++ZJ4N49W2vRp8LCKrn3Ob1kYNtOo21vfDoaV5GzBfLU4MovSAB8uNRm4jgzVQZ+mBzPQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.25.0", + "fdir": "^6.4.4", + "picomatch": "^4.0.2", + "postcss": "^8.5.3", + "rollup": "^4.34.9", + 
"tinyglobby": "^0.2.13" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "jiti": ">=1.21.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "services/web/node_modules/vitest": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.1.4.tgz", + "integrity": "sha512-Ta56rT7uWxCSJXlBtKgIlApJnT6e6IGmTYxYcmxjJ4ujuZDI59GUQgVDObXXJujOmPDBYXHK1qmaGtneu6TNIQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/expect": "3.1.4", + "@vitest/mocker": "3.1.4", + "@vitest/pretty-format": "^3.1.4", + "@vitest/runner": "3.1.4", + "@vitest/snapshot": "3.1.4", + "@vitest/spy": "3.1.4", + "@vitest/utils": "3.1.4", + "chai": "^5.2.0", + "debug": "^4.4.0", + "expect-type": "^1.2.1", + "magic-string": "^0.30.17", + "pathe": "^2.0.3", + "std-env": "^3.9.0", + "tinybench": "^2.9.0", + "tinyexec": "^0.3.2", + "tinyglobby": "^0.2.13", + "tinypool": "^1.0.2", + "tinyrainbow": "^2.0.0", + "vite": "^5.0.0 || ^6.0.0", + "vite-node": "3.1.4", + "why-is-node-running": "^2.3.0" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@edge-runtime/vm": "*", + "@types/debug": "^4.1.12", + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "@vitest/browser": "3.1.4", + "@vitest/ui": "3.1.4", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@types/debug": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser": { + "optional": true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } + } + }, + "services/web/node_modules/vitest/node_modules/@vitest/mocker": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.1.4.tgz", + "integrity": "sha512-8IJ3CvwtSw/EFXqWFL8aCMu+YyYXG2WUSrQbViOZkWTKTVicVwZ/YiEZDSqD00kX+v/+W+OnxhNWoeVKorHygA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "3.1.4", + "estree-walker": "^3.0.3", + "magic-string": "^0.30.17" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^5.0.0 || ^6.0.0" + }, + "peerDependenciesMeta": { + "msw": { + "optional": true + }, + "vite": { + "optional": true + } + } + }, + "services/web/node_modules/vitest/node_modules/chai": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/chai/-/chai-5.2.0.tgz", + "integrity": "sha512-mCuXncKXk5iCLhfhwTc0izo0gtEmpz5CtG2y8GiOINBlMVS6v8TMRc5TaLWKS6692m9+dVVfzgeVxR5UxWHTYw==", + "dev": 
true, + "license": "MIT", + "dependencies": { + "assertion-error": "^2.0.1", + "check-error": "^2.1.1", + "deep-eql": "^5.0.1", + "loupe": "^3.1.0", + "pathval": "^2.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "services/web/node_modules/vitest/node_modules/vite": { + "version": "6.3.5", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.5.tgz", + "integrity": "sha512-cZn6NDFE7wdTpINgs++ZJ4N49W2vRp8LCKrn3Ob1kYNtOo21vfDoaV5GzBfLU4MovSAB8uNRm4jgzVQZ+mBzPQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.25.0", + "fdir": "^6.4.4", + "picomatch": "^4.0.2", + "postcss": "^8.5.3", + "rollup": "^4.34.9", + "tinyglobby": "^0.2.13" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "jiti": ">=1.21.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, "services/web/node_modules/xml-crypto": { "version": "2.1.6", "resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-2.1.6.tgz", @@ -45980,6 +47167,21 @@ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, + "services/web/node_modules/yaml": { + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.0.tgz", + "integrity": "sha512-4lLa/EcQCB0cJkyts+FpIRx5G/llPxfP6VQU5KByHEhLxY3IJCH0f0Hy1MHI8sClTvsIb8qwRJ6R/ZdlDJ/leQ==", + "dev": true, + "license": "ISC", + "optional": true, + "peer": true, + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14.6" + } + }, "tools/saas-e2e": { "name": "@overleaf/saas-e2e", "devDependencies": { diff --git a/package.json b/package.json index 64fbd258ed..a51bbcd743 100644 --- a/package.json +++ b/package.json @@ -37,7 +37,7 @@ }, "swagger-tools": { "body-parser": "1.20.3", - "multer": "2.0.0", + "multer": "2.0.1", "path-to-regexp": "3.3.0", "qs": "6.13.0" } diff --git a/server-ce/config/settings.js b/server-ce/config/settings.js index 164d8b0196..a7e8219858 100644 --- a/server-ce/config/settings.js +++ b/server-ce/config/settings.js @@ -140,6 +140,7 @@ const settings = { api: redisConfig, pubsub: redisConfig, project_history: redisConfig, + references: redisConfig, project_history_migration: { host: redisConfig.host, diff --git a/server-ce/hotfix/5.5.1/Dockerfile b/server-ce/hotfix/5.5.1/Dockerfile new file mode 100644 index 0000000000..9572d29740 --- /dev/null +++ b/server-ce/hotfix/5.5.1/Dockerfile @@ -0,0 +1,28 @@ +FROM sharelatex/sharelatex:5.5.0 + + +# fix tls configuration in redis for history-v1 +COPY pr_25168.patch . +RUN patch -p1 < pr_25168.patch && rm pr_25168.patch + +# improve logging in history system +COPY pr_26086.patch . 
+RUN patch -p1 < pr_26086.patch && rm pr_26086.patch + +# fix create-user.mjs script +COPY pr_26152.patch . +RUN patch -p1 < pr_26152.patch && rm pr_26152.patch + +# check mongo featureCompatibilityVersion +COPY pr_26091.patch . +RUN patch -p1 < pr_26091.patch && rm pr_26091.patch + +# update multer and tar-fs +RUN sed -i 's/"multer": "2.0.0"/"multer": "2.0.1"/g' package.json +RUN sed -i 's/"dockerode": "^4.0.5"/"dockerode": "^4.0.7"/g' services/clsi/package.json +RUN sed -i 's/"tar-fs": "^3.0.4"/"tar-fs": "^3.0.9"/g' services/clsi/package.json +RUN sed -i 's/199c5ff05bd375c508f4074498237baead7f5148/4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23/g' services/web/package.json +COPY package-lock.json.diff . +RUN patch package-lock.json < package-lock.json.diff +RUN npm install --omit=dev +RUN npm install @paralleldrive/cuid2@2.2.2 -w services/history-v1 diff --git a/server-ce/hotfix/5.5.1/package-lock.json.diff b/server-ce/hotfix/5.5.1/package-lock.json.diff new file mode 100644 index 0000000000..ecbf851bc8 --- /dev/null +++ b/server-ce/hotfix/5.5.1/package-lock.json.diff @@ -0,0 +1,2202 @@ +4954,4956c4954,4957 +< "version": "1.8.22", +< "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.8.22.tgz", +< "integrity": "sha512-oAjDdN7fzbUi+4hZjKG96MR6KTEubAeMpQEb+77qy+3r0Ua5xTFuie6JOLr4ZZgl5g+W5/uRTS2M1V8mVAFPuA==", +--- +> "version": "1.13.4", +> "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.13.4.tgz", +> "integrity": "sha512-GsFaMXCkMqkKIvwCQjCrwH+GHbPKBjhwo/8ZuUkWHqbI73Kky9I+pQltrlT0+MWpedCoosda53lgjYfyEPgxBg==", +> "license": "Apache-2.0", +4958,4959c4959,4960 +< "@grpc/proto-loader": "^0.7.0", +< "@types/node": ">=12.12.47" +--- +> "@grpc/proto-loader": "^0.7.13", +> "@js-sdsl/ordered-map": "^4.4.2" +4962c4963 +< "node": "^8.13.0 || >=10.10.0" +--- +> "node": ">=12.10.0" +5915a5917,5928 +> "node_modules/@noble/hashes": { +> "version": "1.8.0", +> "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.8.0.tgz", +> "integrity": "sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==", +> "license": "MIT", +> "engines": { +> "node": "^14.21.3 || >=16" +> }, +> "funding": { +> "url": "https://paulmillr.com/funding/" +> } +> }, +7528a7542,7550 +> "node_modules/@paralleldrive/cuid2": { +> "version": "2.2.2", +> "resolved": "https://registry.npmjs.org/@paralleldrive/cuid2/-/cuid2-2.2.2.tgz", +> "integrity": "sha512-ZOBkgDwEdoYVlSeRbYYXs0S9MejQofiVYoTbKzy/6GQa39/q5tQU2IX46+shYnUkpEl3wc+J6wRlar7r2EK2xA==", +> "license": "MIT", +> "dependencies": { +> "@noble/hashes": "^1.1.5" +> } +> }, +13479,13481c13501,13503 +< "version": "1.0.1", +< "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", +< "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", +--- +> "version": "1.0.2", +> "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz", +> "integrity": "sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==", +13484,13485c13506,13507 +< "call-bind": "^1.0.5", +< "is-array-buffer": "^3.0.4" +--- +> "call-bound": "^1.0.3", +> "is-array-buffer": "^3.0.5" +13601,13603c13623,13625 +< "version": "1.0.3", +< "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", +< "integrity": "sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", +--- +> 
"version": "1.0.4", +> "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz", +> "integrity": "sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==", +13607c13629 +< "call-bind": "^1.0.5", +--- +> "call-bind": "^1.0.8", +13609,13613c13631,13634 +< "es-abstract": "^1.22.3", +< "es-errors": "^1.2.1", +< "get-intrinsic": "^1.2.3", +< "is-array-buffer": "^3.0.4", +< "is-shared-array-buffer": "^1.0.2" +--- +> "es-abstract": "^1.23.5", +> "es-errors": "^1.3.0", +> "get-intrinsic": "^1.2.6", +> "is-array-buffer": "^3.0.4" +13706a13728,13736 +> "node_modules/async-function": { +> "version": "1.0.0", +> "resolved": "https://registry.npmjs.org/async-function/-/async-function-1.0.0.tgz", +> "integrity": "sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==", +> "license": "MIT", +> "engines": { +> "node": ">= 0.4" +> } +> }, +14255,14257c14285,14287 +< "version": "4.0.1", +< "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-4.0.1.tgz", +< "integrity": "sha512-ilQs4fm/l9eMfWY2dY0WCIUplSUp7U0CT1vrqMg1MUdeZl4fypu5UP0XcDBK5WBQPJAKP1b7XEodISmekH/CEg==", +--- +> "version": "4.1.5", +> "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-4.1.5.tgz", +> "integrity": "sha512-1zccWBMypln0jEE05LzZt+V/8y8AQsQQqxtklqaIyg5nu6OAYFhZxPXinJTSG+kU5qyNmeLgcn9AW7eHiCHVLA==", +14261c14291 +< "bare-events": "^2.0.0", +--- +> "bare-events": "^2.5.4", +14263c14293 +< "bare-stream": "^2.0.0" +--- +> "bare-stream": "^2.6.4" +14266c14296,14304 +< "bare": ">=1.7.0" +--- +> "bare": ">=1.16.0" +> }, +> "peerDependencies": { +> "bare-buffer": "*" +> }, +> "peerDependenciesMeta": { +> "bare-buffer": { +> "optional": true +> } +14270,14272c14308,14310 +< "version": "3.6.0", +< "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-3.6.0.tgz", +< "integrity": "sha512-BUrFS5TqSBdA0LwHop4OjPJwisqxGy6JsWVqV6qaFoe965qqtaKfDzHY5T2YA1gUL0ZeeQeA+4BBc1FJTcHiPw==", +--- +> "version": "3.6.1", +> "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-3.6.1.tgz", +> "integrity": "sha512-uaIjxokhFidJP+bmmvKSgiMzj2sV5GPHaZVAIktcxcpCyBFFWO+YlikVAdhmUo2vYFvFhOXIAlldqV29L8126g==", +15110,15112c15148,15151 +< "version": "1.0.7", +< "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", +< "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", +--- +> "version": "1.0.8", +> "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", +> "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", +> "license": "MIT", +15113a15153 +> "call-bind-apply-helpers": "^1.0.0", +15115,15116d15154 +< "es-errors": "^1.3.0", +< "function-bind": "^1.1.2", +15118c15156,15185 +< "set-function-length": "^1.2.1" +--- +> "set-function-length": "^1.2.2" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +> } +> }, +> "node_modules/call-bind-apply-helpers": { +> "version": "1.0.2", +> "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", +> "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", +> "license": "MIT", +> "dependencies": { +> "es-errors": "^1.3.0", +> "function-bind": "^1.1.2" +> }, +> "engines": { +> "node": ">= 0.4" +> } +> }, +> "node_modules/call-bound": { +> "version": "1.0.4", +> "resolved": 
"https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", +> "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", +> "license": "MIT", +> "dependencies": { +> "call-bind-apply-helpers": "^1.0.2", +> "get-intrinsic": "^1.3.0" +15423c15490,15491 +< "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" +--- +> "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", +> "license": "ISC" +15751,15756c15819,15822 +< "version": "4.1.1", +< "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", +< "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", +< "engines": { +< "node": ">= 6" +< } +--- +> "version": "2.11.0", +> "resolved": "https://registry.npmjs.org/commander/-/commander-2.11.0.tgz", +> "integrity": "sha512-b0553uYA5YAEGgyYIGYROzKQ7X5RAqedkfjiZxwi0kL1g3bOaBNNZfYkzt/CL0umgD5wc9Jec2FbB98CjkMRvQ==", +> "license": "MIT" +15871,15873c15937,15939 +< "version": "1.6.2", +< "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", +< "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", +--- +> "version": "2.0.0", +> "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-2.0.0.tgz", +> "integrity": "sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==", +15875c15941 +< "node >= 0.8" +--- +> "node >= 6.0" +15876a15943 +> "license": "MIT", +15880c15947 +< "readable-stream": "^2.2.2", +--- +> "readable-stream": "^3.0.2", +15884,15910d15950 +< "node_modules/concat-stream/node_modules/isarray": { +< "version": "1.0.0", +< "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", +< "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" +< }, +< "node_modules/concat-stream/node_modules/readable-stream": { +< "version": "2.3.8", +< "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", +< "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", +< "dependencies": { +< "core-util-is": "~1.0.0", +< "inherits": "~2.0.3", +< "isarray": "~1.0.0", +< "process-nextick-args": "~2.0.0", +< "safe-buffer": "~5.1.1", +< "string_decoder": "~1.1.1", +< "util-deprecate": "~1.0.1" +< } +< }, +< "node_modules/concat-stream/node_modules/string_decoder": { +< "version": "1.1.1", +< "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", +< "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", +< "dependencies": { +< "safe-buffer": "~5.1.0" +< } +< }, +16125c16165,16166 +< "integrity": "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==" +--- +> "integrity": "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==", +> "license": "MIT" +16337a16379,16392 +> "node_modules/cpu-features": { +> "version": "0.0.10", +> "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", +> "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", +> "hasInstallScript": true, +> "optional": true, +> "dependencies": { +> "buildcheck": "~0.0.6", +> "nan": 
"^2.19.0" +> }, +> "engines": { +> "node": ">=10.0.0" +> } +> }, +17268,17270c17323,17325 +< "version": "1.0.1", +< "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz", +< "integrity": "sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==", +--- +> "version": "1.0.2", +> "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz", +> "integrity": "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==", +17273c17328 +< "call-bind": "^1.0.6", +--- +> "call-bound": "^1.0.3", +17275c17330 +< "is-data-view": "^1.0.1" +--- +> "is-data-view": "^1.0.2" +17285,17287c17340,17342 +< "version": "1.0.1", +< "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz", +< "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==", +--- +> "version": "1.0.2", +> "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz", +> "integrity": "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==", +17290c17345 +< "call-bind": "^1.0.7", +--- +> "call-bound": "^1.0.3", +17292c17347 +< "is-data-view": "^1.0.1" +--- +> "is-data-view": "^1.0.2" +17298c17353 +< "url": "https://github.com/sponsors/ljharb" +--- +> "url": "https://github.com/sponsors/inspect-js" +17302,17304c17357,17359 +< "version": "1.0.0", +< "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz", +< "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==", +--- +> "version": "1.0.1", +> "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz", +> "integrity": "sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==", +17307c17362 +< "call-bind": "^1.0.6", +--- +> "call-bound": "^1.0.2", +17666a17722,17731 +> "node_modules/dezalgo": { +> "version": "1.0.4", +> "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", +> "integrity": "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==", +> "license": "ISC", +> "dependencies": { +> "asap": "^2.0.0", +> "wrappy": "1" +> } +> }, +17725a17791,17872 +> "node_modules/docker-modem": { +> "version": "5.0.6", +> "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.6.tgz", +> "integrity": "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", +> "license": "Apache-2.0", +> "dependencies": { +> "debug": "^4.1.1", +> "readable-stream": "^3.5.0", +> "split-ca": "^1.0.1", +> "ssh2": "^1.15.0" +> }, +> "engines": { +> "node": ">= 8.0" +> } +> }, +> "node_modules/dockerode": { +> "version": "4.0.7", +> "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.7.tgz", +> "integrity": "sha512-R+rgrSRTRdU5mH14PZTCPZtW/zw3HDWNTS/1ZAQpL/5Upe/ye5K9WQkIysu4wBoiMwKynsz0a8qWuGsHgEvSAA==", +> "license": "Apache-2.0", +> "dependencies": { +> "@balena/dockerignore": "^1.0.2", +> "@grpc/grpc-js": "^1.11.1", +> "@grpc/proto-loader": "^0.7.13", +> "docker-modem": "^5.0.6", +> "protobufjs": "^7.3.2", +> "tar-fs": "~2.1.2", +> "uuid": "^10.0.0" +> }, +> "engines": { +> "node": ">= 8.0" +> } +> }, +> "node_modules/dockerode/node_modules/protobufjs": { +> "version": "7.5.3", +> "resolved": 
"https://registry.npmjs.org/protobufjs/-/protobufjs-7.5.3.tgz", +> "integrity": "sha512-sildjKwVqOI2kmFDiXQ6aEB0fjYTafpEvIBs8tOR8qI4spuL9OPROLVu2qZqi/xgCfsHIwVqlaF8JBjWFHnKbw==", +> "hasInstallScript": true, +> "license": "BSD-3-Clause", +> "dependencies": { +> "@protobufjs/aspromise": "^1.1.2", +> "@protobufjs/base64": "^1.1.2", +> "@protobufjs/codegen": "^2.0.4", +> "@protobufjs/eventemitter": "^1.1.0", +> "@protobufjs/fetch": "^1.1.0", +> "@protobufjs/float": "^1.0.2", +> "@protobufjs/inquire": "^1.1.0", +> "@protobufjs/path": "^1.1.2", +> "@protobufjs/pool": "^1.1.0", +> "@protobufjs/utf8": "^1.1.0", +> "@types/node": ">=13.7.0", +> "long": "^5.0.0" +> }, +> "engines": { +> "node": ">=12.0.0" +> } +> }, +> "node_modules/dockerode/node_modules/tar-fs": { +> "version": "2.1.3", +> "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.3.tgz", +> "integrity": "sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg==", +> "license": "MIT", +> "dependencies": { +> "chownr": "^1.1.1", +> "mkdirp-classic": "^0.5.2", +> "pump": "^3.0.0", +> "tar-stream": "^2.1.4" +> } +> }, +> "node_modules/dockerode/node_modules/uuid": { +> "version": "10.0.0", +> "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", +> "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", +> "funding": [ +> "https://github.com/sponsors/broofa", +> "https://github.com/sponsors/ctavan" +> ], +> "license": "MIT", +> "bin": { +> "uuid": "dist/bin/uuid" +> } +> }, +17926a18074,18087 +> "node_modules/dunder-proto": { +> "version": "1.0.1", +> "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", +> "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", +> "license": "MIT", +> "dependencies": { +> "call-bind-apply-helpers": "^1.0.1", +> "es-errors": "^1.3.0", +> "gopd": "^1.2.0" +> }, +> "engines": { +> "node": ">= 0.4" +> } +> }, +18212,18214c18373,18375 +< "version": "1.23.3", +< "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.3.tgz", +< "integrity": "sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==", +--- +> "version": "1.24.0", +> "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz", +> "integrity": "sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg==", +18217,18218c18378,18379 +< "array-buffer-byte-length": "^1.0.1", +< "arraybuffer.prototype.slice": "^1.0.3", +--- +> "array-buffer-byte-length": "^1.0.2", +> "arraybuffer.prototype.slice": "^1.0.4", +18220,18224c18381,18386 +< "call-bind": "^1.0.7", +< "data-view-buffer": "^1.0.1", +< "data-view-byte-length": "^1.0.1", +< "data-view-byte-offset": "^1.0.0", +< "es-define-property": "^1.0.0", +--- +> "call-bind": "^1.0.8", +> "call-bound": "^1.0.4", +> "data-view-buffer": "^1.0.2", +> "data-view-byte-length": "^1.0.2", +> "data-view-byte-offset": "^1.0.1", +> "es-define-property": "^1.0.1", +18226,18233c18388,18396 +< "es-object-atoms": "^1.0.0", +< "es-set-tostringtag": "^2.0.3", +< "es-to-primitive": "^1.2.1", +< "function.prototype.name": "^1.1.6", +< "get-intrinsic": "^1.2.4", +< "get-symbol-description": "^1.0.2", +< "globalthis": "^1.0.3", +< "gopd": "^1.0.1", +--- +> "es-object-atoms": "^1.1.1", +> "es-set-tostringtag": "^2.1.0", +> "es-to-primitive": "^1.3.0", +> "function.prototype.name": "^1.1.8", +> "get-intrinsic": "^1.3.0", +> 
"get-proto": "^1.0.1", +> "get-symbol-description": "^1.1.0", +> "globalthis": "^1.0.4", +> "gopd": "^1.2.0", +18235,18236c18398,18399 +< "has-proto": "^1.0.3", +< "has-symbols": "^1.0.3", +--- +> "has-proto": "^1.2.0", +> "has-symbols": "^1.1.0", +18238,18239c18401,18402 +< "internal-slot": "^1.0.7", +< "is-array-buffer": "^3.0.4", +--- +> "internal-slot": "^1.1.0", +> "is-array-buffer": "^3.0.5", +18241c18404 +< "is-data-view": "^1.0.1", +--- +> "is-data-view": "^1.0.2", +18243,18248c18406,18413 +< "is-regex": "^1.1.4", +< "is-shared-array-buffer": "^1.0.3", +< "is-string": "^1.0.7", +< "is-typed-array": "^1.1.13", +< "is-weakref": "^1.0.2", +< "object-inspect": "^1.13.1", +--- +> "is-regex": "^1.2.1", +> "is-set": "^2.0.3", +> "is-shared-array-buffer": "^1.0.4", +> "is-string": "^1.1.1", +> "is-typed-array": "^1.1.15", +> "is-weakref": "^1.1.1", +> "math-intrinsics": "^1.1.0", +> "object-inspect": "^1.13.4", +18250,18255c18415,18424 +< "object.assign": "^4.1.5", +< "regexp.prototype.flags": "^1.5.2", +< "safe-array-concat": "^1.1.2", +< "safe-regex-test": "^1.0.3", +< "string.prototype.trim": "^1.2.9", +< "string.prototype.trimend": "^1.0.8", +--- +> "object.assign": "^4.1.7", +> "own-keys": "^1.0.1", +> "regexp.prototype.flags": "^1.5.4", +> "safe-array-concat": "^1.1.3", +> "safe-push-apply": "^1.0.0", +> "safe-regex-test": "^1.1.0", +> "set-proto": "^1.0.0", +> "stop-iteration-iterator": "^1.1.0", +> "string.prototype.trim": "^1.2.10", +> "string.prototype.trimend": "^1.0.9", +18257,18262c18426,18431 +< "typed-array-buffer": "^1.0.2", +< "typed-array-byte-length": "^1.0.1", +< "typed-array-byte-offset": "^1.0.2", +< "typed-array-length": "^1.0.6", +< "unbox-primitive": "^1.0.2", +< "which-typed-array": "^1.1.15" +--- +> "typed-array-buffer": "^1.0.3", +> "typed-array-byte-length": "^1.0.3", +> "typed-array-byte-offset": "^1.0.4", +> "typed-array-length": "^1.0.7", +> "unbox-primitive": "^1.1.0", +> "which-typed-array": "^1.1.19" +18272,18277c18441,18444 +< "version": "1.0.0", +< "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", +< "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", +< "dependencies": { +< "get-intrinsic": "^1.2.4" +< }, +--- +> "version": "1.0.1", +> "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", +> "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", +> "license": "MIT", +18318,18320c18485,18487 +< "version": "1.0.0", +< "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", +< "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", +--- +> "version": "1.1.1", +> "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", +> "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", +18330,18332c18497,18499 +< "version": "2.0.3", +< "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", +< "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", +--- +> "version": "2.1.0", +> "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", +> "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", +18335c18502,18503 +< 
"get-intrinsic": "^1.2.4", +--- +> "es-errors": "^1.3.0", +> "get-intrinsic": "^1.2.6", +18337c18505 +< "hasown": "^2.0.1" +--- +> "hasown": "^2.0.2" +18353,18355c18521,18524 +< "version": "1.2.1", +< "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", +< "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", +--- +> "version": "1.3.0", +> "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.3.0.tgz", +> "integrity": "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==", +> "license": "MIT", +18357,18359c18526,18528 +< "is-callable": "^1.1.4", +< "is-date-object": "^1.0.1", +< "is-symbol": "^1.0.2" +--- +> "is-callable": "^1.2.7", +> "is-date-object": "^1.0.5", +> "is-symbol": "^1.0.4" +20463a20633,20638 +> "node_modules/fast-safe-stringify": { +> "version": "2.1.1", +> "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", +> "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", +> "license": "MIT" +> }, +20933,20935c21108,21111 +< "version": "0.3.3", +< "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", +< "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", +--- +> "version": "0.3.5", +> "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", +> "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", +> "license": "MIT", +20937c21113,21119 +< "is-callable": "^1.1.3" +--- +> "is-callable": "^1.2.7" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +21100a21283 +> "license": "MIT", +21272,21274c21455,21458 +< "version": "1.1.6", +< "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz", +< "integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==", +--- +> "version": "1.1.8", +> "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.8.tgz", +> "integrity": "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==", +> "license": "MIT", +21276,21279c21460,21465 +< "call-bind": "^1.0.2", +< "define-properties": "^1.2.0", +< "es-abstract": "^1.22.1", +< "functions-have-names": "^1.2.3" +--- +> "call-bind": "^1.0.8", +> "call-bound": "^1.0.3", +> "define-properties": "^1.2.1", +> "functions-have-names": "^1.2.3", +> "hasown": "^2.0.2", +> "is-callable": "^1.2.7" +21385,21387c21571,21574 +< "version": "1.2.4", +< "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", +< "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", +--- +> "version": "1.3.0", +> "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", +> "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", +> "license": "MIT", +21388a21576,21577 +> "call-bind-apply-helpers": "^1.0.2", +> "es-define-property": "^1.0.1", +21389a21579 +> "es-object-atoms": "^1.1.1", +21391,21393c21581,21585 +< "has-proto": "^1.0.1", +< "has-symbols": "^1.0.3", +< "hasown": "^2.0.0" +--- +> "get-proto": "^1.0.1", +> "gopd": "^1.2.0", +> "has-symbols": 
"^1.1.0", +> "hasown": "^2.0.2", +> "math-intrinsics": "^1.1.0" +21420a21613,21625 +> "node_modules/get-proto": { +> "version": "1.0.1", +> "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", +> "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", +> "license": "MIT", +> "dependencies": { +> "dunder-proto": "^1.0.1", +> "es-object-atoms": "^1.0.0" +> }, +> "engines": { +> "node": ">= 0.4" +> } +> }, +21437,21439c21642,21644 +< "version": "1.0.2", +< "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", +< "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", +--- +> "version": "1.1.0", +> "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.1.0.tgz", +> "integrity": "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==", +21442c21647 +< "call-bind": "^1.0.5", +--- +> "call-bound": "^1.0.3", +21444c21649 +< "get-intrinsic": "^1.2.4" +--- +> "get-intrinsic": "^1.2.6" +21664,21666c21869,21872 +< "version": "1.0.3", +< "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", +< "integrity": "sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", +--- +> "version": "1.0.4", +> "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", +> "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", +> "license": "MIT", +21668c21874,21875 +< "define-properties": "^1.1.3" +--- +> "define-properties": "^1.2.1", +> "gopd": "^1.0.1" +22055,22059c22262,22267 +< "version": "1.0.1", +< "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", +< "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", +< "dependencies": { +< "get-intrinsic": "^1.1.3" +--- +> "version": "1.2.0", +> "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", +> "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", +> "license": "MIT", +> "engines": { +> "node": ">= 0.4" +22079a22288 +> "license": "MIT", +22300,22302c22509,22511 +< "version": "1.0.3", +< "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", +< "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", +--- +> "version": "1.2.0", +> "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.2.0.tgz", +> "integrity": "sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==", +22303a22513,22515 +> "dependencies": { +> "dunder-proto": "^1.0.0" +> }, +22312,22314c22524,22527 +< "version": "1.0.3", +< "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", +< "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", +--- +> "version": "1.1.0", +> "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", +> "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", +> "license": "MIT", +23257,23259c23470,23472 +< "version": "1.0.7", +< "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", +< "integrity": 
"sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", +--- +> "version": "1.1.0", +> "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", +> "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==", +23263,23264c23476,23477 +< "hasown": "^2.0.0", +< "side-channel": "^1.0.4" +--- +> "hasown": "^2.0.2", +> "side-channel": "^1.1.0" +23402,23404c23615,23617 +< "version": "3.0.4", +< "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", +< "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", +--- +> "version": "3.0.5", +> "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz", +> "integrity": "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==", +23407,23408c23620,23622 +< "call-bind": "^1.0.2", +< "get-intrinsic": "^1.2.1" +--- +> "call-bind": "^1.0.8", +> "call-bound": "^1.0.3", +> "get-intrinsic": "^1.2.6" +23422a23637,23655 +> "node_modules/is-async-function": { +> "version": "2.1.1", +> "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.1.tgz", +> "integrity": "sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==", +> "license": "MIT", +> "dependencies": { +> "async-function": "^1.0.0", +> "call-bound": "^1.0.3", +> "get-proto": "^1.0.1", +> "has-tostringtag": "^1.0.2", +> "safe-regex-test": "^1.1.0" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +> } +> }, +23424,23426c23657,23660 +< "version": "1.0.4", +< "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", +< "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", +--- +> "version": "1.1.0", +> "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.1.0.tgz", +> "integrity": "sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==", +> "license": "MIT", +23428c23662,23665 +< "has-bigints": "^1.0.1" +--- +> "has-bigints": "^1.0.2" +> }, +> "engines": { +> "node": ">= 0.4" +23447,23449c23684,23687 +< "version": "1.1.2", +< "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", +< "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", +--- +> "version": "1.2.2", +> "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.2.tgz", +> "integrity": "sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==", +> "license": "MIT", +23451,23452c23689,23690 +< "call-bind": "^1.0.2", +< "has-tostringtag": "^1.0.0" +--- +> "call-bound": "^1.0.3", +> "has-tostringtag": "^1.0.2" +23517,23519c23755,23757 +< "version": "1.0.1", +< "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz", +< "integrity": "sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==", +--- +> "version": "1.0.2", +> "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.2.tgz", +> "integrity": "sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==", +23521a23760,23761 +> "call-bound": "^1.0.2", +> "get-intrinsic": "^1.2.6", +23532,23534c23772,23775 +< 
"version": "1.0.5", +< "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", +< "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", +--- +> "version": "1.1.0", +> "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.1.0.tgz", +> "integrity": "sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==", +> "license": "MIT", +23536c23777,23778 +< "has-tostringtag": "^1.0.0" +--- +> "call-bound": "^1.0.2", +> "has-tostringtag": "^1.0.2" +23601a23844,23858 +> "node_modules/is-finalizationregistry": { +> "version": "1.1.1", +> "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz", +> "integrity": "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==", +> "license": "MIT", +> "dependencies": { +> "call-bound": "^1.0.3" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +> } +> }, +23688,23691c23945,23951 +< "version": "2.0.2", +< "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.2.tgz", +< "integrity": "sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg==", +< "dev": true, +--- +> "version": "2.0.3", +> "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", +> "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", +> "license": "MIT", +> "engines": { +> "node": ">= 0.4" +> }, +23753,23755c24013,24016 +< "version": "1.0.7", +< "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", +< "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", +--- +> "version": "1.1.1", +> "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz", +> "integrity": "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==", +> "license": "MIT", +23757c24018,24019 +< "has-tostringtag": "^1.0.0" +--- +> "call-bound": "^1.0.3", +> "has-tostringtag": "^1.0.2" +23817,23819c24079,24082 +< "version": "1.1.4", +< "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", +< "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", +--- +> "version": "1.2.1", +> "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", +> "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", +> "license": "MIT", +23821,23822c24084,24087 +< "call-bind": "^1.0.2", +< "has-tostringtag": "^1.0.0" +--- +> "call-bound": "^1.0.2", +> "gopd": "^1.2.0", +> "has-tostringtag": "^1.0.2", +> "hasown": "^2.0.2" +23832,23835c24097,24103 +< "version": "2.0.2", +< "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.2.tgz", +< "integrity": "sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g==", +< "dev": true, +--- +> "version": "2.0.3", +> "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", +> "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", +> "license": "MIT", +> "engines": { +> "node": ">= 0.4" +> }, +23848,23850c24116,24118 +< "version": "1.0.3", +< "resolved": 
"https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", +< "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", +--- +> "version": "1.0.4", +> "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz", +> "integrity": "sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==", +23853c24121 +< "call-bind": "^1.0.7" +--- +> "call-bound": "^1.0.3" +23874,23876c24142,24145 +< "version": "1.0.7", +< "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", +< "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", +--- +> "version": "1.1.1", +> "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.1.1.tgz", +> "integrity": "sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==", +> "license": "MIT", +23878c24147,24148 +< "has-tostringtag": "^1.0.0" +--- +> "call-bound": "^1.0.3", +> "has-tostringtag": "^1.0.2" +23888,23890c24158,24161 +< "version": "1.0.4", +< "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", +< "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", +--- +> "version": "1.1.1", +> "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.1.1.tgz", +> "integrity": "sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==", +> "license": "MIT", +23892c24163,24165 +< "has-symbols": "^1.0.2" +--- +> "call-bound": "^1.0.2", +> "has-symbols": "^1.1.0", +> "safe-regex-test": "^1.1.0" +23902,23904c24175,24177 +< "version": "1.1.13", +< "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", +< "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", +--- +> "version": "1.1.15", +> "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", +> "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", +23907c24180 +< "which-typed-array": "^1.1.14" +--- +> "which-typed-array": "^1.1.16" +23943,23946c24216,24222 +< "version": "2.0.1", +< "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.1.tgz", +< "integrity": "sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA==", +< "dev": true, +--- +> "version": "2.0.2", +> "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", +> "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", +> "license": "MIT", +> "engines": { +> "node": ">= 0.4" +> }, +23952,23954c24228,24231 +< "version": "1.0.2", +< "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", +< "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", +--- +> "version": "1.1.1", +> "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.1.1.tgz", +> "integrity": "sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==", +> "license": "MIT", +23956c24233,24236 +< "call-bind": "^1.0.2" +--- +> "call-bound": "^1.0.3" +> }, +> "engines": { +> "node": ">= 0.4" +23963,23966c24243,24246 +< "version": "2.0.2", +< "resolved": 
"https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.2.tgz", +< "integrity": "sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg==", +< "dev": true, +--- +> "version": "2.0.4", +> "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.4.tgz", +> "integrity": "sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==", +> "license": "MIT", +23968,23969c24248,24252 +< "call-bind": "^1.0.2", +< "get-intrinsic": "^1.1.1" +--- +> "call-bound": "^1.0.3", +> "get-intrinsic": "^1.2.6" +> }, +> "engines": { +> "node": ">= 0.4" +24543a24827 +> "license": "MIT", +24564a24849 +> "license": "MIT", +24568a24854,24862 +> "node_modules/json-refs/node_modules/commander": { +> "version": "4.1.1", +> "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", +> "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", +> "license": "MIT", +> "engines": { +> "node": ">= 6" +> } +> }, +24572a24867 +> "license": "MIT", +24584a24880 +> "license": "MIT", +25175,25187d25470 +< "node_modules/less/node_modules/mime": { +< "version": "1.6.0", +< "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", +< "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", +< "dev": true, +< "optional": true, +< "bin": { +< "mime": "cli.js" +< }, +< "engines": { +< "node": ">=4" +< } +< }, +25335c25618,25619 +< "integrity": "sha1-6I7suS4ruEyQZWEv2VigcZzUf5Q=" +--- +> "integrity": "sha512-tC2aLC7bbkDXKNrjDu9OLiVx9pFIvjinID2eD9PzNdAQGZScWUd/h8faqOw5d6oLsOvFRCRbz1ASoB+deyMVUw==", +> "license": "MIT" +25340c25624,25625 +< "integrity": "sha1-6UC5690nwyfgqNqxtVkWxTQelXU=", +--- +> "integrity": "sha512-VGHm6DH+1UiuafQdE/DNMqxOcSyhRu0xO9+jPDq7xITRn5YOorGrHVQmavMVXCYmTm80YRTZZCn/jTW7MokwLg==", +> "license": "MIT", +25351c25636,25637 +< "integrity": "sha1-MPgj5X4X43NdODvWK2Czh1Q7QYY=", +--- +> "integrity": "sha512-+zJVXs0VxC/Au+/7foiKzw8UaWvfSfPh20XhqK/6HFQiUeclL5fz05zY7G9yDAFItAKKZwB4cgpzGvxiwuG1wQ==", +> "license": "MIT", +25366c25652,25653 +< "integrity": "sha1-+Ob1tXip405UEXm1a47uv0oofgg=", +--- +> "integrity": "sha512-8JJ3FnMPm54t3BwPLk8q8mPyQKQXm/rt9df+awr4NGtyJrtcCXM3Of1I86S6jVy1b4yAyFBb8wbKPEauuqzRmQ==", +> "license": "MIT", +25376c25663,25664 +< "integrity": "sha1-fQsmdknLKeehOdAQO3wR+uhOSFE=", +--- +> "integrity": "sha512-SLczhg860fGW7AKlYcuOFstDtJuQhaANlJ4Y/jrOoRxhmVtK41vbJDH3OefVRSRkSCQo4HI82QVkAVsoGa5gSw==", +> "license": "MIT", +25387c25675,25676 +< "integrity": "sha1-TTHy595+E0+/KAN2K4FQsyUZZm8=", +--- +> "integrity": "sha512-x2ja1fa/qmzbizuXgVM4QAP9svtMbdxjG8Anl9bCeDAwLOVQ1vLrA0hLb/NkpbGi9evjtkl0aWLTEoOlUdBPQA==", +> "license": "MIT", +25398c25687,25688 +< "integrity": "sha1-UdaVeXPaTtVW43KQ2MGhjFPeFgc=", +--- +> "integrity": "sha512-5TCfLt1haQpsa7bgLYRKNNE4yqhO4ZxIayN1btQmazMchO6Q8JYFRMqbJ3W+uNmMm4R0Jw7KGkZX5YfDDnywuw==", +> "license": "MIT", +25409c25699,25700 +< "integrity": "sha1-+vH3+BD6mFolHCGHQESBCUg55e4=", +--- +> "integrity": "sha512-iIrScwY3atGvLVbQL/+CNUznaPwBJg78S/JO4cTUFXRkRsZgEBhscB27cVoT4tsIOUyFu/5M/0umfHNGJ6wYwg==", +> "license": "MIT", +25417c25708,25709 +< "integrity": "sha1-PqZAS3hKe+g2x7V1gOHN95sUgyw=" +--- +> "integrity": "sha512-BOlKGKNHhCHswGOWtmVb5zBygyxN7EmTuzVOSQI6QSoGhG+kvv71gICFS1TBpnqvT1n53txK8CDK3u5D2/GZxQ==", +> "license": "MIT" +25422c25714,25715 +< "integrity": "sha1-nUgvRjuOZq++WcLBTtsRcGAXIzQ=" +--- +> "integrity": 
"sha512-xKDem1BxoIfcCtaJHotjtyfdIvZO9qrF+mv3G1+ngQmaI3MJt3Qm46i9HLk/CbzABbavUrr1/EomQT8KxtsrYA==", +> "license": "MIT" +25427c25720,25721 +< "integrity": "sha1-fAt/admKH3ZSn4kLDNsbTf7BHBE=" +--- +> "integrity": "sha512-XpqGh1e7hhkOzftBfWE7zt+Yn9mVHFkDhicVttvKLsoCMLVVL+xTQjfjB4X4vtznauxv0QZ5ZAeqjvat0dh62Q==", +> "license": "MIT" +25432c25726,25727 +< "integrity": "sha1-phOWMNdtFTawfdyAliiJsIL2pkE=", +--- +> "integrity": "sha512-wwCwWX8PK/mYR5VZjcU5JFl6py/qrfLGMxzpKOfSqgA1PaZ6Z625CZLCxH1KsqyxSkOFmNm+mEYjeDpXlM4hrg==", +> "license": "MIT", +25441c25736,25737 +< "integrity": "sha1-98IAzRuS7yNrOZ7s9zxkjReqlNI=", +--- +> "integrity": "sha512-Vx0XKzpg2DFbQw4wrp1xSWd2sfl3W/BG6bucSRZmftS1AzbWRemCmBQDxyQTNhlLNec428PXkuuja+VNBZgu2A==", +> "license": "MIT", +25450c25746,25747 +< "integrity": "sha1-bpzJZm/wgfC1psl4uD4kLmlJ0gM=", +--- +> "integrity": "sha512-lBrglYxLD/6KAJ8IEa5Lg+YHgNAL7FyKqXg4XOUI+Du/vtniLs1ZqS+yHNKPkK54waAgkdUnDOYaWf+rv4B+AA==", +> "license": "MIT", +25458c25755,25756 +< "integrity": "sha1-dFz0GlNZexj2iImFREBe+isG2Q8=" +--- +> "integrity": "sha512-+odPJa4PE2UgYnQgJgkLs0UD03QU78R2ivhrFnG9GdtYOZdE6ObxOj7KiUEUlqOOgatFT+ZqSypFjDSduTigKg==", +> "license": "MIT" +25463c25761,25762 +< "integrity": "sha1-hMOVlt1xGBqXsGUpE6fJZ15Jsao=", +--- +> "integrity": "sha512-AqQ4AJz5buSx9ELXWt5dONwJyVPd4NTADMKhoVYWCugjoVf172/LpvVhwmSJn4g8/Dc0S8hxTe8rt5Dob3X9KQ==", +> "license": "MIT", +25473c25772,25773 +< "integrity": "sha1-XRn6AFyMTSNvr0dCx7eh/Kvikmc=", +--- +> "integrity": "sha512-hn2VWYZ+N9aYncRad4jORvlGgpFrn+axnPIWRvFxjk6CWcZH5b5alI8EymYsHITI23Z9wrW/+ORq+azrVFpOfw==", +> "license": "MIT", +25487c25787,25788 +< "integrity": "sha1-8pIDtAsS/uCkXTYxZIJZvrq8eGg=", +--- +> "integrity": "sha512-zj5vReFLkR+lJOBKP1wyteZ13zut/KSmXtdCBgxcy/m4UTitcBxpeVZT7gwk8BQrztPI5dIgO4bhBppXV4rpTQ==", +> "license": "MIT", +25517c25818,25819 +< "integrity": "sha1-/j/Do0yGyUyrb5UiVgKCdB4BYwk=", +--- +> "integrity": "sha512-AvOobAkE7qBtIiHU5QHQIfveWH5Usr9pIcFIzBv7u4S6bvb3FWpFrh9ltqBY7UeL5lw6e8d+SggiUXQVyh+FpA==", +> "license": "MIT", +25526c25828,25829 +< "integrity": "sha1-eLQer+FAX6lmRZ6kGT/VAtCEUks=", +--- +> "integrity": "sha512-VC+CKm/zSs5t3i/MHv71HZoQphuqOvez1xhjWBwHU5zAbsCYrqwHr+MyQyMk14HzA3hSRNA5lCqDMSw5G2Qscg==", +> "license": "MIT", +25547c25850,25851 +< "integrity": "sha1-ZpTP+mX++TH3wxzobHRZfPVg9PE=" +--- +> "integrity": "sha512-VRYX+8XipeLjorag5bz3YBBRJ+5kj8hVBzfnaHgXPZAVTYowBdY5l0M5ZnOmlAMCOXBFabQtm7f5VqjMKEji0w==", +> "license": "MIT" +25562c25866,25867 +< "integrity": "sha1-tSoybB9i9tfac6MdVAHfbvRPD6E=", +--- +> "integrity": "sha512-yRDd0z+APziDqbk0MqR6Qfwj/Qn3jLxFJbI9U8MuvdTnqIXdZ5YXyGLnwuzCpZmjr26F1GNOjKLMMZ10i/wy6A==", +> "license": "MIT", +25575c25880,25882 +< "integrity": "sha1-QVxEePK8wwEgwizhDtMib30+GOA=" +--- +> "integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==", +> "deprecated": "This package is deprecated. 
Use require('node:util').isDeepStrictEqual instead.", +> "license": "MIT" +25580c25887,25888 +< "integrity": "sha1-LP1XXHPkmKtX4xm3f6Aq3vE6lNE=" +--- +> "integrity": "sha512-6XcAB3izeQxPOQQNAJbbdjXbvWEt2Pn9ezPrjr4CwoLwmqsLVbsiEXD19cmmt4mbzOCOCdHzOQiUivUOJLra7w==", +> "license": "MIT" +25595c25903,25904 +< "integrity": "sha1-Wi5H/mmVPx7mMafrof5k0tBlWPU=", +--- +> "integrity": "sha512-sTebg2a1PoicYEZXD5PBdQcTlIJ6hUslrlWr7iV0O7n+i4596s2NQ9I5CaZ5FbXSfya/9WQsrYLANUJv9paYVA==", +> "license": "MIT", +25613c25922,25923 +< "integrity": "sha1-SN6kbfj/djKxDXBrissmWR4rNyc=", +--- +> "integrity": "sha512-ZpJhwvUXHSNL5wYd1RM6CUa2ZuqorG9ngoJ9Ix5Cce+uX7I5O/E06FCJdhSZ33b5dVyeQDnIlWH7B2s5uByZ7g==", +> "license": "MIT", +25634c25944,25945 +< "integrity": "sha1-T7VPgWZS5a4Q6PcvcXo4jHMmU4o=" +--- +> "integrity": "sha512-uNcV98/blRhInPUGQEnj9ekXXfG+q+rfoNSFZgl/eBfog9yBDW9gfUv2AHX/rAF7zZRlzWhbslGhbGQFZlCkZA==", +> "license": "MIT" +25650c25961,25962 +< "integrity": "sha1-Mg4LZwMWc8KNeiu12eAzGkUkBRU=", +--- +> "integrity": "sha512-6SwqWwGFHhTXEiqB/yQgu8FYd//tm786d49y7kizHVCJH7zdzs191UQn3ES3tkkDbUddNRfkCRYqJFHtbLnbCw==", +> "license": "MIT", +25997a26310,26318 +> "node_modules/math-intrinsics": { +> "version": "1.1.0", +> "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", +> "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", +> "license": "MIT", +> "engines": { +> "node": ">= 0.4" +> } +> }, +26577a26899,26910 +> "node_modules/mime": { +> "version": "1.6.0", +> "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", +> "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", +> "license": "MIT", +> "bin": { +> "mime": "cli.js" +> }, +> "engines": { +> "node": ">=4" +> } +> }, +26579,26581c26912,26915 +< "version": "1.51.0", +< "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", +< "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", +--- +> "version": "1.52.0", +> "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", +> "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", +> "license": "MIT", +26597,26599c26931,26934 +< "version": "2.1.34", +< "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", +< "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", +--- +> "version": "2.1.35", +> "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", +> "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", +> "license": "MIT", +26601c26936 +< "mime-db": "1.51.0" +--- +> "mime-db": "1.52.0" +26792c27127,27128 +< "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" +--- +> "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", +> "license": "MIT" +27248,27250c27584,27586 +< "version": "2.0.0", +< "resolved": "https://registry.npmjs.org/multer/-/multer-2.0.0.tgz", +< "integrity": "sha512-bS8rPZurbAuHGAnApbM9d4h1wSoYqrOqkE+6a64KLMK9yWU7gJXBDDVklKQ3TPi9DRb85cRs6yXaC0+cjxRtRg==", +--- +> "version": "2.0.1", +> "resolved": "https://registry.npmjs.org/multer/-/multer-2.0.1.tgz", +> "integrity": 
"sha512-Ug8bXeTIUlxurg8xLTEskKShvcKDZALo1THEX5E41pYCD2sCVub5/kIRIGqWNoqV6szyLyQKV6mD4QUrWE5GCQ==", +27254,27256c27590,27592 +< "busboy": "^1.0.0", +< "concat-stream": "^1.5.2", +< "mkdirp": "^0.5.4", +--- +> "busboy": "^1.6.0", +> "concat-stream": "^2.0.0", +> "mkdirp": "^0.5.6", +27258,27259c27594,27595 +< "type-is": "^1.6.4", +< "xtend": "^4.0.0" +--- +> "type-is": "^1.6.18", +> "xtend": "^4.0.2" +27384c27720,27721 +< "integrity": "sha1-IKMYwwy0X3H+et+/eyHJnBRy7xE=" +--- +> "integrity": "sha512-zkVhZUA3y8mbz652WrL5x0fB0ehrBkulWT3TomAQ9iDtyXZvzKeEA6GPxAItBYeNYl5yngKRX612qHOhvMkDeg==", +> "license": "MIT" +27842,27844c28179,28185 +< "version": "1.13.1", +< "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", +< "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", +--- +> "version": "1.13.4", +> "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", +> "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", +> "license": "MIT", +> "engines": { +> "node": ">= 0.4" +> }, +27886,27888c28227,28229 +< "version": "4.1.5", +< "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", +< "integrity": "sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==", +--- +> "version": "4.1.7", +> "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz", +> "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==", +27891c28232,28233 +< "call-bind": "^1.0.5", +--- +> "call-bind": "^1.0.8", +> "call-bound": "^1.0.3", +27893c28235,28236 +< "has-symbols": "^1.0.3", +--- +> "es-object-atoms": "^1.0.0", +> "has-symbols": "^1.1.0", +28129a28473,28489 +> "node_modules/own-keys": { +> "version": "1.0.1", +> "resolved": "https://registry.npmjs.org/own-keys/-/own-keys-1.0.1.tgz", +> "integrity": "sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==", +> "license": "MIT", +> "dependencies": { +> "get-intrinsic": "^1.2.6", +> "object-keys": "^1.1.1", +> "safe-push-apply": "^1.0.0" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +> } +> }, +28684,28686c29044,29047 +< "version": "1.0.10", +< "resolved": "https://registry.npmjs.org/path-loader/-/path-loader-1.0.10.tgz", +< "integrity": "sha512-CMP0v6S6z8PHeJ6NFVyVJm6WyJjIwFvyz2b0n2/4bKdS/0uZa/9sKUlYZzubrn3zuDRU0zIuEDX9DZYQ2ZI8TA==", +--- +> "version": "1.0.12", +> "resolved": "https://registry.npmjs.org/path-loader/-/path-loader-1.0.12.tgz", +> "integrity": "sha512-n7oDG8B+k/p818uweWrOixY9/Dsr89o2TkCm6tOTex3fpdo2+BFDgR+KpB37mGKBRsBAlR8CIJMFN0OEy/7hIQ==", +> "license": "MIT", +28689c29050,29129 +< "superagent": "^3.8.3" +--- +> "superagent": "^7.1.6" +> } +> }, +> "node_modules/path-loader/node_modules/debug": { +> "version": "4.4.1", +> "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", +> "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", +> "license": "MIT", +> "dependencies": { +> "ms": "^2.1.3" +> }, +> "engines": { +> "node": ">=6.0" +> }, +> "peerDependenciesMeta": { +> "supports-color": { +> "optional": true +> } +> } +> }, +> "node_modules/path-loader/node_modules/formidable": { +> "version": "2.1.5", +> "resolved": 
"https://registry.npmjs.org/formidable/-/formidable-2.1.5.tgz", +> "integrity": "sha512-Oz5Hwvwak/DCaXVVUtPn4oLMLLy1CdclLKO1LFgU7XzDpVMUU5UjlSLpGMocyQNNk8F6IJW9M/YdooSn2MRI+Q==", +> "license": "MIT", +> "dependencies": { +> "@paralleldrive/cuid2": "^2.2.2", +> "dezalgo": "^1.0.4", +> "once": "^1.4.0", +> "qs": "^6.11.0" +> }, +> "funding": { +> "url": "https://ko-fi.com/tunnckoCore/commissions" +> } +> }, +> "node_modules/path-loader/node_modules/mime": { +> "version": "2.6.0", +> "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", +> "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", +> "license": "MIT", +> "bin": { +> "mime": "cli.js" +> }, +> "engines": { +> "node": ">=4.0.0" +> } +> }, +> "node_modules/path-loader/node_modules/semver": { +> "version": "7.7.2", +> "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", +> "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", +> "license": "ISC", +> "bin": { +> "semver": "bin/semver.js" +> }, +> "engines": { +> "node": ">=10" +> } +> }, +> "node_modules/path-loader/node_modules/superagent": { +> "version": "7.1.6", +> "resolved": "https://registry.npmjs.org/superagent/-/superagent-7.1.6.tgz", +> "integrity": "sha512-gZkVCQR1gy/oUXr+kxJMLDjla434KmSOKbx5iGD30Ql+AkJQ/YlPKECJy2nhqOsHLjGHzoDTXNSjhnvWhzKk7g==", +> "deprecated": "Please upgrade to v9.0.0+ as we have fixed a public vulnerability with formidable dependency. Note that v9.0.0+ requires Node.js v14.18.0+. See https://github.com/ladjs/superagent/pull/1800 for insight. This project is supported and maintained by the team at Forward Email @ https://forwardemail.net", +> "license": "MIT", +> "dependencies": { +> "component-emitter": "^1.3.0", +> "cookiejar": "^2.1.3", +> "debug": "^4.3.4", +> "fast-safe-stringify": "^2.1.1", +> "form-data": "^4.0.0", +> "formidable": "^2.0.1", +> "methods": "^1.1.2", +> "mime": "2.6.0", +> "qs": "^6.10.3", +> "readable-stream": "^3.6.0", +> "semver": "^7.3.7" +> }, +> "engines": { +> "node": ">=6.4.0 <13 || >=14" +31146a31587,31608 +> "node_modules/reflect.getprototypeof": { +> "version": "1.0.10", +> "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", +> "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==", +> "license": "MIT", +> "dependencies": { +> "call-bind": "^1.0.8", +> "define-properties": "^1.2.1", +> "es-abstract": "^1.23.9", +> "es-errors": "^1.3.0", +> "es-object-atoms": "^1.0.0", +> "get-intrinsic": "^1.2.7", +> "get-proto": "^1.0.1", +> "which-builtin-type": "^1.2.1" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +> } +> }, +31211,31213c31673,31675 +< "version": "1.5.2", +< "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", +< "integrity": "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", +--- +> "version": "1.5.4", +> "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz", +> "integrity": "sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==", +31216c31678 +< "call-bind": "^1.0.6", +--- +> "call-bind": "^1.0.8", +31219c31681,31683 +< "set-function-name": "^2.0.1" +--- +> "get-proto": "^1.0.1", +> "gopd": "^1.2.0", +> "set-function-name": "^2.0.2" 
+31679,31681c32143,32145 +< "version": "1.1.2", +< "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz", +< "integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==", +--- +> "version": "1.1.3", +> "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.3.tgz", +> "integrity": "sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==", +31684,31686c32148,32151 +< "call-bind": "^1.0.7", +< "get-intrinsic": "^1.2.4", +< "has-symbols": "^1.0.3", +--- +> "call-bind": "^1.0.8", +> "call-bound": "^1.0.2", +> "get-intrinsic": "^1.2.6", +> "has-symbols": "^1.1.0", +31706a32172,32187 +> "node_modules/safe-push-apply": { +> "version": "1.0.0", +> "resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz", +> "integrity": "sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==", +> "license": "MIT", +> "dependencies": { +> "es-errors": "^1.3.0", +> "isarray": "^2.0.5" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +> } +> }, +31717,31719c32198,32200 +< "version": "1.0.3", +< "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", +< "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", +--- +> "version": "1.1.0", +> "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz", +> "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==", +31722c32203 +< "call-bind": "^1.0.6", +--- +> "call-bound": "^1.0.2", +31724c32205 +< "is-regex": "^1.1.4" +--- +> "is-regex": "^1.2.1" +32123,32133d32603 +< "node_modules/send/node_modules/mime": { +< "version": "1.6.0", +< "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", +< "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", +< "bin": { +< "mime": "cli.js" +< }, +< "engines": { +< "node": ">=4" +< } +< }, +32288,32290c32758,32761 +< "version": "2.0.1", +< "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", +< "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==", +--- +> "version": "2.0.2", +> "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", +> "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", +> "license": "MIT", +32292c32763,32764 +< "define-data-property": "^1.0.1", +--- +> "define-data-property": "^1.1.4", +> "es-errors": "^1.3.0", +32294c32766,32780 +< "has-property-descriptors": "^1.0.0" +--- +> "has-property-descriptors": "^1.0.2" +> }, +> "engines": { +> "node": ">= 0.4" +> } +> }, +> "node_modules/set-proto": { +> "version": "1.0.0", +> "resolved": "https://registry.npmjs.org/set-proto/-/set-proto-1.0.0.tgz", +> "integrity": "sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==", +> "license": "MIT", +> "dependencies": { +> "dunder-proto": "^1.0.1", +> "es-errors": "^1.3.0", +> "es-object-atoms": "^1.0.0" +32391,32393c32877,32880 +< "version": "1.0.6", +< "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", +< "integrity": 
"sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", +--- +> "version": "1.1.0", +> "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", +> "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", +> "license": "MIT", +32395d32881 +< "call-bind": "^1.0.7", +32397,32398c32883,32939 +< "get-intrinsic": "^1.2.4", +< "object-inspect": "^1.13.1" +--- +> "object-inspect": "^1.13.3", +> "side-channel-list": "^1.0.0", +> "side-channel-map": "^1.0.1", +> "side-channel-weakmap": "^1.0.2" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +> } +> }, +> "node_modules/side-channel-list": { +> "version": "1.0.0", +> "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", +> "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", +> "license": "MIT", +> "dependencies": { +> "es-errors": "^1.3.0", +> "object-inspect": "^1.13.3" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +> } +> }, +> "node_modules/side-channel-map": { +> "version": "1.0.1", +> "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", +> "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", +> "license": "MIT", +> "dependencies": { +> "call-bound": "^1.0.2", +> "es-errors": "^1.3.0", +> "get-intrinsic": "^1.2.5", +> "object-inspect": "^1.13.3" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +> } +> }, +> "node_modules/side-channel-weakmap": { +> "version": "1.0.2", +> "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", +> "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", +> "license": "MIT", +> "dependencies": { +> "call-bound": "^1.0.2", +> "es-errors": "^1.3.0", +> "get-intrinsic": "^1.2.5", +> "object-inspect": "^1.13.3", +> "side-channel-map": "^1.0.1" +32871c33412,33413 +< "integrity": "sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw==" +--- +> "integrity": "sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw==", +> "license": "(WTFPL OR MIT)" +32960c33502,33503 +< "integrity": "sha1-bIOv82kvphJW4M0ZfgXp3hV2kaY=" +--- +> "integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==", +> "license": "ISC" +32994a33538,33554 +> "node_modules/ssh2": { +> "version": "1.16.0", +> "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz", +> "integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==", +> "hasInstallScript": true, +> "dependencies": { +> "asn1": "^0.2.6", +> "bcrypt-pbkdf": "^1.0.2" +> }, +> "engines": { +> "node": ">=10.16.0" +> }, +> "optionalDependencies": { +> "cpu-features": "~0.0.10", +> "nan": "^2.20.0" +> } +> }, +33095,33098c33655,33658 +< "version": "1.0.0", +< "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz", +< "integrity": "sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==", +< "dev": true, +--- +> "version": "1.1.0", +> "resolved": 
"https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz", +> "integrity": "sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==", +> "license": "MIT", +33100c33660,33661 +< "internal-slot": "^1.0.4" +--- +> "es-errors": "^1.3.0", +> "internal-slot": "^1.1.0" +33265,33267c33826,33828 +< "version": "1.2.9", +< "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", +< "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==", +--- +> "version": "1.2.10", +> "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.10.tgz", +> "integrity": "sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==", +33270c33831,33833 +< "call-bind": "^1.0.7", +--- +> "call-bind": "^1.0.8", +> "call-bound": "^1.0.2", +> "define-data-property": "^1.1.4", +33272,33273c33835,33837 +< "es-abstract": "^1.23.0", +< "es-object-atoms": "^1.0.0" +--- +> "es-abstract": "^1.23.5", +> "es-object-atoms": "^1.0.0", +> "has-property-descriptors": "^1.0.2" +33283,33285c33847,33849 +< "version": "1.0.8", +< "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz", +< "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==", +--- +> "version": "1.0.9", +> "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.9.tgz", +> "integrity": "sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ==", +33288c33852,33853 +< "call-bind": "^1.0.7", +--- +> "call-bind": "^1.0.8", +> "call-bound": "^1.0.2", +33291a33857,33859 +> "engines": { +> "node": ">= 0.4" +> }, +33781c34349,34350 +< "deprecated": "Please upgrade to v7.0.2+ of superagent. We have fixed numerous issues with streams, form-data, attach(), filesystem errors not bubbling up (ENOENT on attach()), and all tests are now passing. See the releases tab for more information at . Thanks to @shadowgate15, @spence-s, and @niftylettuce. Superagent is sponsored by Forward Email at .", +--- +> "deprecated": "Please upgrade to v9.0.0+ as we have fixed a public vulnerability with formidable dependency. Note that v9.0.0+ requires Node.js v14.18.0+. See https://github.com/ladjs/superagent/pull/1800 for insight. 
This project is supported and maintained by the team at Forward Email @ https://forwardemail.net", +> "license": "MIT", +33801a34371 +> "license": "MIT", +33807,33809c34377,34380 +< "version": "2.5.1", +< "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", +< "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", +--- +> "version": "2.5.3", +> "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.3.tgz", +> "integrity": "sha512-XHIrMD0NpDrNM/Ckf7XJiBbLl57KEhT3+i3yY+eWm+cqYZJQTZrKo8Y8AWKnuV5GT4scfuUGt9LzNoIx3dU1nQ==", +> "license": "MIT", +33812,33813c34383,34386 +< "combined-stream": "^1.0.6", +< "mime-types": "^2.1.12" +--- +> "combined-stream": "^1.0.8", +> "es-set-tostringtag": "^2.1.0", +> "mime-types": "^2.1.35", +> "safe-buffer": "^5.2.1" +33818a34392,34411 +> "node_modules/superagent/node_modules/form-data/node_modules/safe-buffer": { +> "version": "5.2.1", +> "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", +> "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", +> "funding": [ +> { +> "type": "github", +> "url": "https://github.com/sponsors/feross" +> }, +> { +> "type": "patreon", +> "url": "https://www.patreon.com/feross" +> }, +> { +> "type": "consulting", +> "url": "https://feross.org/support" +> } +> ], +> "license": "MIT" +> }, +33822,33833c34415,34416 +< "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" +< }, +< "node_modules/superagent/node_modules/mime": { +< "version": "1.6.0", +< "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", +< "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", +< "bin": { +< "mime": "cli.js" +< }, +< "engines": { +< "node": ">=4" +< } +--- +> "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", +> "license": "MIT" +33836,33838c34419,34422 +< "version": "2.3.7", +< "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", +< "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", +--- +> "version": "2.3.8", +> "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", +> "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", +> "license": "MIT", +33852a34437 +> "license": "MIT", +34099c34684,34685 +< "integrity": "sha1-oJdRnG8e5N1n4wjZtT3cnCslf5c=", +--- +> "integrity": "sha512-O2hZbWqq8x6j0uZ4qWj5dw45WPoAxKsJLJZqOgTqRtPNi8IqA+rDkDV/48S8qanS3KGv1QcVoPNLivMbyHHdAQ==", +> "license": "MIT", +34150,34155d34735 +< "node_modules/swagger-tools/node_modules/commander": { +< "version": "2.11.0", +< "resolved": "https://registry.npmjs.org/commander/-/commander-2.11.0.tgz", +< "integrity": "sha512-b0553uYA5YAEGgyYIGYROzKQ7X5RAqedkfjiZxwi0kL1g3bOaBNNZfYkzt/CL0umgD5wc9Jec2FbB98CjkMRvQ==", +< "license": "MIT" +< }, +34257,34259c34837,34839 +< "version": "3.0.8", +< "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.8.tgz", +< "integrity": "sha512-ZoROL70jptorGAlgAYiLoBLItEKw/fUxg9BSYK/dF/GAGYFJOJJJMvjPAKDJraCXFwadD456FCuvLWgfhMsPwg==", +--- +> "version": "3.0.9", +> "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.9.tgz", +> "integrity": "sha512-XF4w9Xp+ZQgifKakjZYmFdkLoSWd34VGKcsTCwlNWM7QG3ZbaxnTsaBwnjFZqHRf/rROxaR8rXnbtwdvaDI+lA==", +34943,34945c35523,35525 +< "version": 
"0.6.9", +< "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.9.tgz", +< "integrity": "sha512-7bBrcF+/LQzSgFmT0X5YclVqQxtv7TDJ1f8Wj7ibBu/U6BMLeOpUxuZjV7rMc44UtKxlnMFigdhFAIszSX1DMg==", +--- +> "version": "0.6.11", +> "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.11.tgz", +> "integrity": "sha512-vxXDZg8/+p3gblxB6BhhG5yWVn1kGRlaL8O78UDXc3wRnPizB5g83dcvWV1jpDMIPnjZjOFuxlMmE82XJ4407w==", +34948,34950c35528,35530 +< "gopd": "^1.0.1", +< "typedarray.prototype.slice": "^1.0.3", +< "which-typed-array": "^1.1.15" +--- +> "gopd": "^1.2.0", +> "typedarray.prototype.slice": "^1.0.5", +> "which-typed-array": "^1.1.18" +35110,35112c35690,35692 +< "version": "1.0.2", +< "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz", +< "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==", +--- +> "version": "1.0.3", +> "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", +> "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==", +35115c35695 +< "call-bind": "^1.0.7", +--- +> "call-bound": "^1.0.3", +35117c35697 +< "is-typed-array": "^1.1.13" +--- +> "is-typed-array": "^1.1.14" +35124,35126c35704,35706 +< "version": "1.0.1", +< "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", +< "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", +--- +> "version": "1.0.3", +> "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.3.tgz", +> "integrity": "sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg==", +35129c35709 +< "call-bind": "^1.0.7", +--- +> "call-bind": "^1.0.8", +35131,35133c35711,35713 +< "gopd": "^1.0.1", +< "has-proto": "^1.0.3", +< "is-typed-array": "^1.1.13" +--- +> "gopd": "^1.2.0", +> "has-proto": "^1.2.0", +> "is-typed-array": "^1.1.14" +35143,35145c35723,35725 +< "version": "1.0.2", +< "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz", +< "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==", +--- +> "version": "1.0.4", +> "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.4.tgz", +> "integrity": "sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ==", +35149c35729 +< "call-bind": "^1.0.7", +--- +> "call-bind": "^1.0.8", +35151,35153c35731,35734 +< "gopd": "^1.0.1", +< "has-proto": "^1.0.3", +< "is-typed-array": "^1.1.13" +--- +> "gopd": "^1.2.0", +> "has-proto": "^1.2.0", +> "is-typed-array": "^1.1.15", +> "reflect.getprototypeof": "^1.0.9" +35163,35165c35744,35746 +< "version": "1.0.6", +< "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz", +< "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==", +--- +> "version": "1.0.7", +> "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.7.tgz", +> "integrity": "sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==", +35171d35751 +< "has-proto": "^1.0.3", +35173c35753,35754 +< "possible-typed-array-names": "^1.0.0" +--- +> "possible-typed-array-names": "^1.0.0", +> "reflect.getprototypeof": 
"^1.0.6" +35188,35190c35769,35771 +< "version": "1.0.3", +< "resolved": "https://registry.npmjs.org/typedarray.prototype.slice/-/typedarray.prototype.slice-1.0.3.tgz", +< "integrity": "sha512-8WbVAQAUlENo1q3c3zZYuy5k9VzBQvp8AX9WOtbvyWlLM1v5JaSRmjubLjzHF4JFtptjH/5c/i95yaElvcjC0A==", +--- +> "version": "1.0.5", +> "resolved": "https://registry.npmjs.org/typedarray.prototype.slice/-/typedarray.prototype.slice-1.0.5.tgz", +> "integrity": "sha512-q7QNVDGTdl702bVFiI5eY4l/HkgCM6at9KhcFbgUAzezHFbOVy4+0O/lCjsABEQwbZPravVfBIiBVGo89yzHFg==", +35193c35774 +< "call-bind": "^1.0.7", +--- +> "call-bind": "^1.0.8", +35195c35776 +< "es-abstract": "^1.23.0", +--- +> "es-abstract": "^1.23.9", +35197,35198c35778,35781 +< "typed-array-buffer": "^1.0.2", +< "typed-array-byte-offset": "^1.0.2" +--- +> "get-proto": "^1.0.1", +> "math-intrinsics": "^1.1.0", +> "typed-array-buffer": "^1.0.3", +> "typed-array-byte-offset": "^1.0.4" +35274,35276c35857,35860 +< "version": "1.0.2", +< "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", +< "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", +--- +> "version": "1.1.0", +> "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz", +> "integrity": "sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==", +> "license": "MIT", +35278c35862 +< "call-bind": "^1.0.2", +--- +> "call-bound": "^1.0.3", +35280,35281c35864,35868 +< "has-symbols": "^1.0.3", +< "which-boxed-primitive": "^1.0.2" +--- +> "has-symbols": "^1.1.0", +> "which-boxed-primitive": "^1.1.1" +> }, +> "engines": { +> "node": ">= 0.4" +35709a36297,36305 +> "node_modules/validator": { +> "version": "10.11.0", +> "resolved": "https://registry.npmjs.org/validator/-/validator-10.11.0.tgz", +> "integrity": "sha512-X/p3UZerAIsbBfN/IwahhYaBbY68EN/UQBWHtsbXGT5bfrH/p4NQzUCG1kF/rtKaNpnJ7jAu6NGTdSNtyNIXMw==", +> "license": "MIT", +> "engines": { +> "node": ">= 0.10" +> } +> }, +36697,36699c37293,37296 +< "version": "1.0.2", +< "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", +< "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", +--- +> "version": "1.1.1", +> "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.1.1.tgz", +> "integrity": "sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==", +> "license": "MIT", +36701,36705c37298,37332 +< "is-bigint": "^1.0.1", +< "is-boolean-object": "^1.1.0", +< "is-number-object": "^1.0.4", +< "is-string": "^1.0.5", +< "is-symbol": "^1.0.3" +--- +> "is-bigint": "^1.1.0", +> "is-boolean-object": "^1.2.1", +> "is-number-object": "^1.1.1", +> "is-string": "^1.1.1", +> "is-symbol": "^1.1.1" +> }, +> "engines": { +> "node": ">= 0.4" +> }, +> "funding": { +> "url": "https://github.com/sponsors/ljharb" +> } +> }, +> "node_modules/which-builtin-type": { +> "version": "1.2.1", +> "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.2.1.tgz", +> "integrity": "sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q==", +> "license": "MIT", +> "dependencies": { +> "call-bound": "^1.0.2", +> "function.prototype.name": "^1.1.6", +> "has-tostringtag": "^1.0.2", +> "is-async-function": "^2.0.0", +> "is-date-object": "^1.1.0", +> "is-finalizationregistry": "^1.1.0", +> "is-generator-function": 
"^1.0.10", +> "is-regex": "^1.2.1", +> "is-weakref": "^1.0.2", +> "isarray": "^2.0.5", +> "which-boxed-primitive": "^1.1.0", +> "which-collection": "^1.0.2", +> "which-typed-array": "^1.1.16" +> }, +> "engines": { +> "node": ">= 0.4" +36712,36715c37339,37342 +< "version": "1.0.1", +< "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.1.tgz", +< "integrity": "sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A==", +< "dev": true, +--- +> "version": "1.0.2", +> "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz", +> "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==", +> "license": "MIT", +36717,36720c37344,37350 +< "is-map": "^2.0.1", +< "is-set": "^2.0.1", +< "is-weakmap": "^2.0.1", +< "is-weakset": "^2.0.1" +--- +> "is-map": "^2.0.3", +> "is-set": "^2.0.3", +> "is-weakmap": "^2.0.2", +> "is-weakset": "^2.0.3" +> }, +> "engines": { +> "node": ">= 0.4" +36733,36735c37363,37365 +< "version": "1.1.15", +< "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", +< "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", +--- +> "version": "1.1.19", +> "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", +> "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", +36739,36741c37369,37373 +< "call-bind": "^1.0.7", +< "for-each": "^0.3.3", +< "gopd": "^1.0.1", +--- +> "call-bind": "^1.0.8", +> "call-bound": "^1.0.4", +> "for-each": "^0.3.5", +> "get-proto": "^1.0.1", +> "gopd": "^1.2.0", +37217a37850 +> "license": "MIT", +37231,37236d37863 +< "node_modules/z-schema/node_modules/commander": { +< "version": "2.20.3", +< "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", +< "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", +< "optional": true +< }, +37241,37250c37868,37870 +< "deprecated": "core-js@<3.4 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Please, upgrade your dependencies to the actual version of core-js.", +< "hasInstallScript": true +< }, +< "node_modules/z-schema/node_modules/validator": { +< "version": "10.11.0", +< "resolved": "https://registry.npmjs.org/validator/-/validator-10.11.0.tgz", +< "integrity": "sha512-X/p3UZerAIsbBfN/IwahhYaBbY68EN/UQBWHtsbXGT5bfrH/p4NQzUCG1kF/rtKaNpnJ7jAu6NGTdSNtyNIXMw==", +< "engines": { +< "node": ">= 0.10" +< } +--- +> "deprecated": "core-js@<3.23.3 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Some versions have web compatibility issues. 
Please, upgrade your dependencies to the actual version of core-js.", +> "hasInstallScript": true, +> "license": "MIT" +37358c37978 +< "dockerode": "^4.0.5", +--- +> "dockerode": "^4.0.7", +37364c37984 +< "tar-fs": "^3.0.4", +--- +> "tar-fs": "^3.0.9", +37425,37451d38044 +< "services/clsi/node_modules/@grpc/grpc-js": { +< "version": "1.13.2", +< "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.13.2.tgz", +< "integrity": "sha512-nnR5nmL6lxF8YBqb6gWvEgLdLh/Fn+kvAdX5hUOnt48sNSb0riz/93ASd2E5gvanPA41X6Yp25bIfGRp1SMb2g==", +< "license": "Apache-2.0", +< "dependencies": { +< "@grpc/proto-loader": "^0.7.13", +< "@js-sdsl/ordered-map": "^4.4.2" +< }, +< "engines": { +< "node": ">=12.10.0" +< } +< }, +< "services/clsi/node_modules/cpu-features": { +< "version": "0.0.10", +< "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", +< "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", +< "hasInstallScript": true, +< "optional": true, +< "dependencies": { +< "buildcheck": "~0.0.6", +< "nan": "^2.19.0" +< }, +< "engines": { +< "node": ">=10.0.0" +< } +< }, +37461,37529d38053 +< "services/clsi/node_modules/docker-modem": { +< "version": "5.0.6", +< "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.6.tgz", +< "integrity": "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", +< "license": "Apache-2.0", +< "dependencies": { +< "debug": "^4.1.1", +< "readable-stream": "^3.5.0", +< "split-ca": "^1.0.1", +< "ssh2": "^1.15.0" +< }, +< "engines": { +< "node": ">= 8.0" +< } +< }, +< "services/clsi/node_modules/dockerode": { +< "version": "4.0.5", +< "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.5.tgz", +< "integrity": "sha512-ZPmKSr1k1571Mrh7oIBS/j0AqAccoecY2yH420ni5j1KyNMgnoTh4Nu4FWunh0HZIJmRSmSysJjBIpa/zyWUEA==", +< "license": "Apache-2.0", +< "dependencies": { +< "@balena/dockerignore": "^1.0.2", +< "@grpc/grpc-js": "^1.11.1", +< "@grpc/proto-loader": "^0.7.13", +< "docker-modem": "^5.0.6", +< "protobufjs": "^7.3.2", +< "tar-fs": "~2.1.2", +< "uuid": "^10.0.0" +< }, +< "engines": { +< "node": ">= 8.0" +< } +< }, +< "services/clsi/node_modules/dockerode/node_modules/tar-fs": { +< "version": "2.1.2", +< "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.2.tgz", +< "integrity": "sha512-EsaAXwxmx8UB7FRKqeozqEPop69DXcmYwTQwXvyAPF352HJsPdkVhvTaDPYqfNgruveJIJy3TA2l+2zj8LJIJA==", +< "license": "MIT", +< "dependencies": { +< "chownr": "^1.1.1", +< "mkdirp-classic": "^0.5.2", +< "pump": "^3.0.0", +< "tar-stream": "^2.1.4" +< } +< }, +< "services/clsi/node_modules/protobufjs": { +< "version": "7.4.0", +< "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", +< "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==", +< "hasInstallScript": true, +< "license": "BSD-3-Clause", +< "dependencies": { +< "@protobufjs/aspromise": "^1.1.2", +< "@protobufjs/base64": "^1.1.2", +< "@protobufjs/codegen": "^2.0.4", +< "@protobufjs/eventemitter": "^1.1.0", +< "@protobufjs/fetch": "^1.1.0", +< "@protobufjs/float": "^1.0.2", +< "@protobufjs/inquire": "^1.1.0", +< "@protobufjs/path": "^1.1.2", +< "@protobufjs/pool": "^1.1.0", +< "@protobufjs/utf8": "^1.1.0", +< "@types/node": ">=13.7.0", +< "long": "^5.0.0" +< }, +< "engines": { +< "node": ">=12.0.0" +< } +< }, +37549,37565d38072 +< "services/clsi/node_modules/ssh2": { +< "version": "1.16.0", +< "resolved": 
"https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz", +< "integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==", +< "hasInstallScript": true, +< "dependencies": { +< "asn1": "^0.2.6", +< "bcrypt-pbkdf": "^1.0.2" +< }, +< "engines": { +< "node": ">=10.16.0" +< }, +< "optionalDependencies": { +< "cpu-features": "~0.0.10", +< "nan": "^2.20.0" +< } +< }, +37578,37590d38084 +< "services/clsi/node_modules/uuid": { +< "version": "10.0.0", +< "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", +< "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", +< "funding": [ +< "https://github.com/sponsors/broofa", +< "https://github.com/sponsors/ctavan" +< ], +< "license": "MIT", +< "bin": { +< "uuid": "dist/bin/uuid" +< } +< }, +38683c39177 +< "multer": "overleaf/multer#199c5ff05bd375c508f4074498237baead7f5148", +--- +> "multer": "github:overleaf/multer#4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23", +39602,39604c40096,40098 +< "version": "2.0.0", +< "resolved": "git+ssh://git@github.com/overleaf/multer.git#199c5ff05bd375c508f4074498237baead7f5148", +< "integrity": "sha512-S5MlIoOgrDr+a2jLS8z7jQlbzvZ0m30U2tRwdyLrxhnnMUQZYEzkVysEv10Dw41RTpM5bQQDs563Vzl1LLhxhQ==", +--- +> "version": "2.0.1", +> "resolved": "git+ssh://git@github.com/overleaf/multer.git#4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23", +> "integrity": "sha512-kkvPK48OQibR5vIoTQBbZp1uWVCvT9MrW3Y0mqdhFYJP/HVJujb4eSCEU0yj+hyf0Y+H/BKCmPdM4fJnzqAO4w==", +39608,39610c40102,40104 +< "busboy": "^1.0.0", +< "concat-stream": "^1.5.2", +< "mkdirp": "^0.5.4", +--- +> "busboy": "^1.6.0", +> "concat-stream": "^2.0.0", +> "mkdirp": "^0.5.6", +39612,39613c40106,40107 +< "type-is": "^1.6.4", +< "xtend": "^4.0.0" +--- +> "type-is": "^1.6.18", +> "xtend": "^4.0.2" diff --git a/server-ce/hotfix/5.5.1/pr_25168.patch b/server-ce/hotfix/5.5.1/pr_25168.patch new file mode 100644 index 0000000000..5d496d1f67 --- /dev/null +++ b/server-ce/hotfix/5.5.1/pr_25168.patch @@ -0,0 +1,19 @@ +--- a/services/history-v1/config/custom-environment-variables.json ++++ b/services/history-v1/config/custom-environment-variables.json +@@ -50,12 +50,14 @@ + "history": { + "host": "OVERLEAF_REDIS_HOST", + "password": "OVERLEAF_REDIS_PASS", +- "port": "OVERLEAF_REDIS_PORT" ++ "port": "OVERLEAF_REDIS_PORT", ++ "tls": "OVERLEAF_REDIS_TLS" + }, + "lock": { + "host": "OVERLEAF_REDIS_HOST", + "password": "OVERLEAF_REDIS_PASS", +- "port": "OVERLEAF_REDIS_PORT" ++ "port": "OVERLEAF_REDIS_PORT", ++ "tls": "OVERLEAF_REDIS_TLS" + } + } + } diff --git a/server-ce/hotfix/5.5.1/pr_26086.patch b/server-ce/hotfix/5.5.1/pr_26086.patch new file mode 100644 index 0000000000..fec417b3a5 --- /dev/null +++ b/server-ce/hotfix/5.5.1/pr_26086.patch @@ -0,0 +1,200 @@ +--- a/services/history-v1/api/controllers/project_import.js ++++ b/services/history-v1/api/controllers/project_import.js +@@ -35,6 +35,7 @@ async function importSnapshot(req, res) { + try { + snapshot = Snapshot.fromRaw(rawSnapshot) + } catch (err) { ++ logger.warn({ err, projectId }, 'failed to import snapshot') + return render.unprocessableEntity(res) + } + +@@ -43,6 +44,7 @@ async function importSnapshot(req, res) { + historyId = await chunkStore.initializeProject(projectId, snapshot) + } catch (err) { + if (err instanceof chunkStore.AlreadyInitialized) { ++ logger.warn({ err, projectId }, 'already initialized') + return render.conflict(res) + } else { + throw err +--- a/services/history-v1/api/controllers/projects.js ++++ 
b/services/history-v1/api/controllers/projects.js +@@ -34,6 +34,7 @@ async function initializeProject(req, res, next) { + res.status(HTTPStatus.OK).json({ projectId }) + } catch (err) { + if (err instanceof chunkStore.AlreadyInitialized) { ++ logger.warn({ err, projectId }, 'failed to initialize') + render.conflict(res) + } else { + throw err +@@ -242,11 +243,15 @@ async function createProjectBlob(req, res, next) { + const sizeLimit = new StreamSizeLimit(maxUploadSize) + await pipeline(req, sizeLimit, fs.createWriteStream(tmpPath)) + if (sizeLimit.sizeLimitExceeded) { ++ logger.warn( ++ { projectId, expectedHash, maxUploadSize }, ++ 'blob exceeds size threshold' ++ ) + return render.requestEntityTooLarge(res) + } + const hash = await blobHash.fromFile(tmpPath) + if (hash !== expectedHash) { +- logger.debug({ hash, expectedHash }, 'Hash mismatch') ++ logger.warn({ projectId, hash, expectedHash }, 'Hash mismatch') + return render.conflict(res, 'File hash mismatch') + } + +@@ -343,6 +348,10 @@ async function copyProjectBlob(req, res, next) { + targetBlobStore.getBlob(blobHash), + ]) + if (!sourceBlob) { ++ logger.warn( ++ { sourceProjectId, targetProjectId, blobHash }, ++ 'missing source blob when copying across projects' ++ ) + return render.notFound(res) + } + // Exit early if the blob exists in the target project. +--- a/services/history-v1/app.js ++++ b/services/history-v1/app.js +@@ -100,11 +100,13 @@ function setupErrorHandling() { + }) + } + if (err.code === 'ENUM_MISMATCH') { ++ logger.warn({ err, projectId }, err.message) + return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({ + message: 'invalid enum value: ' + err.paramName, + }) + } + if (err.code === 'REQUIRED') { ++ logger.warn({ err, projectId }, err.message) + return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({ + message: err.message, + }) +--- a/services/project-history/app/js/HistoryStoreManager.js ++++ b/services/project-history/app/js/HistoryStoreManager.js +@@ -35,7 +35,10 @@ class StringStream extends stream.Readable { + _mocks.getMostRecentChunk = (projectId, historyId, callback) => { + const path = `projects/${historyId}/latest/history` + logger.debug({ projectId, historyId }, 'getting chunk from history service') +- _requestChunk({ path, json: true }, callback) ++ _requestChunk({ path, json: true }, (err, chunk) => { ++ if (err) return callback(OError.tag(err)) ++ callback(null, chunk) ++ }) + } + + /** +@@ -54,7 +57,10 @@ export function getChunkAtVersion(projectId, historyId, version, callback) { + { projectId, historyId, version }, + 'getting chunk from history service for version' + ) +- _requestChunk({ path, json: true }, callback) ++ _requestChunk({ path, json: true }, (err, chunk) => { ++ if (err) return callback(OError.tag(err)) ++ callback(null, chunk) ++ }) + } + + export function getMostRecentVersion(projectId, historyId, callback) { +@@ -68,8 +74,10 @@ export function getMostRecentVersion(projectId, historyId, callback) { + _.sortBy(chunk.chunk.history.changes || [], x => x.timestamp) + ) + // find the latest project and doc versions in the chunk +- _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) => ++ _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) => { ++ if (err1) err1 = OError.tag(err1) + _getLatestV2DocVersions(projectId, chunk, (err2, v2DocVersions) => { ++ if (err2) err2 = OError.tag(err2) + // return the project and doc versions + const projectStructureAndDocVersions = { + project: projectVersion, +@@ -83,7 +91,7 @@ export function 
getMostRecentVersion(projectId, historyId, callback) { + chunk + ) + }) +- ) ++ }) + }) + } + +@@ -211,7 +219,10 @@ export function getProjectBlob(historyId, blobHash, callback) { + logger.debug({ historyId, blobHash }, 'getting blob from history service') + _requestHistoryService( + { path: `projects/${historyId}/blobs/${blobHash}` }, +- callback ++ (err, blob) => { ++ if (err) return callback(OError.tag(err)) ++ callback(null, blob) ++ } + ) + } + +@@ -277,7 +288,10 @@ function createBlobFromString(historyId, data, fileId, callback) { + (fsPath, cb) => { + _createBlob(historyId, fsPath, cb) + }, +- callback ++ (err, hash) => { ++ if (err) return callback(OError.tag(err)) ++ callback(null, hash) ++ } + ) + } + +@@ -330,7 +344,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { + try { + ranges = HistoryBlobTranslator.createRangeBlobDataFromUpdate(update) + } catch (error) { +- return callback(error) ++ return callback(OError.tag(error)) + } + createBlobFromString( + historyId, +@@ -338,7 +352,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { + `project-${projectId}-doc-${update.doc}`, + (err, fileHash) => { + if (err) { +- return callback(err) ++ return callback(OError.tag(err)) + } + if (ranges) { + createBlobFromString( +@@ -347,7 +361,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { + `project-${projectId}-doc-${update.doc}-ranges`, + (err, rangesHash) => { + if (err) { +- return callback(err) ++ return callback(OError.tag(err)) + } + logger.debug( + { fileHash, rangesHash }, +@@ -415,7 +429,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { + }, + (err, fileHash) => { + if (err) { +- return callback(err) ++ return callback(OError.tag(err)) + } + if (update.hash && update.hash !== fileHash) { + logger.warn( +@@ -447,7 +461,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { + }, + (err, fileHash) => { + if (err) { +- return callback(err) ++ return callback(OError.tag(err)) + } + logger.debug({ fileHash }, 'created empty blob for file') + callback(null, { file: fileHash }) +@@ -520,7 +534,10 @@ export function initializeProject(historyId, callback) { + export function deleteProject(projectId, callback) { + _requestHistoryService( + { method: 'DELETE', path: `projects/${projectId}` }, +- callback ++ err => { ++ if (err) return callback(OError.tag(err)) ++ callback(null) ++ } + ) + } + diff --git a/server-ce/hotfix/5.5.1/pr_26091.patch b/server-ce/hotfix/5.5.1/pr_26091.patch new file mode 100644 index 0000000000..c88618b8d0 --- /dev/null +++ b/server-ce/hotfix/5.5.1/pr_26091.patch @@ -0,0 +1,60 @@ +--- a/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs ++++ b/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs +@@ -7,6 +7,7 @@ import { + const { ObjectId } = mongodb + + const MIN_MONGO_VERSION = [6, 0] ++const MIN_MONGO_FEATURE_COMPATIBILITY_VERSION = [6, 0] + + async function main() { + let mongoClient +@@ -18,6 +19,7 @@ async function main() { + } + + await checkMongoVersion(mongoClient) ++ await checkFeatureCompatibilityVersion(mongoClient) + + try { + await testTransactions(mongoClient) +@@ -53,6 +55,41 @@ async function checkMongoVersion(mongoClient) { + } + } + ++async function checkFeatureCompatibilityVersion(mongoClient) { ++ const { ++ featureCompatibilityVersion: { version }, ++ } = await mongoClient ++ .db() ++ .admin() ++ .command({ getParameter: 1, featureCompatibilityVersion: 1 }) ++ 
const [major, minor] = version.split('.').map(v => parseInt(v)) ++ const [minMajor, minMinor] = MIN_MONGO_FEATURE_COMPATIBILITY_VERSION ++ ++ if (major < minMajor || (major === minMajor && minor < minMinor)) { ++ const minVersion = MIN_MONGO_FEATURE_COMPATIBILITY_VERSION.join('.') ++ console.error(` ++The MongoDB server has featureCompatibilityVersion=${version}, but Overleaf requires at least version ${minVersion}. ++ ++Open a mongo shell: ++- Overleaf Toolkit deployments: $ bin/mongo ++- Legacy docker-compose.yml deployments: $ docker exec -it mongo mongosh localhost/sharelatex ++ ++In the mongo shell: ++> db.adminCommand( { setFeatureCompatibilityVersion: "${minMajor}.${minMinor}" } ) ++ ++Verify the new value: ++> db.adminCommand( { getParameter: 1, featureCompatibilityVersion: 1 } ) ++ ... ++ { ++ featureCompatibilityVersion: { version: ${minMajor}.${minMinor}' }, ++... ++ ++Aborting. ++`) ++ process.exit(1) ++ } ++} ++ + main() + .then(() => { + console.error('Mongodb is up.') diff --git a/server-ce/hotfix/5.5.1/pr_26152.patch b/server-ce/hotfix/5.5.1/pr_26152.patch new file mode 100644 index 0000000000..9dc5d50e28 --- /dev/null +++ b/server-ce/hotfix/5.5.1/pr_26152.patch @@ -0,0 +1,16 @@ +--- a/services/web/modules/server-ce-scripts/scripts/create-user.mjs ++++ b/services/web/modules/server-ce-scripts/scripts/create-user.mjs +@@ -48,3 +48,13 @@ Please visit the following URL to set a password for ${email} and log in: + ) + }) + } ++ ++if (filename === process.argv[1]) { ++ try { ++ await main() ++ process.exit(0) ++ } catch (error) { ++ console.error({ error }) ++ process.exit(1) ++ } ++} diff --git a/server-ce/test/Makefile b/server-ce/test/Makefile index 18f4446902..6c56b7e8fe 100644 --- a/server-ce/test/Makefile +++ b/server-ce/test/Makefile @@ -6,8 +6,8 @@ all: test-e2e # Notable the container labels com.docker.compose.project.working_dir and com.docker.compose.project.config_files need to match when creating containers from the docker host (how you started things) and from host-admin (how tests reconfigure the instance). 
export PWD = $(shell pwd) -export TEX_LIVE_DOCKER_IMAGE ?= gcr.io/overleaf-ops/texlive-full:2023.1 -export ALL_TEX_LIVE_DOCKER_IMAGES ?= gcr.io/overleaf-ops/texlive-full:2023.1,gcr.io/overleaf-ops/texlive-full:2022.1 +export TEX_LIVE_DOCKER_IMAGE ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2023.1 +export ALL_TEX_LIVE_DOCKER_IMAGES ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2023.1,us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2022.1 export IMAGE_TAG_PRO ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/pro:latest export CYPRESS_SHARD ?= export COMPOSE_PROJECT_NAME ?= test @@ -20,6 +20,7 @@ test-e2e-native: npm run cypress:open test-e2e: + docker compose build host-admin docker compose up --no-log-prefix --exit-code-from=e2e e2e test-e2e-open: @@ -45,7 +46,7 @@ prefetch_custom_compose_pull: prefetch_custom: prefetch_custom_texlive prefetch_custom_texlive: echo "$$ALL_TEX_LIVE_DOCKER_IMAGES" | tr ',' '\n' | xargs -I% \ - sh -exc 'tag=%; re_tag=quay.io/sharelatex/$${tag#*/*/}; docker pull $$tag; docker tag $$tag $$re_tag' + sh -exc 'tag=%; re_tag=quay.io/sharelatex/$${tag#*/*/*/}; docker pull $$tag; docker tag $$tag $$re_tag' prefetch_custom: prefetch_old prefetch_old: diff --git a/server-ce/test/admin.spec.ts b/server-ce/test/admin.spec.ts index 9031e21b68..50a89fb855 100644 --- a/server-ce/test/admin.spec.ts +++ b/server-ce/test/admin.spec.ts @@ -179,6 +179,21 @@ describe('admin panel', function () { cy.get('nav').findByText('Manage Users').click() }) + it('displays expected tabs', () => { + const tabs = ['Users', 'License Usage'] + cy.get('[role="tab"]').each((el, index) => { + cy.wrap(el).findByText(tabs[index]).click() + }) + cy.get('[role="tab"]').should('have.length', tabs.length) + }) + + it('license usage tab', () => { + cy.get('a').contains('License Usage').click() + cy.findByText( + 'An active user is one who has opened a project in this Server Pro instance in the last 12 months.' 
+ ) + }) + describe('create users', () => { beforeEach(() => { cy.get('a').contains('New User').click() diff --git a/server-ce/test/docker-compose.yml b/server-ce/test/docker-compose.yml index 43f494a084..f4255e241b 100644 --- a/server-ce/test/docker-compose.yml +++ b/server-ce/test/docker-compose.yml @@ -131,7 +131,7 @@ services: saml: restart: always - image: gcr.io/overleaf-ops/saml-test + image: us-east1-docker.pkg.dev/overleaf-ops/ol-docker/saml-test environment: SAML_TEST_SP_ENTITY_ID: 'sharelatex-test-saml' SAML_BASE_URL_PATH: 'http://saml/simplesaml/' diff --git a/server-ce/test/git-bridge.spec.ts b/server-ce/test/git-bridge.spec.ts index 447f28bfd2..1f114574ac 100644 --- a/server-ce/test/git-bridge.spec.ts +++ b/server-ce/test/git-bridge.spec.ts @@ -107,7 +107,7 @@ describe('git-bridge', function () { cy.get('code').contains(`git clone ${gitURL(id.toString())}`) }) cy.findByText('Generate token').should('not.exist') - cy.findByText(/generate a new one in Account settings/i) + cy.findByText(/generate a new one in Account settings/) cy.findByText('Go to settings') .should('have.attr', 'target', '_blank') .and('have.attr', 'href', '/user/settings') diff --git a/server-ce/test/helpers/project.ts b/server-ce/test/helpers/project.ts index 8fb6aa2404..abcce3f9b2 100644 --- a/server-ce/test/helpers/project.ts +++ b/server-ce/test/helpers/project.ts @@ -37,7 +37,8 @@ export function createProject( } cy.findAllByRole('button').contains(newProjectButtonMatcher).click() // FIXME: This should only look in the left menu - cy.findAllByText(new RegExp(type, 'i')).first().click() + // The upgrading tests create projects in older versions of Server Pro which used different casing of the project type. Use case-insensitive match. + cy.findAllByText(type, { exact: false }).first().click() cy.findByRole('dialog').within(() => { cy.get('input').type(name) cy.findByText('Create').click() diff --git a/services/chat/docker-compose.ci.yml b/services/chat/docker-compose.ci.yml index 8fd86c1fbb..24b57ab084 100644 --- a/services/chat/docker-compose.ci.yml +++ b/services/chat/docker-compose.ci.yml @@ -24,10 +24,13 @@ services: MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" + volumes: + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started user: node + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance diff --git a/services/chat/docker-compose.yml b/services/chat/docker-compose.yml index 89a48339bd..43a30e8cc7 100644 --- a/services/chat/docker-compose.yml +++ b/services/chat/docker-compose.yml @@ -26,6 +26,7 @@ services: - .:/overleaf/services/chat - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/chat environment: ELASTIC_SEARCH_DSN: es:9200 @@ -39,6 +40,7 @@ services: depends_on: mongo: condition: service_started + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance mongo: diff --git a/services/clsi/README.md b/services/clsi/README.md index 16e40b8990..f1cf927d3d 100644 --- a/services/clsi/README.md +++ b/services/clsi/README.md @@ -19,18 +19,18 @@ The CLSI can be configured through the following environment variables: * `ALLOWED_IMAGES` - Space separated list of allowed Docker TeX Live images * `CATCH_ERRORS` - Set to `true` to log uncaught exceptions * 
`COMPILE_GROUP_DOCKER_CONFIGS` - JSON string of Docker configs for compile groups -* `COMPILES_HOST_DIR` - Working directory for LaTeX compiles -* `OUTPUT_HOST_DIR` - Output directory for LaTeX compiles +* `SANDBOXED_COMPILES` - Set to true to use sibling containers +* `SANDBOXED_COMPILES_HOST_DIR_COMPILES` - Working directory for LaTeX compiles +* `SANDBOXED_COMPILES_HOST_DIR_OUTPUT` - Output directory for LaTeX compiles * `COMPILE_SIZE_LIMIT` - Sets the body-parser [limit](https://github.com/expressjs/body-parser#limit) -* `DOCKER_RUNNER` - Set to true to use sibling containers * `DOCKER_RUNTIME` - * `FILESTORE_DOMAIN_OVERRIDE` - The url for the filestore service e.g.`http://$FILESTORE_HOST:3009` * `FILESTORE_PARALLEL_FILE_DOWNLOADS` - Number of parallel file downloads * `LISTEN_ADDRESS` - The address for the RESTful service to listen on. Set to `0.0.0.0` to listen on all network interfaces * `PROCESS_LIFE_SPAN_LIMIT_MS` - Process life span limit in milliseconds * `SMOKE_TEST` - Whether to run smoke tests -* `TEXLIVE_IMAGE` - The TeX Live Docker image to use for sibling containers, e.g. `gcr.io/overleaf-ops/texlive-full:2017.1` -* `TEX_LIVE_IMAGE_NAME_OVERRIDE` - The name of the registry for the Docker image e.g. `gcr.io/overleaf-ops` +* `TEXLIVE_IMAGE` - The TeX Live Docker image to use for sibling containers, e.g. `us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2017.1` +* `TEX_LIVE_IMAGE_NAME_OVERRIDE` - The name of the registry for the Docker image e.g. `us-east1-docker.pkg.dev/overleaf-ops/ol-docker` * `TEXLIVE_IMAGE_USER` - When using sibling containers, the user to run as in the TeX Live image. Defaults to `tex` * `TEXLIVE_OPENOUT_ANY` - Sets the `openout_any` environment variable for TeX Live (see the `\openout` primitive [documentation](http://tug.org/texinfohtml/web2c.html#tex-invocation)) @@ -63,10 +63,10 @@ Then start the Docker container: docker run --rm \ -p 127.0.0.1:3013:3013 \ -e LISTEN_ADDRESS=0.0.0.0 \ - -e DOCKER_RUNNER=true \ + -e SANDBOXED_COMPILES=true \ -e TEXLIVE_IMAGE=texlive/texlive \ -e TEXLIVE_IMAGE_USER=root \ - -e COMPILES_HOST_DIR="$PWD/compiles" \ + -e SANDBOXED_COMPILES_HOST_DIR_COMPILES="$PWD/compiles" \ -v "$PWD/compiles:/overleaf/services/clsi/compiles" \ -v "$PWD/cache:/overleaf/services/clsi/cache" \ -v /var/run/docker.sock:/var/run/docker.sock \ diff --git a/services/clsi/buildscript.txt b/services/clsi/buildscript.txt index 709ade18c3..58975135d0 100644 --- a/services/clsi/buildscript.txt +++ b/services/clsi/buildscript.txt @@ -2,7 +2,7 @@ clsi --data-dirs=cache,compiles,output --dependencies= --docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker ---env-add=ENABLE_PDF_CACHING="true",PDF_CACHING_ENABLE_WORKER_POOL="true",ALLOWED_IMAGES=quay.io/sharelatex/texlive-full:2017.1,TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1,TEX_LIVE_IMAGE_NAME_OVERRIDE=us-east1-docker.pkg.dev/overleaf-ops/ol-docker,TEXLIVE_IMAGE_USER="tex",DOCKER_RUNNER="true",COMPILES_HOST_DIR=$PWD/compiles,OUTPUT_HOST_DIR=$PWD/output +--env-add=ENABLE_PDF_CACHING="true",PDF_CACHING_ENABLE_WORKER_POOL="true",ALLOWED_IMAGES=quay.io/sharelatex/texlive-full:2017.1,TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1,TEX_LIVE_IMAGE_NAME_OVERRIDE=us-east1-docker.pkg.dev/overleaf-ops/ol-docker,TEXLIVE_IMAGE_USER="tex",SANDBOXED_COMPILES="true",SANDBOXED_COMPILES_HOST_DIR_COMPILES=$PWD/compiles,SANDBOXED_COMPILES_HOST_DIR_OUTPUT=$PWD/output --env-pass-through= --esmock-loader=False --node-version=22.15.1 diff --git a/services/clsi/config/settings.defaults.js 
b/services/clsi/config/settings.defaults.js index d187fe273e..1d82258a8e 100644 --- a/services/clsi/config/settings.defaults.js +++ b/services/clsi/config/settings.defaults.js @@ -141,9 +141,11 @@ if ((process.env.DOCKER_RUNNER || process.env.SANDBOXED_COMPILES) === 'true') { let seccompProfilePath try { seccompProfilePath = Path.resolve(__dirname, '../seccomp/clsi-profile.json') - module.exports.clsi.docker.seccomp_profile = JSON.stringify( - JSON.parse(require('node:fs').readFileSync(seccompProfilePath)) - ) + module.exports.clsi.docker.seccomp_profile = + process.env.SECCOMP_PROFILE || + JSON.stringify( + JSON.parse(require('node:fs').readFileSync(seccompProfilePath)) + ) } catch (error) { console.error( error, diff --git a/services/clsi/docker-compose.ci.yml b/services/clsi/docker-compose.ci.yml index b6643008f7..77a45615b7 100644 --- a/services/clsi/docker-compose.ci.yml +++ b/services/clsi/docker-compose.ci.yml @@ -29,9 +29,9 @@ services: TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1 TEX_LIVE_IMAGE_NAME_OVERRIDE: us-east1-docker.pkg.dev/overleaf-ops/ol-docker TEXLIVE_IMAGE_USER: "tex" - DOCKER_RUNNER: "true" - COMPILES_HOST_DIR: $PWD/compiles - OUTPUT_HOST_DIR: $PWD/output + SANDBOXED_COMPILES: "true" + SANDBOXED_COMPILES_HOST_DIR_COMPILES: $PWD/compiles + SANDBOXED_COMPILES_HOST_DIR_OUTPUT: $PWD/output volumes: - ./compiles:/overleaf/services/clsi/compiles - /var/run/docker.sock:/var/run/docker.sock diff --git a/services/clsi/docker-compose.yml b/services/clsi/docker-compose.yml index e0f29ab09d..b8112a8e17 100644 --- a/services/clsi/docker-compose.yml +++ b/services/clsi/docker-compose.yml @@ -47,8 +47,8 @@ services: TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1 TEX_LIVE_IMAGE_NAME_OVERRIDE: us-east1-docker.pkg.dev/overleaf-ops/ol-docker TEXLIVE_IMAGE_USER: "tex" - DOCKER_RUNNER: "true" - COMPILES_HOST_DIR: $PWD/compiles - OUTPUT_HOST_DIR: $PWD/output + SANDBOXED_COMPILES: "true" + SANDBOXED_COMPILES_HOST_DIR_COMPILES: $PWD/compiles + SANDBOXED_COMPILES_HOST_DIR_OUTPUT: $PWD/output command: npm run --silent test:acceptance diff --git a/services/clsi/package.json b/services/clsi/package.json index 86566e0f59..b07430391a 100644 --- a/services/clsi/package.json +++ b/services/clsi/package.json @@ -27,13 +27,13 @@ "async": "^3.2.5", "body-parser": "^1.20.3", "bunyan": "^1.8.15", - "dockerode": "^4.0.5", + "dockerode": "^4.0.7", "express": "^4.21.2", "lodash": "^4.17.21", "p-limit": "^3.1.0", "request": "^2.88.2", "send": "^0.19.0", - "tar-fs": "^3.0.4", + "tar-fs": "^3.0.9", "workerpool": "^6.1.5" }, "devDependencies": { diff --git a/services/contacts/docker-compose.ci.yml b/services/contacts/docker-compose.ci.yml index 8fd86c1fbb..24b57ab084 100644 --- a/services/contacts/docker-compose.ci.yml +++ b/services/contacts/docker-compose.ci.yml @@ -24,10 +24,13 @@ services: MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" + volumes: + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started user: node + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance diff --git a/services/contacts/docker-compose.yml b/services/contacts/docker-compose.yml index 65e1a578cd..305232b55d 100644 --- a/services/contacts/docker-compose.yml +++ b/services/contacts/docker-compose.yml @@ -26,6 +26,7 @@ services: - .:/overleaf/services/contacts - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries + - 
../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/contacts environment: ELASTIC_SEARCH_DSN: es:9200 @@ -39,6 +40,7 @@ services: depends_on: mongo: condition: service_started + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance mongo: diff --git a/services/contacts/package.json b/services/contacts/package.json index f81f947d6a..db707e55c9 100644 --- a/services/contacts/package.json +++ b/services/contacts/package.json @@ -6,9 +6,9 @@ "main": "app.js", "scripts": { "start": "node app.js", - "test:acceptance:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:acceptance:_run": "mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP", - "test:unit:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec $@ test/unit/js", + "test:unit:_run": "mocha --loader=esmock --recursive --reporter spec $@ test/unit/js", "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", "nodemon": "node --watch app.js", "lint": "eslint --max-warnings 0 --format unix .", diff --git a/services/docstore/app.js b/services/docstore/app.js index 76659e8411..ef755c4bb1 100644 --- a/services/docstore/app.js +++ b/services/docstore/app.js @@ -50,6 +50,14 @@ app.param('doc_id', function (req, res, next, docId) { app.get('/project/:project_id/doc-deleted', HttpController.getAllDeletedDocs) app.get('/project/:project_id/doc', HttpController.getAllDocs) app.get('/project/:project_id/ranges', HttpController.getAllRanges) +app.get( + '/project/:project_id/comment-thread-ids', + HttpController.getCommentThreadIds +) +app.get( + '/project/:project_id/tracked-changes-user-ids', + HttpController.getTrackedChangesUserIds +) app.get('/project/:project_id/has-ranges', HttpController.projectHasRanges) app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc) app.get('/project/:project_id/doc/:doc_id/deleted', HttpController.isDocDeleted) diff --git a/services/docstore/app/js/DocArchiveManager.js b/services/docstore/app/js/DocArchiveManager.js index 4390afe18f..d03ee161a8 100644 --- a/services/docstore/app/js/DocArchiveManager.js +++ b/services/docstore/app/js/DocArchiveManager.js @@ -1,5 +1,4 @@ -const { callbackify } = require('node:util') -const MongoManager = require('./MongoManager').promises +const MongoManager = require('./MongoManager') const Errors = require('./Errors') const logger = require('@overleaf/logger') const Settings = require('@overleaf/settings') @@ -8,29 +7,12 @@ const { ReadableString } = require('@overleaf/stream-utils') const RangeManager = require('./RangeManager') const PersistorManager = require('./PersistorManager') const pMap = require('p-map') -const { streamToBuffer } = require('./StreamToBuffer').promises +const { streamToBuffer } = require('./StreamToBuffer') const { BSON } = require('mongodb-legacy') const PARALLEL_JOBS = Settings.parallelArchiveJobs const UN_ARCHIVE_BATCH_SIZE = Settings.unArchiveBatchSize -module.exports = { - archiveAllDocs: callbackify(archiveAllDocs), - archiveDoc: callbackify(archiveDoc), - unArchiveAllDocs: callbackify(unArchiveAllDocs), - unarchiveDoc: callbackify(unarchiveDoc), - destroyProject: callbackify(destroyProject), - getDoc: callbackify(getDoc), - promises: { - archiveAllDocs, - archiveDoc, - unArchiveAllDocs, - unarchiveDoc, - destroyProject, - 
getDoc, - }, -} - async function archiveAllDocs(projectId) { if (!_isArchivingEnabled()) { return @@ -62,6 +44,8 @@ async function archiveDoc(projectId, docId) { throw new Error('doc has no lines') } + RangeManager.fixCommentIds(doc) + // warn about any oversized docs already in mongo const linesSize = BSON.calculateObjectSize(doc.lines || {}) const rangesSize = BSON.calculateObjectSize(doc.ranges || {}) @@ -225,3 +209,12 @@ function _isArchivingEnabled() { return true } + +module.exports = { + archiveAllDocs, + archiveDoc, + unArchiveAllDocs, + unarchiveDoc, + destroyProject, + getDoc, +} diff --git a/services/docstore/app/js/DocManager.js b/services/docstore/app/js/DocManager.js index a9ed99425c..c9e8dadc2c 100644 --- a/services/docstore/app/js/DocManager.js +++ b/services/docstore/app/js/DocManager.js @@ -5,7 +5,6 @@ const _ = require('lodash') const DocArchive = require('./DocArchiveManager') const RangeManager = require('./RangeManager') const Settings = require('@overleaf/settings') -const { callbackifyAll } = require('@overleaf/promise-utils') const { setTimeout } = require('node:timers/promises') /** @@ -29,7 +28,7 @@ const DocManager = { throw new Error('must include inS3 when getting doc') } - const doc = await MongoManager.promises.findDoc(projectId, docId, filter) + const doc = await MongoManager.findDoc(projectId, docId, filter) if (doc == null) { throw new Errors.NotFoundError( @@ -38,15 +37,19 @@ const DocManager = { } if (doc.inS3) { - await DocArchive.promises.unarchiveDoc(projectId, docId) + await DocArchive.unarchiveDoc(projectId, docId) return await DocManager._getDoc(projectId, docId, filter) } + if (filter.ranges) { + RangeManager.fixCommentIds(doc) + } + return doc }, async isDocDeleted(projectId, docId) { - const doc = await MongoManager.promises.findDoc(projectId, docId, { + const doc = await MongoManager.findDoc(projectId, docId, { deleted: true, }) @@ -74,7 +77,7 @@ const DocManager = { // returns the doc without any version information async _peekRawDoc(projectId, docId) { - const doc = await MongoManager.promises.findDoc(projectId, docId, { + const doc = await MongoManager.findDoc(projectId, docId, { lines: true, rev: true, deleted: true, @@ -91,7 +94,7 @@ const DocManager = { if (doc.inS3) { // skip the unarchiving to mongo when getting a doc - const archivedDoc = await DocArchive.promises.getDoc(projectId, docId) + const archivedDoc = await DocArchive.getDoc(projectId, docId) Object.assign(doc, archivedDoc) } @@ -102,7 +105,7 @@ const DocManager = { // without unarchiving it (avoids unnecessary writes to mongo) async peekDoc(projectId, docId) { const doc = await DocManager._peekRawDoc(projectId, docId) - await MongoManager.promises.checkRevUnchanged(doc) + await MongoManager.checkRevUnchanged(doc) return doc }, @@ -111,16 +114,18 @@ const DocManager = { lines: true, inS3: true, }) - return doc + if (!doc) throw new Errors.NotFoundError() + if (!Array.isArray(doc.lines)) throw new Errors.DocWithoutLinesError() + return doc.lines.join('\n') }, async getAllDeletedDocs(projectId, filter) { - return await MongoManager.promises.getProjectsDeletedDocs(projectId, filter) + return await MongoManager.getProjectsDeletedDocs(projectId, filter) }, async getAllNonDeletedDocs(projectId, filter) { - await DocArchive.promises.unArchiveAllDocs(projectId) - const docs = await MongoManager.promises.getProjectsDocs( + await DocArchive.unArchiveAllDocs(projectId) + const docs = await MongoManager.getProjectsDocs( projectId, { include_deleted: false }, filter @@ -128,15 +133,46 @@ 
const DocManager = { if (docs == null) { throw new Errors.NotFoundError(`No docs for project ${projectId}`) } + if (filter.ranges) { + for (const doc of docs) { + RangeManager.fixCommentIds(doc) + } + } return docs }, + async getCommentThreadIds(projectId) { + const docs = await DocManager.getAllNonDeletedDocs(projectId, { + _id: true, + ranges: true, + }) + const byDoc = new Map() + for (const doc of docs) { + const ids = new Set() + for (const comment of doc.ranges?.comments || []) { + ids.add(comment.op.t) + } + if (ids.size > 0) byDoc.set(doc._id.toString(), Array.from(ids)) + } + return Object.fromEntries(byDoc.entries()) + }, + + async getTrackedChangesUserIds(projectId) { + const docs = await DocManager.getAllNonDeletedDocs(projectId, { + ranges: true, + }) + const userIds = new Set() + for (const doc of docs) { + for (const change of doc.ranges?.changes || []) { + if (change.metadata.user_id === 'anonymous-user') continue + userIds.add(change.metadata.user_id) + } + } + return Array.from(userIds) + }, + async projectHasRanges(projectId) { - const docs = await MongoManager.promises.getProjectsDocs( - projectId, - {}, - { _id: 1 } - ) + const docs = await MongoManager.getProjectsDocs(projectId, {}, { _id: 1 }) const docIds = docs.map(doc => doc._id) for (const docId of docIds) { const doc = await DocManager.peekDoc(projectId, docId) @@ -247,7 +283,7 @@ const DocManager = { } modified = true - await MongoManager.promises.upsertIntoDocCollection( + await MongoManager.upsertIntoDocCollection( projectId, docId, doc?.rev, @@ -262,11 +298,7 @@ const DocManager = { async patchDoc(projectId, docId, meta) { const projection = { _id: 1, deleted: true } - const doc = await MongoManager.promises.findDoc( - projectId, - docId, - projection - ) + const doc = await MongoManager.findDoc(projectId, docId, projection) if (!doc) { throw new Errors.NotFoundError( `No such project/doc to delete: ${projectId}/${docId}` @@ -275,7 +307,7 @@ const DocManager = { if (meta.deleted && Settings.docstore.archiveOnSoftDelete) { // The user will not read this doc anytime soon. Flush it out of mongo. - DocArchive.promises.archiveDoc(projectId, docId).catch(err => { + DocArchive.archiveDoc(projectId, docId).catch(err => { logger.warn( { projectId, docId, err }, 'archiving a single doc in the background failed' @@ -283,15 +315,8 @@ const DocManager = { }) } - await MongoManager.promises.patchDoc(projectId, docId, meta) + await MongoManager.patchDoc(projectId, docId, meta) }, } -module.exports = { - ...callbackifyAll(DocManager, { - multiResult: { - updateDoc: ['modified', 'rev'], - }, - }), - promises: DocManager, -} +module.exports = DocManager diff --git a/services/docstore/app/js/Errors.js b/services/docstore/app/js/Errors.js index bbdbe75c08..7b150cc0db 100644 --- a/services/docstore/app/js/Errors.js +++ b/services/docstore/app/js/Errors.js @@ -10,10 +10,13 @@ class DocRevValueError extends OError {} class DocVersionDecrementedError extends OError {} +class DocWithoutLinesError extends OError {} + module.exports = { Md5MismatchError, DocModifiedError, DocRevValueError, DocVersionDecrementedError, + DocWithoutLinesError, ...Errors, } diff --git a/services/docstore/app/js/HealthChecker.js b/services/docstore/app/js/HealthChecker.js index 34cd5c973c..a5b7ad7e9a 100644 --- a/services/docstore/app/js/HealthChecker.js +++ b/services/docstore/app/js/HealthChecker.js @@ -1,67 +1,35 @@ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ const { db, ObjectId } = require('./mongodb') -const request = require('request') -const async = require('async') const _ = require('lodash') const crypto = require('node:crypto') const settings = require('@overleaf/settings') const { port } = settings.internal.docstore const logger = require('@overleaf/logger') +const { fetchNothing, fetchJson } = require('@overleaf/fetch-utils') -module.exports = { - check(callback) { - const docId = new ObjectId() - const projectId = new ObjectId(settings.docstore.healthCheck.project_id) - const url = `http://127.0.0.1:${port}/project/${projectId}/doc/${docId}` - const lines = [ - 'smoke test - delete me', - `${crypto.randomBytes(32).toString('hex')}`, - ] - const getOpts = () => ({ - url, - timeout: 3000, +async function check() { + const docId = new ObjectId() + const projectId = new ObjectId(settings.docstore.healthCheck.project_id) + const url = `http://127.0.0.1:${port}/project/${projectId}/doc/${docId}` + const lines = [ + 'smoke test - delete me', + `${crypto.randomBytes(32).toString('hex')}`, + ] + logger.debug({ lines, url, docId, projectId }, 'running health check') + let body + try { + await fetchNothing(url, { + method: 'POST', + json: { lines, version: 42, ranges: {} }, + signal: AbortSignal.timeout(3_000), }) - logger.debug({ lines, url, docId, projectId }, 'running health check') - const jobs = [ - function (cb) { - const opts = getOpts() - opts.json = { lines, version: 42, ranges: {} } - return request.post(opts, cb) - }, - function (cb) { - const opts = getOpts() - opts.json = true - return request.get(opts, function (err, res, body) { - if (err != null) { - logger.err({ err }, 'docstore returned a error in health check get') - return cb(err) - } else if (res == null) { - return cb(new Error('no response from docstore with get check')) - } else if ((res != null ? res.statusCode : undefined) !== 200) { - return cb(new Error(`status code not 200, its ${res.statusCode}`)) - } else if ( - _.isEqual(body != null ? body.lines : undefined, lines) && - (body != null ? 
body._id : undefined) === docId.toString() - ) { - return cb() - } else { - return cb( - new Error( - `health check lines not equal ${body.lines} != ${lines}` - ) - ) - } - }) - }, - cb => db.docs.deleteOne({ _id: docId, project_id: projectId }, cb), - ] - return async.series(jobs, callback) - }, + body = await fetchJson(url, { signal: AbortSignal.timeout(3_000) }) + } finally { + await db.docs.deleteOne({ _id: docId, project_id: projectId }) + } + if (!_.isEqual(body?.lines, lines)) { + throw new Error(`health check lines not equal ${body.lines} != ${lines}`) + } +} +module.exports = { + check, } diff --git a/services/docstore/app/js/HttpController.js b/services/docstore/app/js/HttpController.js index 1c4e137033..50c4302aeb 100644 --- a/services/docstore/app/js/HttpController.js +++ b/services/docstore/app/js/HttpController.js @@ -4,143 +4,104 @@ const DocArchive = require('./DocArchiveManager') const HealthChecker = require('./HealthChecker') const Errors = require('./Errors') const Settings = require('@overleaf/settings') +const { expressify } = require('@overleaf/promise-utils') -function getDoc(req, res, next) { +async function getDoc(req, res) { const { doc_id: docId, project_id: projectId } = req.params const includeDeleted = req.query.include_deleted === 'true' logger.debug({ projectId, docId }, 'getting doc') - DocManager.getFullDoc(projectId, docId, function (error, doc) { - if (error) { - return next(error) - } - logger.debug({ docId, projectId }, 'got doc') - if (doc == null) { - res.sendStatus(404) - } else if (doc.deleted && !includeDeleted) { - res.sendStatus(404) - } else { - res.json(_buildDocView(doc)) - } - }) + const doc = await DocManager.getFullDoc(projectId, docId) + logger.debug({ docId, projectId }, 'got doc') + if (doc.deleted && !includeDeleted) { + res.sendStatus(404) + } else { + res.json(_buildDocView(doc)) + } } -function peekDoc(req, res, next) { +async function peekDoc(req, res) { const { doc_id: docId, project_id: projectId } = req.params logger.debug({ projectId, docId }, 'peeking doc') - DocManager.peekDoc(projectId, docId, function (error, doc) { - if (error) { - return next(error) - } - if (doc == null) { - res.sendStatus(404) - } else { - res.setHeader('x-doc-status', doc.inS3 ? 'archived' : 'active') - res.json(_buildDocView(doc)) - } - }) + const doc = await DocManager.peekDoc(projectId, docId) + res.setHeader('x-doc-status', doc.inS3 ? 
'archived' : 'active') + res.json(_buildDocView(doc)) } -function isDocDeleted(req, res, next) { +async function isDocDeleted(req, res) { const { doc_id: docId, project_id: projectId } = req.params - DocManager.isDocDeleted(projectId, docId, function (error, deleted) { - if (error) { - return next(error) - } - res.json({ deleted }) - }) + const deleted = await DocManager.isDocDeleted(projectId, docId) + res.json({ deleted }) } -function getRawDoc(req, res, next) { +async function getRawDoc(req, res) { const { doc_id: docId, project_id: projectId } = req.params logger.debug({ projectId, docId }, 'getting raw doc') - DocManager.getDocLines(projectId, docId, function (error, doc) { - if (error) { - return next(error) - } - if (doc == null) { - res.sendStatus(404) - } else { - res.setHeader('content-type', 'text/plain') - res.send(_buildRawDocView(doc)) - } - }) + const content = await DocManager.getDocLines(projectId, docId) + res.setHeader('content-type', 'text/plain') + res.send(content) } -function getAllDocs(req, res, next) { +async function getAllDocs(req, res) { const { project_id: projectId } = req.params logger.debug({ projectId }, 'getting all docs') - DocManager.getAllNonDeletedDocs( - projectId, - { lines: true, rev: true }, - function (error, docs) { - if (docs == null) { - docs = [] - } - if (error) { - return next(error) - } - const docViews = _buildDocsArrayView(projectId, docs) - for (const docView of docViews) { - if (!docView.lines) { - logger.warn({ projectId, docId: docView._id }, 'missing doc lines') - docView.lines = [] - } - } - res.json(docViews) + const docs = await DocManager.getAllNonDeletedDocs(projectId, { + lines: true, + rev: true, + }) + const docViews = _buildDocsArrayView(projectId, docs) + for (const docView of docViews) { + if (!docView.lines) { + logger.warn({ projectId, docId: docView._id }, 'missing doc lines') + docView.lines = [] } - ) + } + res.json(docViews) } -function getAllDeletedDocs(req, res, next) { +async function getAllDeletedDocs(req, res) { const { project_id: projectId } = req.params logger.debug({ projectId }, 'getting all deleted docs') - DocManager.getAllDeletedDocs( - projectId, - { name: true, deletedAt: true }, - function (error, docs) { - if (error) { - return next(error) - } - res.json( - docs.map(doc => ({ - _id: doc._id.toString(), - name: doc.name, - deletedAt: doc.deletedAt, - })) - ) - } + const docs = await DocManager.getAllDeletedDocs(projectId, { + name: true, + deletedAt: true, + }) + res.json( + docs.map(doc => ({ + _id: doc._id.toString(), + name: doc.name, + deletedAt: doc.deletedAt, + })) ) } -function getAllRanges(req, res, next) { +async function getAllRanges(req, res) { const { project_id: projectId } = req.params logger.debug({ projectId }, 'getting all ranges') - DocManager.getAllNonDeletedDocs( - projectId, - { ranges: true }, - function (error, docs) { - if (docs == null) { - docs = [] - } - if (error) { - return next(error) - } - res.json(_buildDocsArrayView(projectId, docs)) - } - ) -} - -function projectHasRanges(req, res, next) { - const { project_id: projectId } = req.params - DocManager.projectHasRanges(projectId, (err, projectHasRanges) => { - if (err) { - return next(err) - } - res.json({ projectHasRanges }) + const docs = await DocManager.getAllNonDeletedDocs(projectId, { + ranges: true, }) + res.json(_buildDocsArrayView(projectId, docs)) } -function updateDoc(req, res, next) { +async function getCommentThreadIds(req, res) { + const { project_id: projectId } = req.params + const threadIds = await 
DocManager.getCommentThreadIds(projectId) + res.json(threadIds) +} + +async function getTrackedChangesUserIds(req, res) { + const { project_id: projectId } = req.params + const userIds = await DocManager.getTrackedChangesUserIds(projectId) + res.json(userIds) +} + +async function projectHasRanges(req, res) { + const { project_id: projectId } = req.params + const projectHasRanges = await DocManager.projectHasRanges(projectId) + res.json({ projectHasRanges }) +} + +async function updateDoc(req, res) { const { doc_id: docId, project_id: projectId } = req.params const lines = req.body?.lines const version = req.body?.version @@ -172,25 +133,20 @@ function updateDoc(req, res, next) { } logger.debug({ projectId, docId }, 'got http request to update doc') - DocManager.updateDoc( + const { modified, rev } = await DocManager.updateDoc( projectId, docId, lines, version, - ranges, - function (error, modified, rev) { - if (error) { - return next(error) - } - res.json({ - modified, - rev, - }) - } + ranges ) + res.json({ + modified, + rev, + }) } -function patchDoc(req, res, next) { +async function patchDoc(req, res) { const { doc_id: docId, project_id: projectId } = req.params logger.debug({ projectId, docId }, 'patching doc') @@ -203,12 +159,8 @@ function patchDoc(req, res, next) { logger.fatal({ field }, 'joi validation for pathDoc is broken') } }) - DocManager.patchDoc(projectId, docId, meta, function (error) { - if (error) { - return next(error) - } - res.sendStatus(204) - }) + await DocManager.patchDoc(projectId, docId, meta) + res.sendStatus(204) } function _buildDocView(doc) { @@ -221,10 +173,6 @@ function _buildDocView(doc) { return docView } -function _buildRawDocView(doc) { - return (doc?.lines ?? []).join('\n') -} - function _buildDocsArrayView(projectId, docs) { const docViews = [] for (const doc of docs) { @@ -241,79 +189,69 @@ function _buildDocsArrayView(projectId, docs) { return docViews } -function archiveAllDocs(req, res, next) { +async function archiveAllDocs(req, res) { const { project_id: projectId } = req.params logger.debug({ projectId }, 'archiving all docs') - DocArchive.archiveAllDocs(projectId, function (error) { - if (error) { - return next(error) - } - res.sendStatus(204) - }) + await DocArchive.archiveAllDocs(projectId) + res.sendStatus(204) } -function archiveDoc(req, res, next) { +async function archiveDoc(req, res) { const { doc_id: docId, project_id: projectId } = req.params logger.debug({ projectId, docId }, 'archiving a doc') - DocArchive.archiveDoc(projectId, docId, function (error) { - if (error) { - return next(error) - } - res.sendStatus(204) - }) + await DocArchive.archiveDoc(projectId, docId) + res.sendStatus(204) } -function unArchiveAllDocs(req, res, next) { +async function unArchiveAllDocs(req, res) { const { project_id: projectId } = req.params logger.debug({ projectId }, 'unarchiving all docs') - DocArchive.unArchiveAllDocs(projectId, function (err) { - if (err) { - if (err instanceof Errors.DocRevValueError) { - logger.warn({ err }, 'Failed to unarchive doc') - return res.sendStatus(409) - } - return next(err) + try { + await DocArchive.unArchiveAllDocs(projectId) + } catch (err) { + if (err instanceof Errors.DocRevValueError) { + logger.warn({ err }, 'Failed to unarchive doc') + return res.sendStatus(409) } - res.sendStatus(200) - }) + throw err + } + res.sendStatus(200) } -function destroyProject(req, res, next) { +async function destroyProject(req, res) { const { project_id: projectId } = req.params logger.debug({ projectId }, 'destroying all docs') - 
DocArchive.destroyProject(projectId, function (error) { - if (error) { - return next(error) - } - res.sendStatus(204) - }) + await DocArchive.destroyProject(projectId) + res.sendStatus(204) } -function healthCheck(req, res) { - HealthChecker.check(function (err) { - if (err) { - logger.err({ err }, 'error performing health check') - res.sendStatus(500) - } else { - res.sendStatus(200) - } - }) +async function healthCheck(req, res) { + try { + await HealthChecker.check() + } catch (err) { + logger.err({ err }, 'error performing health check') + res.sendStatus(500) + return + } + res.sendStatus(200) } module.exports = { - getDoc, - peekDoc, - isDocDeleted, - getRawDoc, - getAllDocs, - getAllDeletedDocs, - getAllRanges, - projectHasRanges, - updateDoc, - patchDoc, - archiveAllDocs, - archiveDoc, - unArchiveAllDocs, - destroyProject, - healthCheck, + getDoc: expressify(getDoc), + peekDoc: expressify(peekDoc), + isDocDeleted: expressify(isDocDeleted), + getRawDoc: expressify(getRawDoc), + getAllDocs: expressify(getAllDocs), + getAllDeletedDocs: expressify(getAllDeletedDocs), + getAllRanges: expressify(getAllRanges), + getTrackedChangesUserIds: expressify(getTrackedChangesUserIds), + getCommentThreadIds: expressify(getCommentThreadIds), + projectHasRanges: expressify(projectHasRanges), + updateDoc: expressify(updateDoc), + patchDoc: expressify(patchDoc), + archiveAllDocs: expressify(archiveAllDocs), + archiveDoc: expressify(archiveDoc), + unArchiveAllDocs: expressify(unArchiveAllDocs), + destroyProject: expressify(destroyProject), + healthCheck: expressify(healthCheck), } diff --git a/services/docstore/app/js/MongoManager.js b/services/docstore/app/js/MongoManager.js index ad1a2d2b40..ef101f91c0 100644 --- a/services/docstore/app/js/MongoManager.js +++ b/services/docstore/app/js/MongoManager.js @@ -1,7 +1,6 @@ const { db, ObjectId } = require('./mongodb') const Settings = require('@overleaf/settings') const Errors = require('./Errors') -const { callbackify } = require('node:util') const ARCHIVING_LOCK_DURATION_MS = Settings.archivingLockDurationMs @@ -241,34 +240,17 @@ async function destroyProject(projectId) { } module.exports = { - findDoc: callbackify(findDoc), - getProjectsDeletedDocs: callbackify(getProjectsDeletedDocs), - getProjectsDocs: callbackify(getProjectsDocs), - getArchivedProjectDocs: callbackify(getArchivedProjectDocs), - getNonArchivedProjectDocIds: callbackify(getNonArchivedProjectDocIds), - getNonDeletedArchivedProjectDocs: callbackify( - getNonDeletedArchivedProjectDocs - ), - upsertIntoDocCollection: callbackify(upsertIntoDocCollection), - restoreArchivedDoc: callbackify(restoreArchivedDoc), - patchDoc: callbackify(patchDoc), - getDocForArchiving: callbackify(getDocForArchiving), - markDocAsArchived: callbackify(markDocAsArchived), - checkRevUnchanged: callbackify(checkRevUnchanged), - destroyProject: callbackify(destroyProject), - promises: { - findDoc, - getProjectsDeletedDocs, - getProjectsDocs, - getArchivedProjectDocs, - getNonArchivedProjectDocIds, - getNonDeletedArchivedProjectDocs, - upsertIntoDocCollection, - restoreArchivedDoc, - patchDoc, - getDocForArchiving, - markDocAsArchived, - checkRevUnchanged, - destroyProject, - }, + findDoc, + getProjectsDeletedDocs, + getProjectsDocs, + getArchivedProjectDocs, + getNonArchivedProjectDocIds, + getNonDeletedArchivedProjectDocs, + upsertIntoDocCollection, + restoreArchivedDoc, + patchDoc, + getDocForArchiving, + markDocAsArchived, + checkRevUnchanged, + destroyProject, } diff --git 
a/services/docstore/app/js/RangeManager.js b/services/docstore/app/js/RangeManager.js index f36f68fe35..2fbadf9468 100644 --- a/services/docstore/app/js/RangeManager.js +++ b/services/docstore/app/js/RangeManager.js @@ -49,15 +49,25 @@ module.exports = RangeManager = { updateMetadata(change.metadata) } for (const comment of Array.from(ranges.comments || [])) { - comment.id = RangeManager._safeObjectId(comment.id) - if ((comment.op != null ? comment.op.t : undefined) != null) { - comment.op.t = RangeManager._safeObjectId(comment.op.t) - } + // Two bugs resulted in mismatched ids, prefer the thread id from the op: https://github.com/overleaf/internal/issues/23272 + comment.id = RangeManager._safeObjectId(comment.op?.t || comment.id) + if (comment.op) comment.op.t = comment.id + + // resolved property is added to comments when they are obtained from history, but this state doesn't belong in mongo docs collection + // more info: https://github.com/overleaf/internal/issues/24371#issuecomment-2913095174 + delete comment.op?.resolved updateMetadata(comment.metadata) } return ranges }, + fixCommentIds(doc) { + for (const comment of doc?.ranges?.comments || []) { + // Two bugs resulted in mismatched ids, prefer the thread id from the op: https://github.com/overleaf/internal/issues/23272 + if (comment.op?.t) comment.id = comment.op.t + } + }, + _safeObjectId(data) { try { return new ObjectId(data) diff --git a/services/docstore/app/js/StreamToBuffer.js b/services/docstore/app/js/StreamToBuffer.js index 7de146cd11..09215a7367 100644 --- a/services/docstore/app/js/StreamToBuffer.js +++ b/services/docstore/app/js/StreamToBuffer.js @@ -2,13 +2,9 @@ const { LoggerStream, WritableBuffer } = require('@overleaf/stream-utils') const Settings = require('@overleaf/settings') const logger = require('@overleaf/logger/logging-manager') const { pipeline } = require('node:stream/promises') -const { callbackify } = require('node:util') module.exports = { - streamToBuffer: callbackify(streamToBuffer), - promises: { - streamToBuffer, - }, + streamToBuffer, } async function streamToBuffer(projectId, docId, stream) { diff --git a/services/docstore/docker-compose.ci.yml b/services/docstore/docker-compose.ci.yml index ff222f6514..40decc4aea 100644 --- a/services/docstore/docker-compose.ci.yml +++ b/services/docstore/docker-compose.ci.yml @@ -27,12 +27,15 @@ services: MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" + volumes: + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started gcs: condition: service_healthy user: node + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance diff --git a/services/docstore/docker-compose.yml b/services/docstore/docker-compose.yml index 4a4fa2f10c..a58b862b9a 100644 --- a/services/docstore/docker-compose.yml +++ b/services/docstore/docker-compose.yml @@ -26,6 +26,7 @@ services: - .:/overleaf/services/docstore - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/docstore environment: ELASTIC_SEARCH_DSN: es:9200 @@ -44,6 +45,7 @@ services: condition: service_started gcs: condition: service_healthy + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance mongo: diff --git a/services/docstore/package.json b/services/docstore/package.json index e505f731d3..bf5857fd49 
100644 --- a/services/docstore/package.json +++ b/services/docstore/package.json @@ -17,6 +17,7 @@ "types:check": "tsc --noEmit" }, "dependencies": { + "@overleaf/fetch-utils": "*", "@overleaf/logger": "*", "@overleaf/metrics": "*", "@overleaf/o-error": "*", diff --git a/services/docstore/test/acceptance/js/ArchiveDocsTests.js b/services/docstore/test/acceptance/js/ArchiveDocsTests.js index d9228103b6..7e254c7e84 100644 --- a/services/docstore/test/acceptance/js/ArchiveDocsTests.js +++ b/services/docstore/test/acceptance/js/ArchiveDocsTests.js @@ -1001,6 +1001,15 @@ describe('Archiving', function () { }, version: 2, } + this.fixedRanges = { + ...this.doc.ranges, + comments: [ + { + ...this.doc.ranges.comments[0], + id: this.doc.ranges.comments[0].op.t, + }, + ], + } return DocstoreClient.createDoc( this.project_id, this.doc._id, @@ -1048,7 +1057,7 @@ describe('Archiving', function () { throw error } s3Doc.lines.should.deep.equal(this.doc.lines) - const ranges = JSON.parse(JSON.stringify(this.doc.ranges)) // ObjectId -> String + const ranges = JSON.parse(JSON.stringify(this.fixedRanges)) // ObjectId -> String s3Doc.ranges.should.deep.equal(ranges) return done() } @@ -1075,7 +1084,7 @@ describe('Archiving', function () { throw error } doc.lines.should.deep.equal(this.doc.lines) - doc.ranges.should.deep.equal(this.doc.ranges) + doc.ranges.should.deep.equal(this.fixedRanges) expect(doc.inS3).not.to.exist return done() }) diff --git a/services/docstore/test/acceptance/js/GettingAllDocsTests.js b/services/docstore/test/acceptance/js/GettingAllDocsTests.js index 8fe5e7d91b..57851b2c3b 100644 --- a/services/docstore/test/acceptance/js/GettingAllDocsTests.js +++ b/services/docstore/test/acceptance/js/GettingAllDocsTests.js @@ -20,30 +20,73 @@ const DocstoreClient = require('./helpers/DocstoreClient') describe('Getting all docs', function () { beforeEach(function (done) { this.project_id = new ObjectId() + this.threadId1 = new ObjectId().toString() + this.threadId2 = new ObjectId().toString() this.docs = [ { _id: new ObjectId(), lines: ['one', 'two', 'three'], - ranges: { mock: 'one' }, + ranges: { + comments: [ + { id: new ObjectId().toString(), op: { t: this.threadId1 } }, + ], + changes: [ + { + id: new ObjectId().toString(), + metadata: { user_id: 'user-id-1' }, + }, + ], + }, rev: 2, }, { _id: new ObjectId(), lines: ['aaa', 'bbb', 'ccc'], - ranges: { mock: 'two' }, + ranges: { + changes: [ + { + id: new ObjectId().toString(), + metadata: { user_id: 'user-id-2' }, + }, + ], + }, rev: 4, }, { _id: new ObjectId(), lines: ['111', '222', '333'], - ranges: { mock: 'three' }, + ranges: { + comments: [ + { id: new ObjectId().toString(), op: { t: this.threadId2 } }, + ], + changes: [ + { + id: new ObjectId().toString(), + metadata: { user_id: 'anonymous-user' }, + }, + ], + }, rev: 6, }, ] + this.fixedRanges = this.docs.map(doc => { + if (!doc.ranges?.comments?.length) return doc.ranges + return { + ...doc.ranges, + comments: [ + { ...doc.ranges.comments[0], id: doc.ranges.comments[0].op.t }, + ], + } + }) this.deleted_doc = { _id: new ObjectId(), lines: ['deleted'], - ranges: { mock: 'four' }, + ranges: { + comments: [{ id: new ObjectId().toString(), op: { t: 'thread-id-3' } }], + changes: [ + { id: new ObjectId().toString(), metadata: { user_id: 'user-id-3' } }, + ], + }, rev: 8, } const version = 42 @@ -96,7 +139,7 @@ describe('Getting all docs', function () { }) }) - return it('getAllRanges should return all the (non-deleted) doc ranges', function (done) { + it('getAllRanges should return all the 
(non-deleted) doc ranges', function (done) { return DocstoreClient.getAllRanges(this.project_id, (error, res, docs) => { if (error != null) { throw error @@ -104,9 +147,38 @@ describe('Getting all docs', function () { docs.length.should.equal(this.docs.length) for (let i = 0; i < docs.length; i++) { const doc = docs[i] - doc.ranges.should.deep.equal(this.docs[i].ranges) + doc.ranges.should.deep.equal(this.fixedRanges[i]) } return done() }) }) + + it('getTrackedChangesUserIds should return all the user ids from (non-deleted) ranges', function (done) { + DocstoreClient.getTrackedChangesUserIds( + this.project_id, + (error, res, userIds) => { + if (error != null) { + throw error + } + userIds.should.deep.equal(['user-id-1', 'user-id-2']) + done() + } + ) + }) + + it('getCommentThreadIds should return all the thread ids from (non-deleted) ranges', function (done) { + DocstoreClient.getCommentThreadIds( + this.project_id, + (error, res, threadIds) => { + if (error != null) { + throw error + } + threadIds.should.deep.equal({ + [this.docs[0]._id.toString()]: [this.threadId1], + [this.docs[2]._id.toString()]: [this.threadId2], + }) + done() + } + ) + }) }) diff --git a/services/docstore/test/acceptance/js/GettingDocsTests.js b/services/docstore/test/acceptance/js/GettingDocsTests.js index 121b3c1e24..1cfc53c5c6 100644 --- a/services/docstore/test/acceptance/js/GettingDocsTests.js +++ b/services/docstore/test/acceptance/js/GettingDocsTests.js @@ -28,10 +28,26 @@ describe('Getting a doc', function () { op: { i: 'foo', p: 3 }, meta: { user_id: new ObjectId().toString(), - ts: new Date().toString(), + ts: new Date().toJSON(), }, }, ], + comments: [ + { + id: new ObjectId().toString(), + op: { c: 'comment', p: 1, t: new ObjectId().toString() }, + metadata: { + user_id: new ObjectId().toString(), + ts: new Date().toJSON(), + }, + }, + ], + } + this.fixedRanges = { + ...this.ranges, + comments: [ + { ...this.ranges.comments[0], id: this.ranges.comments[0].op.t }, + ], } return DocstoreApp.ensureRunning(() => { return DocstoreClient.createDoc( @@ -60,7 +76,7 @@ describe('Getting a doc', function () { if (error) return done(error) doc.lines.should.deep.equal(this.lines) doc.version.should.equal(this.version) - doc.ranges.should.deep.equal(this.ranges) + doc.ranges.should.deep.equal(this.fixedRanges) return done() } ) @@ -114,7 +130,7 @@ describe('Getting a doc', function () { if (error) return done(error) doc.lines.should.deep.equal(this.lines) doc.version.should.equal(this.version) - doc.ranges.should.deep.equal(this.ranges) + doc.ranges.should.deep.equal(this.fixedRanges) doc.deleted.should.equal(true) return done() } diff --git a/services/docstore/test/acceptance/js/HealthCheckerTest.js b/services/docstore/test/acceptance/js/HealthCheckerTest.js new file mode 100644 index 0000000000..b25a45312b --- /dev/null +++ b/services/docstore/test/acceptance/js/HealthCheckerTest.js @@ -0,0 +1,28 @@ +const { db } = require('../../../app/js/mongodb') +const DocstoreApp = require('./helpers/DocstoreApp') +const DocstoreClient = require('./helpers/DocstoreClient') +const { expect } = require('chai') + +describe('HealthChecker', function () { + beforeEach('start', function (done) { + DocstoreApp.ensureRunning(done) + }) + beforeEach('clear docs collection', async function () { + await db.docs.deleteMany({}) + }) + let res + beforeEach('run health check', function (done) { + DocstoreClient.healthCheck((err, _res) => { + res = _res + done(err) + }) + }) + + it('should return 200', function () { + 
res.statusCode.should.equal(200) + }) + + it('should not leave any cruft behind', async function () { + expect(await db.docs.find({}).toArray()).to.deep.equal([]) + }) +}) diff --git a/services/docstore/test/acceptance/js/helpers/DocstoreClient.js b/services/docstore/test/acceptance/js/helpers/DocstoreClient.js index 790ec8f237..cb8bce2579 100644 --- a/services/docstore/test/acceptance/js/helpers/DocstoreClient.js +++ b/services/docstore/test/acceptance/js/helpers/DocstoreClient.js @@ -100,6 +100,26 @@ module.exports = DocstoreClient = { ) }, + getCommentThreadIds(projectId, callback) { + request.get( + { + url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/comment-thread-ids`, + json: true, + }, + callback + ) + }, + + getTrackedChangesUserIds(projectId, callback) { + request.get( + { + url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/tracked-changes-user-ids`, + json: true, + }, + callback + ) + }, + updateDoc(projectId, docId, lines, version, ranges, callback) { return request.post( { @@ -181,6 +201,13 @@ module.exports = DocstoreClient = { ) }, + healthCheck(callback) { + request.get( + `http://127.0.0.1:${settings.internal.docstore.port}/health_check`, + callback + ) + }, + getS3Doc(projectId, docId, callback) { getStringFromPersistor( Persistor, diff --git a/services/docstore/test/unit/js/DocArchiveManagerTests.js b/services/docstore/test/unit/js/DocArchiveManagerTests.js index a57f9806c8..2ec1cb2016 100644 --- a/services/docstore/test/unit/js/DocArchiveManagerTests.js +++ b/services/docstore/test/unit/js/DocArchiveManagerTests.js @@ -4,7 +4,7 @@ const modulePath = '../../../app/js/DocArchiveManager.js' const SandboxedModule = require('sandboxed-module') const { ObjectId } = require('mongodb-legacy') const Errors = require('../../../app/js/Errors') -const StreamToBuffer = require('../../../app/js/StreamToBuffer').promises +const StreamToBuffer = require('../../../app/js/StreamToBuffer') describe('DocArchiveManager', function () { let DocArchiveManager, @@ -31,6 +31,7 @@ describe('DocArchiveManager', function () { RangeManager = { jsonRangesToMongo: sinon.stub().returns({ mongo: 'ranges' }), + fixCommentIds: sinon.stub(), } Settings = { docstore: { @@ -142,37 +143,33 @@ describe('DocArchiveManager', function () { } MongoManager = { - promises: { - markDocAsArchived: sinon.stub().resolves(), - restoreArchivedDoc: sinon.stub().resolves(), - upsertIntoDocCollection: sinon.stub().resolves(), - getProjectsDocs: sinon.stub().resolves(mongoDocs), - getNonDeletedArchivedProjectDocs: getArchivedProjectDocs, - getNonArchivedProjectDocIds, - getArchivedProjectDocs, - findDoc: sinon.stub().callsFake(fakeGetDoc), - getDocForArchiving: sinon.stub().callsFake(fakeGetDoc), - destroyProject: sinon.stub().resolves(), - }, + markDocAsArchived: sinon.stub().resolves(), + restoreArchivedDoc: sinon.stub().resolves(), + upsertIntoDocCollection: sinon.stub().resolves(), + getProjectsDocs: sinon.stub().resolves(mongoDocs), + getNonDeletedArchivedProjectDocs: getArchivedProjectDocs, + getNonArchivedProjectDocIds, + getArchivedProjectDocs, + findDoc: sinon.stub().callsFake(fakeGetDoc), + getDocForArchiving: sinon.stub().callsFake(fakeGetDoc), + destroyProject: sinon.stub().resolves(), } // Wrap streamToBuffer so that we can pass in something that it expects (in // this case, a Promise) rather than a stubbed stream object streamToBuffer = { - promises: { - streamToBuffer: async () => { - const inputStream = new Promise(resolve => { - stream.on('data', 
data => resolve(data)) - }) + streamToBuffer: async () => { + const inputStream = new Promise(resolve => { + stream.on('data', data => resolve(data)) + }) - const value = await StreamToBuffer.streamToBuffer( - 'testProjectId', - 'testDocId', - inputStream - ) + const value = await StreamToBuffer.streamToBuffer( + 'testProjectId', + 'testDocId', + inputStream + ) - return value - }, + return value }, } @@ -192,9 +189,13 @@ describe('DocArchiveManager', function () { describe('archiveDoc', function () { it('should resolve when passed a valid document', async function () { - await expect( - DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) - ).to.eventually.be.fulfilled + await expect(DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)).to + .eventually.be.fulfilled + }) + + it('should fix comment ids', async function () { + await DocArchiveManager.archiveDoc(projectId, mongoDocs[1]._id) + expect(RangeManager.fixCommentIds).to.have.been.called }) it('should throw an error if the doc has no lines', async function () { @@ -202,26 +203,26 @@ describe('DocArchiveManager', function () { doc.lines = null await expect( - DocArchiveManager.promises.archiveDoc(projectId, doc._id) + DocArchiveManager.archiveDoc(projectId, doc._id) ).to.eventually.be.rejectedWith('doc has no lines') }) it('should add the schema version', async function () { - await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[1]._id) + await DocArchiveManager.archiveDoc(projectId, mongoDocs[1]._id) expect(StreamUtils.ReadableString).to.have.been.calledWith( sinon.match(/"schema_v":1/) ) }) it('should calculate the hex md5 sum of the content', async function () { - await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) expect(Crypto.createHash).to.have.been.calledWith('md5') expect(HashUpdate).to.have.been.calledWith(archivedDocJson) expect(HashDigest).to.have.been.calledWith('hex') }) it('should pass the md5 hash to the object persistor for verification', async function () { - await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) expect(PersistorManager.sendStream).to.have.been.calledWith( sinon.match.any, @@ -232,7 +233,7 @@ describe('DocArchiveManager', function () { }) it('should pass the correct bucket and key to the persistor', async function () { - await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) expect(PersistorManager.sendStream).to.have.been.calledWith( Settings.docstore.bucket, @@ -241,7 +242,7 @@ describe('DocArchiveManager', function () { }) it('should create a stream from the encoded json and send it', async function () { - await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) expect(StreamUtils.ReadableString).to.have.been.calledWith( archivedDocJson ) @@ -253,8 +254,8 @@ describe('DocArchiveManager', function () { }) it('should mark the doc as archived', async function () { - await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) - expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith( + await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) + expect(MongoManager.markDocAsArchived).to.have.been.calledWith( projectId, mongoDocs[0]._id, mongoDocs[0].rev @@ -267,8 +268,8 @@ describe('DocArchiveManager', 
function () { }) it('should bail out early', async function () { - await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) - expect(MongoManager.promises.getDocForArchiving).to.not.have.been.called + await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) + expect(MongoManager.getDocForArchiving).to.not.have.been.called }) }) @@ -285,7 +286,7 @@ describe('DocArchiveManager', function () { it('should return an error', async function () { await expect( - DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) ).to.eventually.be.rejectedWith('null bytes detected') }) }) @@ -296,21 +297,19 @@ describe('DocArchiveManager', function () { describe('when the doc is in S3', function () { beforeEach(function () { - MongoManager.promises.findDoc = sinon - .stub() - .resolves({ inS3: true, rev }) + MongoManager.findDoc = sinon.stub().resolves({ inS3: true, rev }) docId = mongoDocs[0]._id lines = ['doc', 'lines'] rev = 123 }) it('should resolve when passed a valid document', async function () { - await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId)) - .to.eventually.be.fulfilled + await expect(DocArchiveManager.unarchiveDoc(projectId, docId)).to + .eventually.be.fulfilled }) it('should test md5 validity with the raw buffer', async function () { - await DocArchiveManager.promises.unarchiveDoc(projectId, docId) + await DocArchiveManager.unarchiveDoc(projectId, docId) expect(HashUpdate).to.have.been.calledWith( sinon.match.instanceOf(Buffer) ) @@ -319,15 +318,17 @@ describe('DocArchiveManager', function () { it('should throw an error if the md5 does not match', async function () { PersistorManager.getObjectMd5Hash.resolves('badf00d') await expect( - DocArchiveManager.promises.unarchiveDoc(projectId, docId) + DocArchiveManager.unarchiveDoc(projectId, docId) ).to.eventually.be.rejected.and.be.instanceof(Errors.Md5MismatchError) }) it('should restore the doc in Mongo', async function () { - await DocArchiveManager.promises.unarchiveDoc(projectId, docId) - expect( - MongoManager.promises.restoreArchivedDoc - ).to.have.been.calledWith(projectId, docId, archivedDoc) + await DocArchiveManager.unarchiveDoc(projectId, docId) + expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith( + projectId, + docId, + archivedDoc + ) }) describe('when archiving is not configured', function () { @@ -337,15 +338,15 @@ describe('DocArchiveManager', function () { it('should error out on archived doc', async function () { await expect( - DocArchiveManager.promises.unarchiveDoc(projectId, docId) + DocArchiveManager.unarchiveDoc(projectId, docId) ).to.eventually.be.rejected.and.match( /found archived doc, but archiving backend is not configured/ ) }) it('should return early on non-archived doc', async function () { - MongoManager.promises.findDoc = sinon.stub().resolves({ rev }) - await DocArchiveManager.promises.unarchiveDoc(projectId, docId) + MongoManager.findDoc = sinon.stub().resolves({ rev }) + await DocArchiveManager.unarchiveDoc(projectId, docId) expect(PersistorManager.getObjectMd5Hash).to.not.have.been.called }) }) @@ -363,10 +364,12 @@ describe('DocArchiveManager', function () { }) it('should return the docs lines', async function () { - await DocArchiveManager.promises.unarchiveDoc(projectId, docId) - expect( - MongoManager.promises.restoreArchivedDoc - ).to.have.been.calledWith(projectId, docId, { lines, rev }) + await DocArchiveManager.unarchiveDoc(projectId, docId) + 
expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith( + projectId, + docId, + { lines, rev } + ) }) }) @@ -385,14 +388,16 @@ describe('DocArchiveManager', function () { }) it('should return the doc lines and ranges', async function () { - await DocArchiveManager.promises.unarchiveDoc(projectId, docId) - expect( - MongoManager.promises.restoreArchivedDoc - ).to.have.been.calledWith(projectId, docId, { - lines, - ranges: { mongo: 'ranges' }, - rev: 456, - }) + await DocArchiveManager.unarchiveDoc(projectId, docId) + expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith( + projectId, + docId, + { + lines, + ranges: { mongo: 'ranges' }, + rev: 456, + } + ) }) }) @@ -406,10 +411,12 @@ describe('DocArchiveManager', function () { }) it('should return only the doc lines', async function () { - await DocArchiveManager.promises.unarchiveDoc(projectId, docId) - expect( - MongoManager.promises.restoreArchivedDoc - ).to.have.been.calledWith(projectId, docId, { lines, rev: 456 }) + await DocArchiveManager.unarchiveDoc(projectId, docId) + expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith( + projectId, + docId, + { lines, rev: 456 } + ) }) }) @@ -423,10 +430,12 @@ describe('DocArchiveManager', function () { }) it('should use the rev obtained from Mongo', async function () { - await DocArchiveManager.promises.unarchiveDoc(projectId, docId) - expect( - MongoManager.promises.restoreArchivedDoc - ).to.have.been.calledWith(projectId, docId, { lines, rev }) + await DocArchiveManager.unarchiveDoc(projectId, docId) + expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith( + projectId, + docId, + { lines, rev } + ) }) }) @@ -441,7 +450,7 @@ describe('DocArchiveManager', function () { it('should throw an error', async function () { await expect( - DocArchiveManager.promises.unarchiveDoc(projectId, docId) + DocArchiveManager.unarchiveDoc(projectId, docId) ).to.eventually.be.rejectedWith( "I don't understand the doc format in s3" ) @@ -451,8 +460,8 @@ describe('DocArchiveManager', function () { }) it('should not do anything if the file is already unarchived', async function () { - MongoManager.promises.findDoc.resolves({ inS3: false }) - await DocArchiveManager.promises.unarchiveDoc(projectId, docId) + MongoManager.findDoc.resolves({ inS3: false }) + await DocArchiveManager.unarchiveDoc(projectId, docId) expect(PersistorManager.getObjectStream).not.to.have.been.called }) @@ -461,7 +470,7 @@ describe('DocArchiveManager', function () { .stub() .rejects(new Errors.NotFoundError()) await expect( - DocArchiveManager.promises.unarchiveDoc(projectId, docId) + DocArchiveManager.unarchiveDoc(projectId, docId) ).to.eventually.be.rejected.and.be.instanceof(Errors.NotFoundError) }) }) @@ -469,13 +478,11 @@ describe('DocArchiveManager', function () { describe('destroyProject', function () { describe('when archiving is enabled', function () { beforeEach(async function () { - await DocArchiveManager.promises.destroyProject(projectId) + await DocArchiveManager.destroyProject(projectId) }) it('should delete the project in Mongo', function () { - expect(MongoManager.promises.destroyProject).to.have.been.calledWith( - projectId - ) + expect(MongoManager.destroyProject).to.have.been.calledWith(projectId) }) it('should delete the project in the persistor', function () { @@ -489,13 +496,11 @@ describe('DocArchiveManager', function () { describe('when archiving is disabled', function () { beforeEach(async function () { Settings.docstore.backend = '' - await 
DocArchiveManager.promises.destroyProject(projectId) + await DocArchiveManager.destroyProject(projectId) }) it('should delete the project in Mongo', function () { - expect(MongoManager.promises.destroyProject).to.have.been.calledWith( - projectId - ) + expect(MongoManager.destroyProject).to.have.been.calledWith(projectId) }) it('should not delete the project in the persistor', function () { @@ -506,33 +511,35 @@ describe('DocArchiveManager', function () { describe('archiveAllDocs', function () { it('should resolve with valid arguments', async function () { - await expect(DocArchiveManager.promises.archiveAllDocs(projectId)).to - .eventually.be.fulfilled + await expect(DocArchiveManager.archiveAllDocs(projectId)).to.eventually.be + .fulfilled }) it('should archive all project docs which are not in s3', async function () { - await DocArchiveManager.promises.archiveAllDocs(projectId) + await DocArchiveManager.archiveAllDocs(projectId) // not inS3 - expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith( + expect(MongoManager.markDocAsArchived).to.have.been.calledWith( projectId, mongoDocs[0]._id ) - expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith( + expect(MongoManager.markDocAsArchived).to.have.been.calledWith( projectId, mongoDocs[1]._id ) - expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith( + expect(MongoManager.markDocAsArchived).to.have.been.calledWith( projectId, mongoDocs[4]._id ) // inS3 - expect( - MongoManager.promises.markDocAsArchived - ).not.to.have.been.calledWith(projectId, mongoDocs[2]._id) - expect( - MongoManager.promises.markDocAsArchived - ).not.to.have.been.calledWith(projectId, mongoDocs[3]._id) + expect(MongoManager.markDocAsArchived).not.to.have.been.calledWith( + projectId, + mongoDocs[2]._id + ) + expect(MongoManager.markDocAsArchived).not.to.have.been.calledWith( + projectId, + mongoDocs[3]._id + ) }) describe('when archiving is not configured', function () { @@ -541,21 +548,20 @@ describe('DocArchiveManager', function () { }) it('should bail out early', async function () { - await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) - expect(MongoManager.promises.getNonArchivedProjectDocIds).to.not.have - .been.called + await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) + expect(MongoManager.getNonArchivedProjectDocIds).to.not.have.been.called }) }) }) describe('unArchiveAllDocs', function () { it('should resolve with valid arguments', async function () { - await expect(DocArchiveManager.promises.unArchiveAllDocs(projectId)).to - .eventually.be.fulfilled + await expect(DocArchiveManager.unArchiveAllDocs(projectId)).to.eventually + .be.fulfilled }) it('should unarchive all inS3 docs', async function () { - await DocArchiveManager.promises.unArchiveAllDocs(projectId) + await DocArchiveManager.unArchiveAllDocs(projectId) for (const doc of archivedDocs) { expect(PersistorManager.getObjectStream).to.have.been.calledWith( @@ -571,9 +577,9 @@ describe('DocArchiveManager', function () { }) it('should bail out early', async function () { - await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) - expect(MongoManager.promises.getNonDeletedArchivedProjectDocs).to.not - .have.been.called + await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) + expect(MongoManager.getNonDeletedArchivedProjectDocs).to.not.have.been + .called }) }) }) diff --git a/services/docstore/test/unit/js/DocManagerTests.js b/services/docstore/test/unit/js/DocManagerTests.js index 
8405520e6e..67a2f26547 100644 --- a/services/docstore/test/unit/js/DocManagerTests.js +++ b/services/docstore/test/unit/js/DocManagerTests.js @@ -17,25 +17,22 @@ describe('DocManager', function () { this.version = 42 this.MongoManager = { - promises: { - findDoc: sinon.stub(), - getProjectsDocs: sinon.stub(), - patchDoc: sinon.stub().resolves(), - upsertIntoDocCollection: sinon.stub().resolves(), - }, + findDoc: sinon.stub(), + getProjectsDocs: sinon.stub(), + patchDoc: sinon.stub().resolves(), + upsertIntoDocCollection: sinon.stub().resolves(), } this.DocArchiveManager = { - promises: { - unarchiveDoc: sinon.stub(), - unArchiveAllDocs: sinon.stub(), - archiveDoc: sinon.stub().resolves(), - }, + unarchiveDoc: sinon.stub(), + unArchiveAllDocs: sinon.stub(), + archiveDoc: sinon.stub().resolves(), } this.RangeManager = { jsonRangesToMongo(r) { return r }, shouldUpdateRanges: sinon.stub().returns(false), + fixCommentIds: sinon.stub(), } this.settings = { docstore: {} } @@ -52,7 +49,7 @@ describe('DocManager', function () { describe('getFullDoc', function () { beforeEach(function () { - this.DocManager.promises._getDoc = sinon.stub() + this.DocManager._getDoc = sinon.stub() this.doc = { _id: this.doc_id, lines: ['2134'], @@ -60,13 +57,10 @@ describe('DocManager', function () { }) it('should call get doc with a quick filter', async function () { - this.DocManager.promises._getDoc.resolves(this.doc) - const doc = await this.DocManager.promises.getFullDoc( - this.project_id, - this.doc_id - ) + this.DocManager._getDoc.resolves(this.doc) + const doc = await this.DocManager.getFullDoc(this.project_id, this.doc_id) doc.should.equal(this.doc) - this.DocManager.promises._getDoc + this.DocManager._getDoc .calledWith(this.project_id, this.doc_id, { lines: true, rev: true, @@ -79,27 +73,27 @@ describe('DocManager', function () { }) it('should return error when get doc errors', async function () { - this.DocManager.promises._getDoc.rejects(this.stubbedError) + this.DocManager._getDoc.rejects(this.stubbedError) await expect( - this.DocManager.promises.getFullDoc(this.project_id, this.doc_id) + this.DocManager.getFullDoc(this.project_id, this.doc_id) ).to.be.rejectedWith(this.stubbedError) }) }) describe('getRawDoc', function () { beforeEach(function () { - this.DocManager.promises._getDoc = sinon.stub() + this.DocManager._getDoc = sinon.stub() this.doc = { lines: ['2134'] } }) it('should call get doc with a quick filter', async function () { - this.DocManager.promises._getDoc.resolves(this.doc) - const doc = await this.DocManager.promises.getDocLines( + this.DocManager._getDoc.resolves(this.doc) + const content = await this.DocManager.getDocLines( this.project_id, this.doc_id ) - doc.should.equal(this.doc) - this.DocManager.promises._getDoc + content.should.equal(this.doc.lines.join('\n')) + this.DocManager._getDoc .calledWith(this.project_id, this.doc_id, { lines: true, inS3: true, @@ -108,11 +102,46 @@ describe('DocManager', function () { }) it('should return error when get doc errors', async function () { - this.DocManager.promises._getDoc.rejects(this.stubbedError) + this.DocManager._getDoc.rejects(this.stubbedError) await expect( - this.DocManager.promises.getDocLines(this.project_id, this.doc_id) + this.DocManager.getDocLines(this.project_id, this.doc_id) ).to.be.rejectedWith(this.stubbedError) }) + + it('should return error when get doc does not exist', async function () { + this.DocManager._getDoc.resolves(null) + await expect( + this.DocManager.getDocLines(this.project_id, this.doc_id) + 
).to.be.rejectedWith(Errors.NotFoundError) + }) + + it('should return error when get doc has no lines', async function () { + this.DocManager._getDoc.resolves({}) + await expect( + this.DocManager.getDocLines(this.project_id, this.doc_id) + ).to.be.rejectedWith(Errors.DocWithoutLinesError) + }) + }) + + describe('_getDoc', function () { + it('should return error when get doc does not exist', async function () { + this.MongoManager.findDoc.resolves(null) + await expect( + this.DocManager._getDoc(this.project_id, this.doc_id, { inS3: true }) + ).to.be.rejectedWith(Errors.NotFoundError) + }) + + it('should fix comment ids', async function () { + this.MongoManager.findDoc.resolves({ + _id: this.doc_id, + ranges: {}, + }) + await this.DocManager._getDoc(this.project_id, this.doc_id, { + inS3: true, + ranges: true, + }) + expect(this.RangeManager.fixCommentIds).to.have.been.called + }) }) describe('getDoc', function () { @@ -128,26 +157,25 @@ describe('DocManager', function () { describe('when using a filter', function () { beforeEach(function () { - this.MongoManager.promises.findDoc.resolves(this.doc) + this.MongoManager.findDoc.resolves(this.doc) }) it('should error if inS3 is not set to true', async function () { await expect( - this.DocManager.promises._getDoc(this.project_id, this.doc_id, { + this.DocManager._getDoc(this.project_id, this.doc_id, { inS3: false, }) ).to.be.rejected }) it('should always get inS3 even when no filter is passed', async function () { - await expect( - this.DocManager.promises._getDoc(this.project_id, this.doc_id) - ).to.be.rejected - this.MongoManager.promises.findDoc.called.should.equal(false) + await expect(this.DocManager._getDoc(this.project_id, this.doc_id)).to + .be.rejected + this.MongoManager.findDoc.called.should.equal(false) }) it('should not error if inS3 is set to true', async function () { - await this.DocManager.promises._getDoc(this.project_id, this.doc_id, { + await this.DocManager._getDoc(this.project_id, this.doc_id, { inS3: true, }) }) @@ -155,8 +183,8 @@ describe('DocManager', function () { describe('when the doc is in the doc collection', function () { beforeEach(async function () { - this.MongoManager.promises.findDoc.resolves(this.doc) - this.result = await this.DocManager.promises._getDoc( + this.MongoManager.findDoc.resolves(this.doc) + this.result = await this.DocManager._getDoc( this.project_id, this.doc_id, { version: true, inS3: true } @@ -164,7 +192,7 @@ describe('DocManager', function () { }) it('should get the doc from the doc collection', function () { - this.MongoManager.promises.findDoc + this.MongoManager.findDoc .calledWith(this.project_id, this.doc_id) .should.equal(true) }) @@ -177,9 +205,9 @@ describe('DocManager', function () { describe('when MongoManager.findDoc errors', function () { it('should return the error', async function () { - this.MongoManager.promises.findDoc.rejects(this.stubbedError) + this.MongoManager.findDoc.rejects(this.stubbedError) await expect( - this.DocManager.promises._getDoc(this.project_id, this.doc_id, { + this.DocManager._getDoc(this.project_id, this.doc_id, { version: true, inS3: true, }) @@ -202,15 +230,15 @@ describe('DocManager', function () { version: 2, inS3: false, } - this.MongoManager.promises.findDoc.resolves(this.doc) - this.DocArchiveManager.promises.unarchiveDoc.callsFake( + this.MongoManager.findDoc.resolves(this.doc) + this.DocArchiveManager.unarchiveDoc.callsFake( async (projectId, docId) => { - this.MongoManager.promises.findDoc.resolves({ + this.MongoManager.findDoc.resolves({ 
...this.unarchivedDoc, }) } ) - this.result = await this.DocManager.promises._getDoc( + this.result = await this.DocManager._getDoc( this.project_id, this.doc_id, { @@ -221,13 +249,13 @@ describe('DocManager', function () { }) it('should call the DocArchive to unarchive the doc', function () { - this.DocArchiveManager.promises.unarchiveDoc + this.DocArchiveManager.unarchiveDoc .calledWith(this.project_id, this.doc_id) .should.equal(true) }) it('should look up the doc twice', function () { - this.MongoManager.promises.findDoc.calledTwice.should.equal(true) + this.MongoManager.findDoc.calledTwice.should.equal(true) }) it('should return the doc', function () { @@ -239,9 +267,9 @@ describe('DocManager', function () { describe('when the doc does not exist in the docs collection', function () { it('should return a NotFoundError', async function () { - this.MongoManager.promises.findDoc.resolves(null) + this.MongoManager.findDoc.resolves(null) await expect( - this.DocManager.promises._getDoc(this.project_id, this.doc_id, { + this.DocManager._getDoc(this.project_id, this.doc_id, { version: true, inS3: true, }) @@ -262,23 +290,27 @@ describe('DocManager', function () { lines: ['mock-lines'], }, ] - this.MongoManager.promises.getProjectsDocs.resolves(this.docs) - this.DocArchiveManager.promises.unArchiveAllDocs.resolves(this.docs) - this.filter = { lines: true } - this.result = await this.DocManager.promises.getAllNonDeletedDocs( + this.MongoManager.getProjectsDocs.resolves(this.docs) + this.DocArchiveManager.unArchiveAllDocs.resolves(this.docs) + this.filter = { lines: true, ranges: true } + this.result = await this.DocManager.getAllNonDeletedDocs( this.project_id, this.filter ) }) it('should get the project from the database', function () { - this.MongoManager.promises.getProjectsDocs.should.have.been.calledWith( + this.MongoManager.getProjectsDocs.should.have.been.calledWith( this.project_id, { include_deleted: false }, this.filter ) }) + it('should fix comment ids', async function () { + expect(this.RangeManager.fixCommentIds).to.have.been.called + }) + it('should return the docs', function () { expect(this.result).to.deep.equal(this.docs) }) @@ -286,13 +318,10 @@ describe('DocManager', function () { describe('when there are no docs for the project', function () { it('should return a NotFoundError', async function () { - this.MongoManager.promises.getProjectsDocs.resolves(null) - this.DocArchiveManager.promises.unArchiveAllDocs.resolves(null) + this.MongoManager.getProjectsDocs.resolves(null) + this.DocArchiveManager.unArchiveAllDocs.resolves(null) await expect( - this.DocManager.promises.getAllNonDeletedDocs( - this.project_id, - this.filter - ) + this.DocManager.getAllNonDeletedDocs(this.project_id, this.filter) ).to.be.rejectedWith(`No docs for project ${this.project_id}`) }) }) @@ -303,7 +332,7 @@ describe('DocManager', function () { beforeEach(function () { this.lines = ['mock', 'doc', 'lines'] this.rev = 77 - this.MongoManager.promises.findDoc.resolves({ + this.MongoManager.findDoc.resolves({ _id: new ObjectId(this.doc_id), }) this.meta = {} @@ -311,7 +340,7 @@ describe('DocManager', function () { describe('standard path', function () { beforeEach(async function () { - await this.DocManager.promises.patchDoc( + await this.DocManager.patchDoc( this.project_id, this.doc_id, this.meta @@ -319,14 +348,14 @@ describe('DocManager', function () { }) it('should get the doc', function () { - expect(this.MongoManager.promises.findDoc).to.have.been.calledWith( + 
expect(this.MongoManager.findDoc).to.have.been.calledWith( this.project_id, this.doc_id ) }) it('should persist the meta', function () { - expect(this.MongoManager.promises.patchDoc).to.have.been.calledWith( + expect(this.MongoManager.patchDoc).to.have.been.calledWith( this.project_id, this.doc_id, this.meta @@ -339,7 +368,7 @@ describe('DocManager', function () { this.settings.docstore.archiveOnSoftDelete = false this.meta.deleted = true - await this.DocManager.promises.patchDoc( + await this.DocManager.patchDoc( this.project_id, this.doc_id, this.meta @@ -347,8 +376,7 @@ describe('DocManager', function () { }) it('should not flush the doc out of mongo', function () { - expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been - .called + expect(this.DocArchiveManager.archiveDoc).to.not.have.been.called }) }) @@ -356,7 +384,7 @@ describe('DocManager', function () { beforeEach(async function () { this.settings.docstore.archiveOnSoftDelete = false this.meta.deleted = false - await this.DocManager.promises.patchDoc( + await this.DocManager.patchDoc( this.project_id, this.doc_id, this.meta @@ -364,8 +392,7 @@ describe('DocManager', function () { }) it('should not flush the doc out of mongo', function () { - expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been - .called + expect(this.DocArchiveManager.archiveDoc).to.not.have.been.called }) }) @@ -377,7 +404,7 @@ describe('DocManager', function () { describe('when the background flush succeeds', function () { beforeEach(async function () { - await this.DocManager.promises.patchDoc( + await this.DocManager.patchDoc( this.project_id, this.doc_id, this.meta @@ -389,17 +416,18 @@ describe('DocManager', function () { }) it('should flush the doc out of mongo', function () { - expect( - this.DocArchiveManager.promises.archiveDoc - ).to.have.been.calledWith(this.project_id, this.doc_id) + expect(this.DocArchiveManager.archiveDoc).to.have.been.calledWith( + this.project_id, + this.doc_id + ) }) }) describe('when the background flush fails', function () { beforeEach(async function () { this.err = new Error('foo') - this.DocArchiveManager.promises.archiveDoc.rejects(this.err) - await this.DocManager.promises.patchDoc( + this.DocArchiveManager.archiveDoc.rejects(this.err) + await this.DocManager.patchDoc( this.project_id, this.doc_id, this.meta @@ -422,9 +450,9 @@ describe('DocManager', function () { describe('when the doc does not exist', function () { it('should return a NotFoundError', async function () { - this.MongoManager.promises.findDoc.resolves(null) + this.MongoManager.findDoc.resolves(null) await expect( - this.DocManager.promises.patchDoc(this.project_id, this.doc_id, {}) + this.DocManager.patchDoc(this.project_id, this.doc_id, {}) ).to.be.rejectedWith( `No such project/doc to delete: ${this.project_id}/${this.doc_id}` ) @@ -470,13 +498,13 @@ describe('DocManager', function () { ranges: this.originalRanges, } - this.DocManager.promises._getDoc = sinon.stub() + this.DocManager._getDoc = sinon.stub() }) describe('when only the doc lines have changed', function () { beforeEach(async function () { - this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) - this.result = await this.DocManager.promises.updateDoc( + this.DocManager._getDoc = sinon.stub().resolves(this.doc) + this.result = await this.DocManager.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -486,7 +514,7 @@ describe('DocManager', function () { }) it('should get the existing doc', function () { - this.DocManager.promises._getDoc + 
this.DocManager._getDoc .calledWith(this.project_id, this.doc_id, { version: true, rev: true, @@ -498,7 +526,7 @@ describe('DocManager', function () { }) it('should upsert the document to the doc collection', function () { - this.MongoManager.promises.upsertIntoDocCollection + this.MongoManager.upsertIntoDocCollection .calledWith(this.project_id, this.doc_id, this.rev, { lines: this.newDocLines, }) @@ -512,9 +540,9 @@ describe('DocManager', function () { describe('when the doc ranges have changed', function () { beforeEach(async function () { - this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) + this.DocManager._getDoc = sinon.stub().resolves(this.doc) this.RangeManager.shouldUpdateRanges.returns(true) - this.result = await this.DocManager.promises.updateDoc( + this.result = await this.DocManager.updateDoc( this.project_id, this.doc_id, this.oldDocLines, @@ -524,7 +552,7 @@ describe('DocManager', function () { }) it('should upsert the ranges', function () { - this.MongoManager.promises.upsertIntoDocCollection + this.MongoManager.upsertIntoDocCollection .calledWith(this.project_id, this.doc_id, this.rev, { ranges: this.newRanges, }) @@ -538,8 +566,8 @@ describe('DocManager', function () { describe('when only the version has changed', function () { beforeEach(async function () { - this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) - this.result = await this.DocManager.promises.updateDoc( + this.DocManager._getDoc = sinon.stub().resolves(this.doc) + this.result = await this.DocManager.updateDoc( this.project_id, this.doc_id, this.oldDocLines, @@ -549,7 +577,7 @@ describe('DocManager', function () { }) it('should update the version', function () { - this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith( + this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith( this.project_id, this.doc_id, this.rev, @@ -564,8 +592,8 @@ describe('DocManager', function () { describe('when the doc has not changed at all', function () { beforeEach(async function () { - this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) - this.result = await this.DocManager.promises.updateDoc( + this.DocManager._getDoc = sinon.stub().resolves(this.doc) + this.result = await this.DocManager.updateDoc( this.project_id, this.doc_id, this.oldDocLines, @@ -575,9 +603,7 @@ describe('DocManager', function () { }) it('should not update the ranges or lines or version', function () { - this.MongoManager.promises.upsertIntoDocCollection.called.should.equal( - false - ) + this.MongoManager.upsertIntoDocCollection.called.should.equal(false) }) it('should return the old rev and modified == false', function () { @@ -588,7 +614,7 @@ describe('DocManager', function () { describe('when the version is null', function () { it('should return an error', async function () { await expect( - this.DocManager.promises.updateDoc( + this.DocManager.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -602,7 +628,7 @@ describe('DocManager', function () { describe('when the lines are null', function () { it('should return an error', async function () { await expect( - this.DocManager.promises.updateDoc( + this.DocManager.updateDoc( this.project_id, this.doc_id, null, @@ -616,7 +642,7 @@ describe('DocManager', function () { describe('when the ranges are null', function () { it('should return an error', async function () { await expect( - this.DocManager.promises.updateDoc( + this.DocManager.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -630,9 +656,9 
@@ describe('DocManager', function () { describe('when there is a generic error getting the doc', function () { beforeEach(async function () { this.error = new Error('doc could not be found') - this.DocManager.promises._getDoc = sinon.stub().rejects(this.error) + this.DocManager._getDoc = sinon.stub().rejects(this.error) await expect( - this.DocManager.promises.updateDoc( + this.DocManager.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -643,16 +669,15 @@ describe('DocManager', function () { }) it('should not upsert the document to the doc collection', function () { - this.MongoManager.promises.upsertIntoDocCollection.should.not.have.been - .called + this.MongoManager.upsertIntoDocCollection.should.not.have.been.called }) }) describe('when the version was decremented', function () { it('should return an error', async function () { - this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) + this.DocManager._getDoc = sinon.stub().resolves(this.doc) await expect( - this.DocManager.promises.updateDoc( + this.DocManager.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -665,8 +690,8 @@ describe('DocManager', function () { describe('when the doc lines have not changed', function () { beforeEach(async function () { - this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) - this.result = await this.DocManager.promises.updateDoc( + this.DocManager._getDoc = sinon.stub().resolves(this.doc) + this.result = await this.DocManager.updateDoc( this.project_id, this.doc_id, this.oldDocLines.slice(), @@ -676,9 +701,7 @@ describe('DocManager', function () { }) it('should not update the doc', function () { - this.MongoManager.promises.upsertIntoDocCollection.called.should.equal( - false - ) + this.MongoManager.upsertIntoDocCollection.called.should.equal(false) }) it('should return the existing rev', function () { @@ -688,8 +711,8 @@ describe('DocManager', function () { describe('when the doc does not exist', function () { beforeEach(async function () { - this.DocManager.promises._getDoc = sinon.stub().resolves(null) - this.result = await this.DocManager.promises.updateDoc( + this.DocManager._getDoc = sinon.stub().resolves(null) + this.result = await this.DocManager.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -699,7 +722,7 @@ describe('DocManager', function () { }) it('should upsert the document to the doc collection', function () { - this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith( + this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith( this.project_id, this.doc_id, undefined, @@ -718,12 +741,12 @@ describe('DocManager', function () { describe('when another update is racing', function () { beforeEach(async function () { - this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) - this.MongoManager.promises.upsertIntoDocCollection + this.DocManager._getDoc = sinon.stub().resolves(this.doc) + this.MongoManager.upsertIntoDocCollection .onFirstCall() .rejects(new Errors.DocRevValueError()) this.RangeManager.shouldUpdateRanges.returns(true) - this.result = await this.DocManager.promises.updateDoc( + this.result = await this.DocManager.updateDoc( this.project_id, this.doc_id, this.newDocLines, @@ -733,7 +756,7 @@ describe('DocManager', function () { }) it('should upsert the doc twice', function () { - this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith( + this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith( this.project_id, this.doc_id, this.rev, 
@@ -743,8 +766,7 @@ describe('DocManager', function () { version: this.version + 1, } ) - this.MongoManager.promises.upsertIntoDocCollection.should.have.been - .calledTwice + this.MongoManager.upsertIntoDocCollection.should.have.been.calledTwice }) it('should return the new rev', function () { diff --git a/services/docstore/test/unit/js/HttpControllerTests.js b/services/docstore/test/unit/js/HttpControllerTests.js index bf78696890..ab491ec150 100644 --- a/services/docstore/test/unit/js/HttpControllerTests.js +++ b/services/docstore/test/unit/js/HttpControllerTests.js @@ -14,7 +14,7 @@ describe('HttpController', function () { max_doc_length: 2 * 1024 * 1024, } this.DocArchiveManager = { - unArchiveAllDocs: sinon.stub().yields(), + unArchiveAllDocs: sinon.stub().returns(), } this.DocManager = {} this.HttpController = SandboxedModule.require(modulePath, { @@ -54,15 +54,13 @@ describe('HttpController', function () { describe('getDoc', function () { describe('without deleted docs', function () { - beforeEach(function () { + beforeEach(async function () { this.req.params = { project_id: this.projectId, doc_id: this.docId, } - this.DocManager.getFullDoc = sinon - .stub() - .callsArgWith(2, null, this.doc) - this.HttpController.getDoc(this.req, this.res, this.next) + this.DocManager.getFullDoc = sinon.stub().resolves(this.doc) + await this.HttpController.getDoc(this.req, this.res, this.next) }) it('should get the document with the version (including deleted)', function () { @@ -89,26 +87,24 @@ describe('HttpController', function () { project_id: this.projectId, doc_id: this.docId, } - this.DocManager.getFullDoc = sinon - .stub() - .callsArgWith(2, null, this.deletedDoc) + this.DocManager.getFullDoc = sinon.stub().resolves(this.deletedDoc) }) - it('should get the doc from the doc manager', function () { - this.HttpController.getDoc(this.req, this.res, this.next) + it('should get the doc from the doc manager', async function () { + await this.HttpController.getDoc(this.req, this.res, this.next) this.DocManager.getFullDoc .calledWith(this.projectId, this.docId) .should.equal(true) }) - it('should return 404 if the query string delete is not set ', function () { - this.HttpController.getDoc(this.req, this.res, this.next) + it('should return 404 if the query string delete is not set ', async function () { + await this.HttpController.getDoc(this.req, this.res, this.next) this.res.sendStatus.calledWith(404).should.equal(true) }) - it('should return the doc as JSON if include_deleted is set to true', function () { + it('should return the doc as JSON if include_deleted is set to true', async function () { this.req.query.include_deleted = 'true' - this.HttpController.getDoc(this.req, this.res, this.next) + await this.HttpController.getDoc(this.req, this.res, this.next) this.res.json .calledWith({ _id: this.docId, @@ -123,13 +119,15 @@ describe('HttpController', function () { }) describe('getRawDoc', function () { - beforeEach(function () { + beforeEach(async function () { this.req.params = { project_id: this.projectId, doc_id: this.docId, } - this.DocManager.getDocLines = sinon.stub().callsArgWith(2, null, this.doc) - this.HttpController.getRawDoc(this.req, this.res, this.next) + this.DocManager.getDocLines = sinon + .stub() + .resolves(this.doc.lines.join('\n')) + await this.HttpController.getRawDoc(this.req, this.res, this.next) }) it('should get the document without the version', function () { @@ -154,7 +152,7 @@ describe('HttpController', function () { describe('getAllDocs', function () { 
describe('normally', function () { - beforeEach(function () { + beforeEach(async function () { this.req.params = { project_id: this.projectId } this.docs = [ { @@ -168,10 +166,8 @@ describe('HttpController', function () { rev: 4, }, ] - this.DocManager.getAllNonDeletedDocs = sinon - .stub() - .callsArgWith(2, null, this.docs) - this.HttpController.getAllDocs(this.req, this.res, this.next) + this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs) + await this.HttpController.getAllDocs(this.req, this.res, this.next) }) it('should get all the (non-deleted) docs', function () { @@ -199,7 +195,7 @@ describe('HttpController', function () { }) describe('with null lines', function () { - beforeEach(function () { + beforeEach(async function () { this.req.params = { project_id: this.projectId } this.docs = [ { @@ -213,10 +209,8 @@ describe('HttpController', function () { rev: 4, }, ] - this.DocManager.getAllNonDeletedDocs = sinon - .stub() - .callsArgWith(2, null, this.docs) - this.HttpController.getAllDocs(this.req, this.res, this.next) + this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs) + await this.HttpController.getAllDocs(this.req, this.res, this.next) }) it('should return the doc with fallback lines', function () { @@ -238,7 +232,7 @@ describe('HttpController', function () { }) describe('with a null doc', function () { - beforeEach(function () { + beforeEach(async function () { this.req.params = { project_id: this.projectId } this.docs = [ { @@ -253,10 +247,8 @@ describe('HttpController', function () { rev: 4, }, ] - this.DocManager.getAllNonDeletedDocs = sinon - .stub() - .callsArgWith(2, null, this.docs) - this.HttpController.getAllDocs(this.req, this.res, this.next) + this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs) + await this.HttpController.getAllDocs(this.req, this.res, this.next) }) it('should return the non null docs as JSON', function () { @@ -292,7 +284,7 @@ describe('HttpController', function () { describe('getAllRanges', function () { describe('normally', function () { - beforeEach(function () { + beforeEach(async function () { this.req.params = { project_id: this.projectId } this.docs = [ { @@ -304,10 +296,8 @@ describe('HttpController', function () { ranges: { mock_ranges: 'two' }, }, ] - this.DocManager.getAllNonDeletedDocs = sinon - .stub() - .callsArgWith(2, null, this.docs) - this.HttpController.getAllRanges(this.req, this.res, this.next) + this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs) + await this.HttpController.getAllRanges(this.req, this.res, this.next) }) it('should get all the (non-deleted) doc ranges', function () { @@ -342,16 +332,17 @@ describe('HttpController', function () { }) describe('when the doc lines exist and were updated', function () { - beforeEach(function () { + beforeEach(async function () { this.req.body = { lines: (this.lines = ['hello', 'world']), version: (this.version = 42), ranges: (this.ranges = { changes: 'mock' }), } + this.rev = 5 this.DocManager.updateDoc = sinon .stub() - .yields(null, true, (this.rev = 5)) - this.HttpController.updateDoc(this.req, this.res, this.next) + .resolves({ modified: true, rev: this.rev }) + await this.HttpController.updateDoc(this.req, this.res, this.next) }) it('should update the document', function () { @@ -374,16 +365,17 @@ describe('HttpController', function () { }) describe('when the doc lines exist and were not updated', function () { - beforeEach(function () { + beforeEach(async function () { this.req.body = { lines: 
(this.lines = ['hello', 'world']), version: (this.version = 42), ranges: {}, } + this.rev = 5 this.DocManager.updateDoc = sinon .stub() - .yields(null, false, (this.rev = 5)) - this.HttpController.updateDoc(this.req, this.res, this.next) + .resolves({ modified: false, rev: this.rev }) + await this.HttpController.updateDoc(this.req, this.res, this.next) }) it('should return a modified status', function () { @@ -394,10 +386,12 @@ describe('HttpController', function () { }) describe('when the doc lines are not provided', function () { - beforeEach(function () { + beforeEach(async function () { this.req.body = { version: 42, ranges: {} } - this.DocManager.updateDoc = sinon.stub().yields(null, false) - this.HttpController.updateDoc(this.req, this.res, this.next) + this.DocManager.updateDoc = sinon + .stub() + .resolves({ modified: false, rev: 0 }) + await this.HttpController.updateDoc(this.req, this.res, this.next) }) it('should not update the document', function () { @@ -410,10 +404,12 @@ describe('HttpController', function () { }) describe('when the doc version are not provided', function () { - beforeEach(function () { + beforeEach(async function () { this.req.body = { version: 42, lines: ['hello world'] } - this.DocManager.updateDoc = sinon.stub().yields(null, false) - this.HttpController.updateDoc(this.req, this.res, this.next) + this.DocManager.updateDoc = sinon + .stub() + .resolves({ modified: false, rev: 0 }) + await this.HttpController.updateDoc(this.req, this.res, this.next) }) it('should not update the document', function () { @@ -426,10 +422,12 @@ describe('HttpController', function () { }) describe('when the doc ranges is not provided', function () { - beforeEach(function () { + beforeEach(async function () { this.req.body = { lines: ['foo'], version: 42 } - this.DocManager.updateDoc = sinon.stub().yields(null, false) - this.HttpController.updateDoc(this.req, this.res, this.next) + this.DocManager.updateDoc = sinon + .stub() + .resolves({ modified: false, rev: 0 }) + await this.HttpController.updateDoc(this.req, this.res, this.next) }) it('should not update the document', function () { @@ -442,13 +440,20 @@ describe('HttpController', function () { }) describe('when the doc body is too large', function () { - beforeEach(function () { + beforeEach(async function () { this.req.body = { lines: (this.lines = Array(2049).fill('a'.repeat(1024))), version: (this.version = 42), ranges: (this.ranges = { changes: 'mock' }), } - this.HttpController.updateDoc(this.req, this.res, this.next) + this.DocManager.updateDoc = sinon + .stub() + .resolves({ modified: false, rev: 0 }) + await this.HttpController.updateDoc(this.req, this.res, this.next) + }) + + it('should not update the document', function () { + this.DocManager.updateDoc.called.should.equal(false) }) it('should return a 413 (too large) response', function () { @@ -462,14 +467,14 @@ describe('HttpController', function () { }) describe('patchDoc', function () { - beforeEach(function () { + beforeEach(async function () { this.req.params = { project_id: this.projectId, doc_id: this.docId, } this.req.body = { name: 'foo.tex' } - this.DocManager.patchDoc = sinon.stub().yields(null) - this.HttpController.patchDoc(this.req, this.res, this.next) + this.DocManager.patchDoc = sinon.stub().resolves() + await this.HttpController.patchDoc(this.req, this.res, this.next) }) it('should delete the document', function () { @@ -484,11 +489,11 @@ describe('HttpController', function () { }) describe('with an invalid payload', function () { - 
beforeEach(function () { + beforeEach(async function () { this.req.body = { cannot: 'happen' } - this.DocManager.patchDoc = sinon.stub().yields(null) - this.HttpController.patchDoc(this.req, this.res, this.next) + this.DocManager.patchDoc = sinon.stub().resolves() + await this.HttpController.patchDoc(this.req, this.res, this.next) }) it('should log a message', function () { @@ -509,10 +514,10 @@ describe('HttpController', function () { }) describe('archiveAllDocs', function () { - beforeEach(function () { + beforeEach(async function () { this.req.params = { project_id: this.projectId } - this.DocArchiveManager.archiveAllDocs = sinon.stub().callsArg(1) - this.HttpController.archiveAllDocs(this.req, this.res, this.next) + this.DocArchiveManager.archiveAllDocs = sinon.stub().resolves() + await this.HttpController.archiveAllDocs(this.req, this.res, this.next) }) it('should archive the project', function () { @@ -532,9 +537,12 @@ describe('HttpController', function () { }) describe('on success', function () { - beforeEach(function (done) { - this.res.sendStatus.callsFake(() => done()) - this.HttpController.unArchiveAllDocs(this.req, this.res, this.next) + beforeEach(async function () { + await this.HttpController.unArchiveAllDocs( + this.req, + this.res, + this.next + ) }) it('returns a 200', function () { @@ -543,12 +551,15 @@ describe('HttpController', function () { }) describe("when the archived rev doesn't match", function () { - beforeEach(function (done) { - this.res.sendStatus.callsFake(() => done()) - this.DocArchiveManager.unArchiveAllDocs.yields( + beforeEach(async function () { + this.DocArchiveManager.unArchiveAllDocs.rejects( new Errors.DocRevValueError('bad rev') ) - this.HttpController.unArchiveAllDocs(this.req, this.res, this.next) + await this.HttpController.unArchiveAllDocs( + this.req, + this.res, + this.next + ) }) it('returns a 409', function () { @@ -558,10 +569,10 @@ describe('HttpController', function () { }) describe('destroyProject', function () { - beforeEach(function () { + beforeEach(async function () { this.req.params = { project_id: this.projectId } - this.DocArchiveManager.destroyProject = sinon.stub().callsArg(1) - this.HttpController.destroyProject(this.req, this.res, this.next) + this.DocArchiveManager.destroyProject = sinon.stub().resolves() + await this.HttpController.destroyProject(this.req, this.res, this.next) }) it('should destroy the docs', function () { diff --git a/services/docstore/test/unit/js/MongoManagerTests.js b/services/docstore/test/unit/js/MongoManagerTests.js index 4f8467db76..b96b661df4 100644 --- a/services/docstore/test/unit/js/MongoManagerTests.js +++ b/services/docstore/test/unit/js/MongoManagerTests.js @@ -41,7 +41,7 @@ describe('MongoManager', function () { this.doc = { name: 'mock-doc' } this.db.docs.findOne = sinon.stub().resolves(this.doc) this.filter = { lines: true } - this.result = await this.MongoManager.promises.findDoc( + this.result = await this.MongoManager.findDoc( this.projectId, this.docId, this.filter @@ -70,11 +70,7 @@ describe('MongoManager', function () { describe('patchDoc', function () { beforeEach(async function () { this.meta = { name: 'foo.tex' } - await this.MongoManager.promises.patchDoc( - this.projectId, - this.docId, - this.meta - ) + await this.MongoManager.patchDoc(this.projectId, this.docId, this.meta) }) it('should pass the parameter along', function () { @@ -104,7 +100,7 @@ describe('MongoManager', function () { describe('with included_deleted = false', function () { beforeEach(async function () { - 
this.result = await this.MongoManager.promises.getProjectsDocs( + this.result = await this.MongoManager.getProjectsDocs( this.projectId, { include_deleted: false }, this.filter @@ -132,7 +128,7 @@ describe('MongoManager', function () { describe('with included_deleted = true', function () { beforeEach(async function () { - this.result = await this.MongoManager.promises.getProjectsDocs( + this.result = await this.MongoManager.getProjectsDocs( this.projectId, { include_deleted: true }, this.filter @@ -167,7 +163,7 @@ describe('MongoManager', function () { this.db.docs.find = sinon.stub().returns({ toArray: sinon.stub().resolves([this.doc1, this.doc2, this.doc3]), }) - this.result = await this.MongoManager.promises.getProjectsDeletedDocs( + this.result = await this.MongoManager.getProjectsDeletedDocs( this.projectId, this.filter ) @@ -203,7 +199,7 @@ describe('MongoManager', function () { }) it('should upsert the document', async function () { - await this.MongoManager.promises.upsertIntoDocCollection( + await this.MongoManager.upsertIntoDocCollection( this.projectId, this.docId, this.oldRev, @@ -223,7 +219,7 @@ describe('MongoManager', function () { it('should handle update error', async function () { this.db.docs.updateOne.rejects(this.stubbedErr) await expect( - this.MongoManager.promises.upsertIntoDocCollection( + this.MongoManager.upsertIntoDocCollection( this.projectId, this.docId, this.rev, @@ -235,7 +231,7 @@ describe('MongoManager', function () { }) it('should insert without a previous rev', async function () { - await this.MongoManager.promises.upsertIntoDocCollection( + await this.MongoManager.upsertIntoDocCollection( this.projectId, this.docId, null, @@ -254,7 +250,7 @@ describe('MongoManager', function () { it('should handle generic insert error', async function () { this.db.docs.insertOne.rejects(this.stubbedErr) await expect( - this.MongoManager.promises.upsertIntoDocCollection( + this.MongoManager.upsertIntoDocCollection( this.projectId, this.docId, null, @@ -266,7 +262,7 @@ describe('MongoManager', function () { it('should handle duplicate insert error', async function () { this.db.docs.insertOne.rejects({ code: 11000 }) await expect( - this.MongoManager.promises.upsertIntoDocCollection( + this.MongoManager.upsertIntoDocCollection( this.projectId, this.docId, null, @@ -280,7 +276,7 @@ describe('MongoManager', function () { beforeEach(async function () { this.projectId = new ObjectId() this.db.docs.deleteMany = sinon.stub().resolves() - await this.MongoManager.promises.destroyProject(this.projectId) + await this.MongoManager.destroyProject(this.projectId) }) it('should destroy all docs', function () { @@ -297,13 +293,13 @@ describe('MongoManager', function () { it('should not error when the rev has not changed', async function () { this.db.docs.findOne = sinon.stub().resolves({ rev: 1 }) - await this.MongoManager.promises.checkRevUnchanged(this.doc) + await this.MongoManager.checkRevUnchanged(this.doc) }) it('should return an error when the rev has changed', async function () { this.db.docs.findOne = sinon.stub().resolves({ rev: 2 }) await expect( - this.MongoManager.promises.checkRevUnchanged(this.doc) + this.MongoManager.checkRevUnchanged(this.doc) ).to.be.rejectedWith(Errors.DocModifiedError) }) @@ -311,14 +307,14 @@ describe('MongoManager', function () { this.db.docs.findOne = sinon.stub().resolves({ rev: 2 }) this.doc = { _id: new ObjectId(), name: 'mock-doc', rev: NaN } await expect( - this.MongoManager.promises.checkRevUnchanged(this.doc) + 
this.MongoManager.checkRevUnchanged(this.doc) ).to.be.rejectedWith(Errors.DocRevValueError) }) it('should return a value error if checked doc rev is NaN', async function () { this.db.docs.findOne = sinon.stub().resolves({ rev: NaN }) await expect( - this.MongoManager.promises.checkRevUnchanged(this.doc) + this.MongoManager.checkRevUnchanged(this.doc) ).to.be.rejectedWith(Errors.DocRevValueError) }) }) @@ -334,7 +330,7 @@ describe('MongoManager', function () { describe('complete doc', function () { beforeEach(async function () { - await this.MongoManager.promises.restoreArchivedDoc( + await this.MongoManager.restoreArchivedDoc( this.projectId, this.docId, this.archivedDoc @@ -364,7 +360,7 @@ describe('MongoManager', function () { describe('without ranges', function () { beforeEach(async function () { delete this.archivedDoc.ranges - await this.MongoManager.promises.restoreArchivedDoc( + await this.MongoManager.restoreArchivedDoc( this.projectId, this.docId, this.archivedDoc @@ -395,7 +391,7 @@ describe('MongoManager', function () { it('throws a DocRevValueError', async function () { this.db.docs.updateOne.resolves({ matchedCount: 0 }) await expect( - this.MongoManager.promises.restoreArchivedDoc( + this.MongoManager.restoreArchivedDoc( this.projectId, this.docId, this.archivedDoc diff --git a/services/docstore/test/unit/js/RangeManagerTests.js b/services/docstore/test/unit/js/RangeManagerTests.js index 7a2de7352e..ba99280a7a 100644 --- a/services/docstore/test/unit/js/RangeManagerTests.js +++ b/services/docstore/test/unit/js/RangeManagerTests.js @@ -30,7 +30,7 @@ describe('RangeManager', function () { }) describe('jsonRangesToMongo', function () { - it('should convert ObjectIds and dates to proper objects', function () { + it('should convert ObjectIds and dates to proper objects and fix comment id', function () { const changeId = new ObjectId().toString() const commentId = new ObjectId().toString() const userId = new ObjectId().toString() @@ -66,7 +66,7 @@ describe('RangeManager', function () { ], comments: [ { - id: new ObjectId(commentId), + id: new ObjectId(threadId), op: { c: 'foo', p: 3, t: new ObjectId(threadId) }, }, ], @@ -110,7 +110,6 @@ describe('RangeManager', function () { return it('should be consistent when transformed through json -> mongo -> json', function () { const changeId = new ObjectId().toString() - const commentId = new ObjectId().toString() const userId = new ObjectId().toString() const threadId = new ObjectId().toString() const ts = new Date().toJSON() @@ -127,7 +126,7 @@ describe('RangeManager', function () { ], comments: [ { - id: commentId, + id: threadId, op: { c: 'foo', p: 3, t: threadId }, }, ], @@ -142,6 +141,7 @@ describe('RangeManager', function () { return describe('shouldUpdateRanges', function () { beforeEach(function () { + const threadId = new ObjectId() this.ranges = { changes: [ { @@ -155,8 +155,8 @@ describe('RangeManager', function () { ], comments: [ { - id: new ObjectId(), - op: { c: 'foo', p: 3, t: new ObjectId() }, + id: threadId, + op: { c: 'foo', p: 3, t: threadId }, }, ], } diff --git a/services/document-updater/app/js/DiffCodec.js b/services/document-updater/app/js/DiffCodec.js index 8c574cff70..17da409386 100644 --- a/services/document-updater/app/js/DiffCodec.js +++ b/services/document-updater/app/js/DiffCodec.js @@ -1,3 +1,4 @@ +const OError = require('@overleaf/o-error') const DMP = require('diff-match-patch') const { TextOperation } = require('overleaf-editor-core') const dmp = new DMP() @@ -38,23 +39,62 @@ module.exports = { return 
ops }, - diffAsHistoryV1EditOperation(before, after) { - const diffs = dmp.diff_main(before, after) + /** + * @param {import("overleaf-editor-core").StringFileData} file + * @param {string} after + * @return {TextOperation} + */ + diffAsHistoryOTEditOperation(file, after) { + const beforeWithoutTrackedDeletes = file.getContent({ + filterTrackedDeletes: true, + }) + const diffs = dmp.diff_main(beforeWithoutTrackedDeletes, after) dmp.diff_cleanupSemantic(diffs) + const trackedChanges = file.trackedChanges.asSorted() + let nextTc = trackedChanges.shift() + const op = new TextOperation() for (const diff of diffs) { - const [type, content] = diff + let [type, content] = diff if (type === this.ADDED) { op.insert(content) - } else if (type === this.REMOVED) { - op.remove(content.length) - } else if (type === this.UNCHANGED) { - op.retain(content.length) + } else if (type === this.REMOVED || type === this.UNCHANGED) { + while (op.baseLength + content.length > nextTc?.range.start) { + if (nextTc.tracking.type === 'delete') { + const untilRange = nextTc.range.start - op.baseLength + if (type === this.REMOVED) { + op.remove(untilRange) + } else if (type === this.UNCHANGED) { + op.retain(untilRange) + } + op.retain(nextTc.range.end - nextTc.range.start) + content = content.slice(untilRange) + } + nextTc = trackedChanges.shift() + } + if (type === this.REMOVED) { + op.remove(content.length) + } else if (type === this.UNCHANGED) { + op.retain(content.length) + } } else { throw new Error('Unknown type') } } + while (nextTc) { + if ( + nextTc.tracking.type !== 'delete' || + nextTc.range.start !== op.baseLength + ) { + throw new OError( + 'StringFileData.trackedChanges out of sync: unexpected range after end of diff', + { nextTc, baseLength: op.baseLength } + ) + } + op.retain(nextTc.range.end - nextTc.range.start) + nextTc = trackedChanges.shift() + } return op }, } diff --git a/services/document-updater/app/js/DocumentManager.js b/services/document-updater/app/js/DocumentManager.js index 4803056423..3fb3d10a6e 100644 --- a/services/document-updater/app/js/DocumentManager.js +++ b/services/document-updater/app/js/DocumentManager.js @@ -194,9 +194,8 @@ const DocumentManager = { let op if (type === 'history-ot') { const file = StringFileData.fromRaw(oldLines) - const operation = DiffCodec.diffAsHistoryV1EditOperation( - // TODO(24596): tc support for history-ot - file.getContent({ filterTrackedDeletes: true }), + const operation = DiffCodec.diffAsHistoryOTEditOperation( + file, newLines.join('\n') ) if (operation.isNoop()) { @@ -536,11 +535,6 @@ const DocumentManager = { if (opts.historyRangesMigration) { historyRangesSupport = opts.historyRangesMigration === 'forwards' } - if (!Array.isArray(lines)) { - const file = StringFileData.fromRaw(lines) - // TODO(24596): tc support for history-ot - lines = file.getLines() - } await ProjectHistoryRedisManager.promises.queueResyncDocContent( projectId, diff --git a/services/document-updater/app/js/HistoryManager.js b/services/document-updater/app/js/HistoryManager.js index 3963431925..d9a8459525 100644 --- a/services/document-updater/app/js/HistoryManager.js +++ b/services/document-updater/app/js/HistoryManager.js @@ -62,6 +62,7 @@ const HistoryManager = { // record updates for project history if ( HistoryManager.shouldFlushHistoryOps( + projectId, projectOpsLength, ops.length, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS @@ -77,7 +78,8 @@ const HistoryManager = { } }, - shouldFlushHistoryOps(length, opsLength, threshold) { + shouldFlushHistoryOps(projectId, length, 
opsLength, threshold) { + if (Settings.shortHistoryQueues.includes(projectId)) return true if (!length) { return false } // don't flush unless we know the length diff --git a/services/document-updater/app/js/Limits.js b/services/document-updater/app/js/Limits.js index 268ccd3f9b..cbd9293042 100644 --- a/services/document-updater/app/js/Limits.js +++ b/services/document-updater/app/js/Limits.js @@ -28,4 +28,19 @@ module.exports = { // since we didn't hit the limit in the loop, the document is within the allowed length return false }, + + /** + * @param {StringFileRawData} raw + * @param {number} maxDocLength + */ + stringFileDataContentIsTooLarge(raw, maxDocLength) { + let n = raw.content.length + if (n <= maxDocLength) return false // definitely under the limit, no need to calculate the total size + for (const tc of raw.trackedChanges ?? []) { + if (tc.tracking.type !== 'delete') continue + n -= tc.range.length + if (n <= maxDocLength) return false // under the limit now, no need to calculate the exact size + } + return true + }, } diff --git a/services/document-updater/app/js/ProjectHistoryRedisManager.js b/services/document-updater/app/js/ProjectHistoryRedisManager.js index 9a9985d99a..78e9c2ea4c 100644 --- a/services/document-updater/app/js/ProjectHistoryRedisManager.js +++ b/services/document-updater/app/js/ProjectHistoryRedisManager.js @@ -8,13 +8,14 @@ const rclient = require('@overleaf/redis-wrapper').createClient( ) const logger = require('@overleaf/logger') const metrics = require('./Metrics') -const { docIsTooLarge } = require('./Limits') +const { docIsTooLarge, stringFileDataContentIsTooLarge } = require('./Limits') const { addTrackedDeletesToContent, extractOriginOrSource } = require('./Utils') const HistoryConversions = require('./HistoryConversions') const OError = require('@overleaf/o-error') /** * @import { Ranges } from './types' + * @import { StringFileRawData } from 'overleaf-editor-core/lib/types' */ const ProjectHistoryRedisManager = { @@ -180,7 +181,7 @@ const ProjectHistoryRedisManager = { * @param {string} projectId * @param {string} projectHistoryId * @param {string} docId - * @param {string[]} lines + * @param {string[] | StringFileRawData} lines * @param {Ranges} ranges * @param {string[]} resolvedCommentIds * @param {number} version @@ -204,13 +205,8 @@ const ProjectHistoryRedisManager = { 'queue doc content resync' ) - let content = lines.join('\n') - if (historyRangesSupport) { - content = addTrackedDeletesToContent(content, ranges.changes ?? []) - } - const projectUpdate = { - resyncDocContent: { content, version }, + resyncDocContent: { version }, projectHistoryId, path: pathname, doc: docId, @@ -219,17 +215,38 @@ const ProjectHistoryRedisManager = { }, } - if (historyRangesSupport) { - projectUpdate.resyncDocContent.ranges = - HistoryConversions.toHistoryRanges(ranges) - projectUpdate.resyncDocContent.resolvedCommentIds = resolvedCommentIds + let content = '' + if (Array.isArray(lines)) { + content = lines.join('\n') + if (historyRangesSupport) { + content = addTrackedDeletesToContent(content, ranges.changes ?? 
[]) + projectUpdate.resyncDocContent.ranges = + HistoryConversions.toHistoryRanges(ranges) + projectUpdate.resyncDocContent.resolvedCommentIds = resolvedCommentIds + } + } else { + content = lines.content + projectUpdate.resyncDocContent.historyOTRanges = { + comments: lines.comments, + trackedChanges: lines.trackedChanges, + } } + projectUpdate.resyncDocContent.content = content const jsonUpdate = JSON.stringify(projectUpdate) // Do an optimised size check on the docLines using the serialised // project update length as an upper bound const sizeBound = jsonUpdate.length - if (docIsTooLarge(sizeBound, lines, Settings.max_doc_length)) { + if (Array.isArray(lines)) { + if (docIsTooLarge(sizeBound, lines, Settings.max_doc_length)) { + throw new OError( + 'blocking resync doc content insert into project history queue: doc is too large', + { projectId, docId, docSize: sizeBound } + ) + } + } else if ( + stringFileDataContentIsTooLarge(lines, Settings.max_doc_length) + ) { throw new OError( 'blocking resync doc content insert into project history queue: doc is too large', { projectId, docId, docSize: sizeBound } diff --git a/services/document-updater/app/js/ProjectManager.js b/services/document-updater/app/js/ProjectManager.js index 781ed0e168..cdd4c11482 100644 --- a/services/document-updater/app/js/ProjectManager.js +++ b/services/document-updater/app/js/ProjectManager.js @@ -317,6 +317,7 @@ function updateProjectWithLocks( } if ( HistoryManager.shouldFlushHistoryOps( + projectId, projectOpsLength, updates.length, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS diff --git a/services/document-updater/config/settings.defaults.js b/services/document-updater/config/settings.defaults.js index 0cd29d325b..9ed59de6c4 100755 --- a/services/document-updater/config/settings.defaults.js +++ b/services/document-updater/config/settings.defaults.js @@ -184,4 +184,8 @@ module.exports = { smoothingOffset: process.env.SMOOTHING_OFFSET || 1000, // milliseconds gracefulShutdownDelayInMs: parseInt(process.env.GRACEFUL_SHUTDOWN_DELAY_SECONDS ?? '10', 10) * 1000, + + shortHistoryQueues: (process.env.SHORT_HISTORY_QUEUES || '') + .split(',') + .filter(s => !!s), } diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml index 2fe97bd9b3..ca15f35fef 100644 --- a/services/document-updater/docker-compose.ci.yml +++ b/services/document-updater/docker-compose.ci.yml @@ -28,12 +28,15 @@ services: MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" + volumes: + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started redis: condition: service_healthy user: node + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance @@ -45,7 +48,7 @@ services: command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . 
user: root redis: - image: redis + image: redis:7.4.3 healthcheck: test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index 8a94d1a24c..3688d21d0b 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -26,6 +26,7 @@ services: - .:/overleaf/services/document-updater - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/document-updater environment: ELASTIC_SEARCH_DSN: es:9200 @@ -45,10 +46,11 @@ services: condition: service_started redis: condition: service_healthy + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance redis: - image: redis + image: redis:7.4.3 healthcheck: test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/document-updater/scripts/check_redis_mongo_sync_state.js b/services/document-updater/scripts/check_redis_mongo_sync_state.js index 08209400aa..51db47af4d 100644 --- a/services/document-updater/scripts/check_redis_mongo_sync_state.js +++ b/services/document-updater/scripts/check_redis_mongo_sync_state.js @@ -15,6 +15,7 @@ const request = require('requestretry').defaults({ retryDelay: 10, }) +const ONLY_PROJECT_ID = process.env.ONLY_PROJECT_ID const AUTO_FIX_VERSION_MISMATCH = process.env.AUTO_FIX_VERSION_MISMATCH === 'true' const AUTO_FIX_PARTIALLY_DELETED_DOC_METADATA = @@ -319,10 +320,12 @@ async function processProject(projectId) { * @return {Promise<{perIterationOutOfSync: number, done: boolean}>} */ async function scanOnce(processed, outOfSync) { - const projectIds = await ProjectFlusher.promises.flushAllProjects({ - limit: LIMIT, - dryRun: true, - }) + const projectIds = ONLY_PROJECT_ID + ? 
[ONLY_PROJECT_ID] + : await ProjectFlusher.promises.flushAllProjects({ + limit: LIMIT, + dryRun: true, + }) let perIterationOutOfSync = 0 for (const projectId of projectIds) { diff --git a/services/document-updater/test/acceptance/js/SettingADocumentTests.js b/services/document-updater/test/acceptance/js/SettingADocumentTests.js index fd1851a221..e1bc54dc90 100644 --- a/services/document-updater/test/acceptance/js/SettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/SettingADocumentTests.js @@ -686,4 +686,285 @@ describe('Setting a document', function () { }) }) }) + + describe('with track changes (history-ot)', function () { + const lines = ['one', 'one and a half', 'two', 'three'] + const userId = DocUpdaterClient.randomId() + const ts = new Date().toISOString() + beforeEach(function (done) { + numberOfReceivedUpdates = 0 + this.newLines = ['one', 'two', 'three'] + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + this.historyOTUpdate = { + doc: this.doc_id, + op: [ + { + textOperation: [ + 4, + { + r: 'one and a half\n'.length, + tracking: { + type: 'delete', + userId, + ts, + }, + }, + 9, + ], + }, + ], + v: this.version, + meta: { source: 'random-publicId' }, + } + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines, + version: this.version, + otMigrationStage: 1, + }) + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { + if (error) { + throw error + } + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.historyOTUpdate, + error => { + if (error) { + throw error + } + DocUpdaterClient.waitForPendingUpdates( + this.project_id, + this.doc_id, + done + ) + } + ) + }) + }) + + afterEach(function () { + MockProjectHistoryApi.flushProject.resetHistory() + MockWebApi.setDocument.resetHistory() + }) + it('should record tracked changes', function (done) { + docUpdaterRedis.get( + Keys.docLines({ doc_id: this.doc_id }), + (error, data) => { + if (error) { + throw error + } + expect(JSON.parse(data)).to.deep.equal({ + content: lines.join('\n'), + trackedChanges: [ + { + range: { + pos: 4, + length: 15, + }, + tracking: { + ts, + type: 'delete', + userId, + }, + }, + ], + }) + done() + } + ) + }) + + it('should apply the change', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, data) => { + if (error) { + throw error + } + expect(data.lines).to.deep.equal(this.newLines) + done() + } + ) + }) + const cases = [ + { + name: 'when resetting the content', + lines, + want: { + content: 'one\none and a half\none and a half\ntwo\nthree', + trackedChanges: [ + { + range: { + pos: 'one and a half\n'.length + 4, + length: 15, + }, + tracking: { + ts, + type: 'delete', + userId, + }, + }, + ], + }, + }, + { + name: 'when adding content before a tracked delete', + lines: ['one', 'INSERT', 'two', 'three'], + want: { + content: 'one\nINSERT\none and a half\ntwo\nthree', + trackedChanges: [ + { + range: { + pos: 'INSERT\n'.length + 4, + length: 15, + }, + tracking: { + ts, + type: 'delete', + userId, + }, + }, + ], + }, + }, + { + name: 'when adding content after a tracked delete', + lines: ['one', 'two', 'INSERT', 'three'], + want: { + content: 'one\none and a half\ntwo\nINSERT\nthree', + trackedChanges: [ + { + range: { + pos: 4, + length: 15, + }, + tracking: { + ts, + type: 'delete', + userId, + }, + }, + ], + }, + }, + { + name: 'when deleting content before a tracked delete', + lines: ['two', 'three'], + want: { + content: 'one and a half\ntwo\nthree', 
+ trackedChanges: [ + { + range: { + pos: 0, + length: 15, + }, + tracking: { + ts, + type: 'delete', + userId, + }, + }, + ], + }, + }, + { + name: 'when deleting content after a tracked delete', + lines: ['one', 'two'], + want: { + content: 'one\none and a half\ntwo', + trackedChanges: [ + { + range: { + pos: 4, + length: 15, + }, + tracking: { + ts, + type: 'delete', + userId, + }, + }, + ], + }, + }, + { + name: 'when deleting content immediately after a tracked delete', + lines: ['one', 'three'], + want: { + content: 'one\none and a half\nthree', + trackedChanges: [ + { + range: { + pos: 4, + length: 15, + }, + tracking: { + ts, + type: 'delete', + userId, + }, + }, + ], + }, + }, + { + name: 'when deleting content across a tracked delete', + lines: ['onethree'], + want: { + content: 'oneone and a half\nthree', + trackedChanges: [ + { + range: { + pos: 3, + length: 15, + }, + tracking: { + ts, + type: 'delete', + userId, + }, + }, + ], + }, + }, + ] + + for (const { name, lines, want } of cases) { + describe(name, function () { + beforeEach(function (done) { + DocUpdaterClient.setDocLines( + this.project_id, + this.doc_id, + lines, + this.source, + userId, + false, + (error, res, body) => { + if (error) { + return done(error) + } + this.statusCode = res.statusCode + this.body = body + done() + } + ) + }) + it('should update accordingly', function (done) { + docUpdaterRedis.get( + Keys.docLines({ doc_id: this.doc_id }), + (error, data) => { + if (error) { + throw error + } + expect(JSON.parse(data)).to.deep.equal(want) + done() + } + ) + }) + }) + } + }) }) diff --git a/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js b/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js index 2fd019d4c2..2a5fb29b6d 100644 --- a/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js +++ b/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js @@ -14,6 +14,7 @@ describe('HistoryManager', function () { requires: { request: (this.request = {}), '@overleaf/settings': (this.Settings = { + shortHistoryQueues: [], apis: { project_history: { url: 'http://project_history.example.com', @@ -118,7 +119,7 @@ describe('HistoryManager', function () { beforeEach(function () { this.HistoryManager.shouldFlushHistoryOps = sinon.stub() this.HistoryManager.shouldFlushHistoryOps - .withArgs(this.project_ops_length) + .withArgs(this.project_id, this.project_ops_length) .returns(true) this.HistoryManager.recordAndFlushHistoryOps( @@ -139,7 +140,7 @@ describe('HistoryManager', function () { beforeEach(function () { this.HistoryManager.shouldFlushHistoryOps = sinon.stub() this.HistoryManager.shouldFlushHistoryOps - .withArgs(this.project_ops_length) + .withArgs(this.project_id, this.project_ops_length) .returns(false) this.HistoryManager.recordAndFlushHistoryOps( @@ -157,6 +158,7 @@ describe('HistoryManager', function () { describe('shouldFlushHistoryOps', function () { it('should return false if the number of ops is not known', function () { this.HistoryManager.shouldFlushHistoryOps( + this.project_id, null, ['a', 'b', 'c'].length, 1 @@ -168,6 +170,7 @@ describe('HistoryManager', function () { // Previously we were on 11 ops // We didn't pass over a multiple of 5 this.HistoryManager.shouldFlushHistoryOps( + this.project_id, 14, ['a', 'b', 'c'].length, 5 @@ -178,6 +181,7 @@ describe('HistoryManager', function () { // Previously we were on 12 ops // We've reached a new multiple of 5 this.HistoryManager.shouldFlushHistoryOps( + 
this.project_id, 15, ['a', 'b', 'c'].length, 5 @@ -189,11 +193,22 @@ describe('HistoryManager', function () { // Previously we were on 16 ops // We didn't pass over a multiple of 5 this.HistoryManager.shouldFlushHistoryOps( + this.project_id, 17, ['a', 'b', 'c'].length, 5 ).should.equal(true) }) + + it('should return true if the project has a short queue', function () { + this.Settings.shortHistoryQueues = [this.project_id] + this.HistoryManager.shouldFlushHistoryOps( + this.project_id, + 14, + ['a', 'b', 'c'].length, + 5 + ).should.equal(true) + }) }) }) diff --git a/services/document-updater/test/unit/js/Limits/LimitsTests.js b/services/document-updater/test/unit/js/Limits/LimitsTests.js index 34a5c13c26..11ca38746a 100644 --- a/services/document-updater/test/unit/js/Limits/LimitsTests.js +++ b/services/document-updater/test/unit/js/Limits/LimitsTests.js @@ -81,4 +81,88 @@ describe('Limits', function () { }) }) }) + + describe('stringFileDataContentIsTooLarge', function () { + it('should handle small docs', function () { + expect( + this.Limits.stringFileDataContentIsTooLarge({ content: '' }, 123) + ).to.equal(false) + }) + it('should handle docs at the limit', function () { + expect( + this.Limits.stringFileDataContentIsTooLarge( + { content: 'x'.repeat(123) }, + 123 + ) + ).to.equal(false) + }) + it('should handle docs above the limit', function () { + expect( + this.Limits.stringFileDataContentIsTooLarge( + { content: 'x'.repeat(123 + 1) }, + 123 + ) + ).to.equal(true) + }) + it('should handle docs above the limit and below with tracked-deletes removed', function () { + expect( + this.Limits.stringFileDataContentIsTooLarge( + { + content: 'x'.repeat(123 + 1), + trackedChanges: [ + { + range: { pos: 1, length: 1 }, + tracking: { + type: 'delete', + ts: '2025-06-16T14:31:44.910Z', + userId: 'user-id', + }, + }, + ], + }, + 123 + ) + ).to.equal(false) + }) + it('should handle docs above the limit and above with tracked-deletes removed', function () { + expect( + this.Limits.stringFileDataContentIsTooLarge( + { + content: 'x'.repeat(123 + 2), + trackedChanges: [ + { + range: { pos: 1, length: 1 }, + tracking: { + type: 'delete', + ts: '2025-06-16T14:31:44.910Z', + userId: 'user-id', + }, + }, + ], + }, + 123 + ) + ).to.equal(true) + }) + it('should handle docs above the limit and with tracked-inserts', function () { + expect( + this.Limits.stringFileDataContentIsTooLarge( + { + content: 'x'.repeat(123 + 1), + trackedChanges: [ + { + range: { pos: 1, length: 1 }, + tracking: { + type: 'insert', + ts: '2025-06-16T14:31:44.910Z', + userId: 'user-id', + }, + }, + ], + }, + 123 + ) + ).to.equal(true) + }) + }) }) diff --git a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js index 760385b176..ad6c121dfb 100644 --- a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js +++ b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js @@ -15,6 +15,7 @@ describe('ProjectHistoryRedisManager', function () { this.Limits = { docIsTooLarge: sinon.stub().returns(false), + stringFileDataContentIsTooLarge: sinon.stub().returns(false), } this.ProjectHistoryRedisManager = SandboxedModule.require(modulePath, { @@ -61,22 +62,18 @@ describe('ProjectHistoryRedisManager', function () { }) it('should queue an update', function () { - this.multi.rpush - .calledWithExactly( - 
`ProjectHistory:Ops:${this.project_id}`, - this.ops[0], - this.ops[1] - ) - .should.equal(true) + this.multi.rpush.should.have.been.calledWithExactly( + `ProjectHistory:Ops:${this.project_id}`, + this.ops[0], + this.ops[1] + ) }) it('should set the queue timestamp if not present', function () { - this.multi.setnx - .calledWithExactly( - `ProjectHistory:FirstOpTimestamp:${this.project_id}`, - Date.now() - ) - .should.equal(true) + this.multi.setnx.should.have.been.calledWithExactly( + `ProjectHistory:FirstOpTimestamp:${this.project_id}`, + Date.now() + ) }) }) @@ -118,9 +115,10 @@ describe('ProjectHistoryRedisManager', function () { file: this.file_id, } - this.ProjectHistoryRedisManager.promises.queueOps - .calledWithExactly(this.project_id, JSON.stringify(update)) - .should.equal(true) + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(update) + ) }) }) @@ -166,9 +164,10 @@ describe('ProjectHistoryRedisManager', function () { doc: this.doc_id, } - this.ProjectHistoryRedisManager.promises.queueOps - .calledWithExactly(this.project_id, JSON.stringify(update)) - .should.equal(true) + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(update) + ) }) it('should queue an update with file metadata', async function () { @@ -350,9 +349,10 @@ describe('ProjectHistoryRedisManager', function () { doc: this.doc_id, } - this.ProjectHistoryRedisManager.promises.queueOps - .calledWithExactly(this.project_id, JSON.stringify(update)) - .should.equal(true) + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(update) + ) }) it('should not forward ranges if history ranges support is undefined', async function () { @@ -402,9 +402,10 @@ describe('ProjectHistoryRedisManager', function () { doc: this.doc_id, } - this.ProjectHistoryRedisManager.promises.queueOps - .calledWithExactly(this.project_id, JSON.stringify(update)) - .should.equal(true) + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(update) + ) }) it('should pass "false" as the createdBlob field if not provided', async function () { @@ -432,9 +433,10 @@ describe('ProjectHistoryRedisManager', function () { doc: this.doc_id, } - this.ProjectHistoryRedisManager.promises.queueOps - .calledWithExactly(this.project_id, JSON.stringify(update)) - .should.equal(true) + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(update) + ) }) it('should pass through the value of the createdBlob field', async function () { @@ -463,9 +465,10 @@ describe('ProjectHistoryRedisManager', function () { doc: this.doc_id, } - this.ProjectHistoryRedisManager.promises.queueOps - .calledWithExactly(this.project_id, JSON.stringify(update)) - .should.equal(true) + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(update) + ) }) }) @@ -493,8 +496,8 @@ describe('ProjectHistoryRedisManager', function () { beforeEach(async function () { this.update = { resyncDocContent: { - content: 'one\ntwo', version: this.version, + content: 'one\ntwo', }, projectHistoryId: this.projectHistoryId, path: this.pathname, @@ -516,19 +519,18 @@ describe('ProjectHistoryRedisManager', function () { }) it('should check if the doc is too large', function () { - this.Limits.docIsTooLarge - 
.calledWith( - JSON.stringify(this.update).length, - this.lines, - this.settings.max_doc_length - ) - .should.equal(true) + this.Limits.docIsTooLarge.should.have.been.calledWith( + JSON.stringify(this.update).length, + this.lines, + this.settings.max_doc_length + ) }) it('should queue an update', function () { - this.ProjectHistoryRedisManager.promises.queueOps - .calledWithExactly(this.project_id, JSON.stringify(this.update)) - .should.equal(true) + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(this.update) + ) }) }) @@ -551,9 +553,8 @@ describe('ProjectHistoryRedisManager', function () { }) it('should not queue an update if the doc is too large', function () { - this.ProjectHistoryRedisManager.promises.queueOps.called.should.equal( - false - ) + this.ProjectHistoryRedisManager.promises.queueOps.should.not.have.been + .called }) }) @@ -561,10 +562,10 @@ describe('ProjectHistoryRedisManager', function () { beforeEach(async function () { this.update = { resyncDocContent: { - content: 'onedeleted\ntwo', version: this.version, ranges: this.ranges, resolvedCommentIds: this.resolvedCommentIds, + content: 'onedeleted\ntwo', }, projectHistoryId: this.projectHistoryId, path: this.pathname, @@ -601,9 +602,76 @@ describe('ProjectHistoryRedisManager', function () { }) it('should queue an update', function () { - this.ProjectHistoryRedisManager.promises.queueOps - .calledWithExactly(this.project_id, JSON.stringify(this.update)) - .should.equal(true) + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(this.update) + ) + }) + }) + + describe('history-ot', function () { + beforeEach(async function () { + this.lines = { + content: 'onedeleted\ntwo', + comments: [{ id: 'id1', ranges: [{ pos: 0, length: 3 }] }], + trackedChanges: [ + { + range: { pos: 3, length: 7 }, + tracking: { + type: 'delete', + userId: 'user-id', + ts: '2025-06-16T14:31:44.910Z', + }, + }, + ], + } + this.update = { + resyncDocContent: { + version: this.version, + historyOTRanges: { + comments: this.lines.comments, + trackedChanges: this.lines.trackedChanges, + }, + content: this.lines.content, + }, + projectHistoryId: this.projectHistoryId, + path: this.pathname, + doc: this.doc_id, + meta: { ts: new Date() }, + } + + await this.ProjectHistoryRedisManager.promises.queueResyncDocContent( + this.project_id, + this.projectHistoryId, + this.doc_id, + this.lines, + this.ranges, + this.resolvedCommentIds, + this.version, + this.pathname, + true + ) + }) + + it('should include tracked deletes in the update', function () { + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(this.update) + ) + }) + + it('should check the doc length without tracked deletes', function () { + this.Limits.stringFileDataContentIsTooLarge.should.have.been.calledWith( + this.lines, + this.settings.max_doc_length + ) + }) + + it('should queue an update', function () { + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(this.update) + ) }) }) }) diff --git a/services/git-bridge/Dockerfile b/services/git-bridge/Dockerfile index 58572ae8b9..48579b9494 100644 --- a/services/git-bridge/Dockerfile +++ b/services/git-bridge/Dockerfile @@ -1,11 +1,17 @@ -# Dockerfile for git-bridge +# Build the a8m/envsubst binary, as it supports default values, +# which the gnu envsubst (from 
gettext-base) does not. +FROM golang:1.24.3-alpine AS envsubst_builder + +WORKDIR /build + +RUN go install github.com/a8m/envsubst/cmd/envsubst@latest FROM maven:3-amazoncorretto-21-debian AS base RUN apt-get update && apt-get install -y make git sqlite3 \ && rm -rf /var/lib/apt/lists -COPY vendor/envsubst /opt/envsubst +COPY --from=envsubst_builder /go/bin/envsubst /opt/envsubst RUN chmod +x /opt/envsubst RUN useradd --create-home node @@ -33,7 +39,7 @@ RUN adduser -D node COPY --from=builder /git-bridge.jar / -COPY vendor/envsubst /opt/envsubst +COPY --from=envsubst_builder /go/bin/envsubst /opt/envsubst RUN chmod +x /opt/envsubst COPY conf/envsubst_template.json envsubst_template.json diff --git a/services/git-bridge/pom.xml b/services/git-bridge/pom.xml index 7b2c5b8e55..3feb4dd860 100644 --- a/services/git-bridge/pom.xml +++ b/services/git-bridge/pom.xml @@ -18,8 +18,8 @@ 2.8.4 9.4.57.v20241219 2.9.0 - 3.0.1 - 6.6.1.202309021850-r + 3.0.2 + 6.10.1.202505221210-r 3.41.2.2 2.9.9 1.37.0 diff --git a/services/git-bridge/vendor/envsubst b/services/git-bridge/vendor/envsubst deleted file mode 100755 index f7ad8081d0..0000000000 Binary files a/services/git-bridge/vendor/envsubst and /dev/null differ diff --git a/services/history-v1/api/app/rollout.js b/services/history-v1/api/app/rollout.js new file mode 100644 index 0000000000..24ca0409f8 --- /dev/null +++ b/services/history-v1/api/app/rollout.js @@ -0,0 +1,76 @@ +const crypto = require('node:crypto') + +class Rollout { + constructor(config) { + // The history buffer level is used to determine whether to queue changes + // in Redis or persist them directly to the chunk store. + // It defaults to 0 (no queuing) if not set. + this.historyBufferLevel = config.has('historyBufferLevel') + ? parseInt(config.get('historyBufferLevel'), 10) + : 0 + // The forcePersistBuffer flag will ensure the buffer is fully persisted before + // any persist operation. Set this to true if you want to make the persisted-version + // in Redis match the endVersion of the latest chunk. This should be set to true + // when downgrading from a history buffer level that queues changes in Redis + // without persisting them immediately. + this.forcePersistBuffer = config.has('forcePersistBuffer') + ? config.get('forcePersistBuffer') === 'true' + : false + + // Support gradual rollout of the next history buffer level + // with a percentage of projects using it. + this.nextHistoryBufferLevel = config.has('nextHistoryBufferLevel') + ? parseInt(config.get('nextHistoryBufferLevel'), 10) + : null + this.nextHistoryBufferLevelRolloutPercentage = config.has( + 'nextHistoryBufferLevelRolloutPercentage' + ) + ? parseInt(config.get('nextHistoryBufferLevelRolloutPercentage'), 10) + : 0 + } + + report(logger) { + logger.info( + { + historyBufferLevel: this.historyBufferLevel, + forcePersistBuffer: this.forcePersistBuffer, + nextHistoryBufferLevel: this.nextHistoryBufferLevel, + nextHistoryBufferLevelRolloutPercentage: + this.nextHistoryBufferLevelRolloutPercentage, + }, + this.historyBufferLevel > 0 || this.forcePersistBuffer + ? 'using history buffer' + : 'history buffer disabled' + ) + } + + /** + * Get the history buffer level for a project. + * @param {string} projectId + * @returns {Object} - An object containing the history buffer level and force persist buffer flag. + * @property {number} historyBufferLevel - The history buffer level to use for processing changes. + * @property {boolean} forcePersistBuffer - If true, forces the buffer to be persisted before any operation.
+ */ + getHistoryBufferLevelOptions(projectId) { + if ( + this.nextHistoryBufferLevel > this.historyBufferLevel && + this.nextHistoryBufferLevelRolloutPercentage > 0 + ) { + const hash = crypto.createHash('sha1').update(projectId).digest('hex') + const percentage = parseInt(hash.slice(0, 8), 16) % 100 + // If the project is in the rollout percentage, we use the next history buffer level. + if (percentage < this.nextHistoryBufferLevelRolloutPercentage) { + return { + historyBufferLevel: this.nextHistoryBufferLevel, + forcePersistBuffer: this.forcePersistBuffer, + } + } + } + return { + historyBufferLevel: this.historyBufferLevel, + forcePersistBuffer: this.forcePersistBuffer, + } + } +} + +module.exports = Rollout diff --git a/services/history-v1/api/controllers/project_import.js b/services/history-v1/api/controllers/project_import.js index edffb19a25..638873d105 100644 --- a/services/history-v1/api/controllers/project_import.js +++ b/services/history-v1/api/controllers/project_import.js @@ -2,6 +2,7 @@ 'use strict' +const config = require('config') const { expressify } = require('@overleaf/promise-utils') const HTTPStatus = require('http-status') @@ -21,10 +22,15 @@ const BatchBlobStore = storage.BatchBlobStore const BlobStore = storage.BlobStore const chunkStore = storage.chunkStore const HashCheckBlobStore = storage.HashCheckBlobStore -const persistChanges = storage.persistChanges +const commitChanges = storage.commitChanges +const persistBuffer = storage.persistBuffer const InvalidChangeError = storage.InvalidChangeError const render = require('./render') +const Rollout = require('../app/rollout') + +const rollout = new Rollout(config) +rollout.report(logger) // display the rollout configuration in the logs async function importSnapshot(req, res) { const projectId = req.swagger.params.project_id.value @@ -35,6 +41,7 @@ async function importSnapshot(req, res) { try { snapshot = Snapshot.fromRaw(rawSnapshot) } catch (err) { + logger.warn({ err, projectId }, 'failed to import snapshot') return render.unprocessableEntity(res) } @@ -43,6 +50,7 @@ async function importSnapshot(req, res) { historyId = await chunkStore.initializeProject(projectId, snapshot) } catch (err) { if (err instanceof chunkStore.AlreadyInitialized) { + logger.warn({ err, projectId }, 'already initialized') return render.conflict(res) } else { throw err @@ -108,7 +116,12 @@ async function importChanges(req, res, next) { let result try { - result = await persistChanges(projectId, changes, limits, endVersion) + const { historyBufferLevel, forcePersistBuffer } = + rollout.getHistoryBufferLevelOptions(projectId) + result = await commitChanges(projectId, changes, limits, endVersion, { + historyBufferLevel, + forcePersistBuffer, + }) } catch (err) { if ( err instanceof Chunk.ConflictingEndVersion || @@ -141,5 +154,29 @@ async function importChanges(req, res, next) { } } +async function flushChanges(req, res, next) { + const projectId = req.swagger.params.project_id.value + // Use the same limits as importChanges, since these are passed to persistChanges + const farFuture = new Date() + farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000) + const limits = { + maxChanges: 0, + minChangeTimestamp: farFuture, + maxChangeTimestamp: farFuture, + autoResync: true, + } + try { + await persistBuffer(projectId, limits) + res.status(HTTPStatus.OK).end() + } catch (err) { + if (err instanceof Chunk.NotFoundError) { + render.notFound(res) + } else { + throw err + } + } +} + exports.importSnapshot = expressify(importSnapshot)
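// Editor's note (illustrative sketch, not part of the patch): the gradual rollout added
// above in api/app/rollout.js buckets projects by hashing the project id. The snippet
// below reproduces that bucketing in isolation so the behaviour is easy to verify; it
// assumes only Node's built-in crypto module, and the sample project id is made up.
const crypto = require('node:crypto')
function rolloutBucket(projectId) {
  // Same arithmetic as Rollout.getHistoryBufferLevelOptions: take the first 8 hex
  // characters of the SHA-1 digest, parse them as an integer, and reduce modulo 100
  // to get a stable bucket in the range 0-99 for each project id.
  const hash = crypto.createHash('sha1').update(projectId).digest('hex')
  return parseInt(hash.slice(0, 8), 16) % 100
}
// A project picks up nextHistoryBufferLevel when its bucket is strictly below
// nextHistoryBufferLevelRolloutPercentage, so a 25% rollout covers buckets 0-24,
// and the assignment does not change between restarts.
console.log(rolloutBucket('64f0c5e3a1b2c3d4e5f60718')) // prints a number in 0-99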
exports.importChanges = expressify(importChanges) +exports.flushChanges = expressify(flushChanges) diff --git a/services/history-v1/api/controllers/projects.js b/services/history-v1/api/controllers/projects.js index 47a1d959ad..031833688c 100644 --- a/services/history-v1/api/controllers/projects.js +++ b/services/history-v1/api/controllers/projects.js @@ -34,6 +34,7 @@ async function initializeProject(req, res, next) { res.status(HTTPStatus.OK).json({ projectId }) } catch (err) { if (err instanceof chunkStore.AlreadyInitialized) { + logger.warn({ err, projectId }, 'failed to initialize') render.conflict(res) } else { throw err @@ -242,11 +243,15 @@ async function createProjectBlob(req, res, next) { const sizeLimit = new StreamSizeLimit(maxUploadSize) await pipeline(req, sizeLimit, fs.createWriteStream(tmpPath)) if (sizeLimit.sizeLimitExceeded) { + logger.warn( + { projectId, expectedHash, maxUploadSize }, + 'blob exceeds size threshold' + ) return render.requestEntityTooLarge(res) } const hash = await blobHash.fromFile(tmpPath) if (hash !== expectedHash) { - logger.debug({ hash, expectedHash }, 'Hash mismatch') + logger.warn({ projectId, hash, expectedHash }, 'Hash mismatch') return render.conflict(res, 'File hash mismatch') } @@ -343,6 +348,10 @@ async function copyProjectBlob(req, res, next) { targetBlobStore.getBlob(blobHash), ]) if (!sourceBlob) { + logger.warn( + { sourceProjectId, targetProjectId, blobHash }, + 'missing source blob when copying across projects' + ) return render.notFound(res) } // Exit early if the blob exists in the target project. diff --git a/services/history-v1/api/swagger/project_import.js b/services/history-v1/api/swagger/project_import.js index a93f42d27e..6103eed74b 100644 --- a/services/history-v1/api/swagger/project_import.js +++ b/services/history-v1/api/swagger/project_import.js @@ -139,9 +139,45 @@ const getChanges = { ], } +const flushChanges = { + 'x-swagger-router-controller': 'project_import', + operationId: 'flushChanges', + tags: ['ProjectImport'], + description: 'Flush project changes from buffer to the chunk store.', + parameters: [ + { + name: 'project_id', + in: 'path', + description: 'project id', + required: true, + type: 'string', + }, + ], + responses: { + 200: { + description: 'Success', + schema: { + $ref: '#/definitions/Project', + }, + }, + 404: { + description: 'Not Found', + schema: { + $ref: '#/definitions/Error', + }, + }, + }, + security: [ + { + basic: [], + }, + ], +} + exports.paths = { '/projects/{project_id}/import': { post: importSnapshot }, '/projects/{project_id}/legacy_import': { post: importSnapshot }, '/projects/{project_id}/changes': { get: getChanges, post: importChanges }, '/projects/{project_id}/legacy_changes': { post: importChanges }, + '/projects/{project_id}/flush': { post: flushChanges }, } diff --git a/services/history-v1/app.js b/services/history-v1/app.js index 261f1001b6..dd991c1a6d 100644 --- a/services/history-v1/app.js +++ b/services/history-v1/app.js @@ -100,11 +100,13 @@ function setupErrorHandling() { }) } if (err.code === 'ENUM_MISMATCH') { + logger.warn({ err, projectId }, err.message) return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({ message: 'invalid enum value: ' + err.paramName, }) } if (err.code === 'REQUIRED') { + logger.warn({ err, projectId }, err.message) return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({ message: err.message, }) diff --git a/services/history-v1/config/custom-environment-variables.json b/services/history-v1/config/custom-environment-variables.json index 
d07ae2925a..686ca25407 100644 --- a/services/history-v1/config/custom-environment-variables.json +++ b/services/history-v1/config/custom-environment-variables.json @@ -84,6 +84,10 @@ "maxFileUploadSize": "MAX_FILE_UPLOAD_SIZE", "httpsOnly": "HTTPS_ONLY", "httpRequestTimeout": "HTTP_REQUEST_TIMEOUT", + "historyBufferLevel": "HISTORY_BUFFER_LEVEL", + "forcePersistBuffer": "FORCE_PERSIST_BUFFER", + "nextHistoryBufferLevel": "NEXT_HISTORY_BUFFER_LEVEL", + "nextHistoryBufferLevelRolloutPercentage": "NEXT_HISTORY_BUFFER_LEVEL_ROLLOUT_PERCENTAGE", "redis": { "queue": { "host": "QUEUES_REDIS_HOST", @@ -100,5 +104,9 @@ "password": "REDIS_PASSWORD", "port": "REDIS_PORT" } + }, + "projectHistory": { + "host": "PROJECT_HISTORY_HOST", + "port": "PROJECT_HISTORY_PORT" } } diff --git a/services/history-v1/config/default.json b/services/history-v1/config/default.json index 5222b84d87..e7732fe3f7 100644 --- a/services/history-v1/config/default.json +++ b/services/history-v1/config/default.json @@ -39,5 +39,8 @@ "databasePoolMin": "2", "databasePoolMax": "10", "httpsOnly": "false", - "httpRequestTimeout": "300000" + "httpRequestTimeout": "300000", + "projectHistory": { + "port": "3054" + } } diff --git a/services/history-v1/docker-compose.ci.yml b/services/history-v1/docker-compose.ci.yml index 0dfe8b99d3..9128451c4f 100644 --- a/services/history-v1/docker-compose.ci.yml +++ b/services/history-v1/docker-compose.ci.yml @@ -39,6 +39,7 @@ services: NODE_OPTIONS: "--unhandled-rejections=strict" volumes: - ./test/acceptance/certs:/certs + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started @@ -55,6 +56,7 @@ services: gcs: condition: service_healthy user: node + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance @@ -66,7 +68,7 @@ services: command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . 
user: root redis: - image: redis + image: redis:7.4.3 healthcheck: test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/history-v1/docker-compose.yml b/services/history-v1/docker-compose.yml index b87d859e1e..cda379fb14 100644 --- a/services/history-v1/docker-compose.yml +++ b/services/history-v1/docker-compose.yml @@ -33,6 +33,7 @@ services: - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries - ./test/acceptance/certs:/certs + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/history-v1 environment: ELASTIC_SEARCH_DSN: es:9200 @@ -71,10 +72,11 @@ services: condition: service_completed_successfully gcs: condition: service_healthy + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance redis: - image: redis + image: redis:7.4.3 healthcheck: test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/history-v1/package.json b/services/history-v1/package.json index 1fdfd95c45..4796cafd03 100644 --- a/services/history-v1/package.json +++ b/services/history-v1/package.json @@ -7,6 +7,7 @@ "private": true, "dependencies": { "@google-cloud/secret-manager": "^5.6.0", + "@overleaf/fetch-utils": "*", "@overleaf/logger": "*", "@overleaf/metrics": "*", "@overleaf/mongo-utils": "*", @@ -36,6 +37,7 @@ "mongodb": "6.12.0", "overleaf-editor-core": "*", "p-limit": "^6.2.0", + "p-queue": "^8.1.0", "pg": "^8.7.1", "pg-query-stream": "^4.2.4", "swagger-tools": "^0.10.4", diff --git a/services/history-v1/storage/index.js b/services/history-v1/storage/index.js index 2aa492f46e..82a51583be 100644 --- a/services/history-v1/storage/index.js +++ b/services/history-v1/storage/index.js @@ -8,6 +8,9 @@ exports.mongodb = require('./lib/mongodb') exports.redis = require('./lib/redis') exports.persistChanges = require('./lib/persist_changes') exports.persistor = require('./lib/persistor') +exports.persistBuffer = require('./lib/persist_buffer') +exports.commitChanges = require('./lib/commit_changes') +exports.queueChanges = require('./lib/queue_changes') exports.ProjectArchive = require('./lib/project_archive') exports.streams = require('./lib/streams') exports.temp = require('./lib/temp') diff --git a/services/history-v1/storage/lib/chunk_store/index.js b/services/history-v1/storage/lib/chunk_store/index.js index 6dab84f929..286a8d8764 100644 --- a/services/history-v1/storage/lib/chunk_store/index.js +++ b/services/history-v1/storage/lib/chunk_store/index.js @@ -151,23 +151,48 @@ async function loadAtVersion(projectId, version, opts = {}) { const backend = getBackend(projectId) const blobStore = new BlobStore(projectId) const batchBlobStore = new BatchBlobStore(blobStore) + const latestChunkMetadata = await getLatestChunkMetadata(projectId) - const chunkRecord = await backend.getChunkForVersion(projectId, version, { - preferNewer: opts.preferNewer, - }) + // When loading a chunk for a version there are three cases to consider: + // 1. If `persistedOnly` is true, we always use the requested version + // to fetch the chunk. + // 2. If `persistedOnly` is false and the requested version is in the + // persisted chunk version range, we use the requested version. + // 3. If `persistedOnly` is false and the requested version is ahead of + // the persisted chunk versions, we fetch the latest chunk and see if + // the non-persisted changes include the requested version. + const targetChunkVersion = opts.persistedOnly + ? 
version + : Math.min(latestChunkMetadata.endVersion, version) + + const chunkRecord = await backend.getChunkForVersion( + projectId, + targetChunkVersion, + { + preferNewer: opts.preferNewer, + } + ) const rawHistory = await historyStore.loadRaw(projectId, chunkRecord.id) const history = History.fromRaw(rawHistory) + const startVersion = chunkRecord.endVersion - history.countChanges() if (!opts.persistedOnly) { + // Try to extend the chunk with any non-persisted changes that + // follow the chunk's end version. const nonPersistedChanges = await getChunkExtension( projectId, chunkRecord.endVersion ) history.pushChanges(nonPersistedChanges) + + // Check that the changes do actually contain the requested version + if (version > chunkRecord.endVersion + nonPersistedChanges.length) { + throw new Chunk.VersionNotFoundError(projectId, version) + } } await lazyLoadHistoryFiles(history, batchBlobStore) - return new Chunk(history, chunkRecord.endVersion - history.countChanges()) + return new Chunk(history, startVersion) } /** @@ -190,6 +215,7 @@ async function loadAtTimestamp(projectId, timestamp, opts = {}) { const chunkRecord = await backend.getChunkForTimestamp(projectId, timestamp) const rawHistory = await historyStore.loadRaw(projectId, chunkRecord.id) const history = History.fromRaw(rawHistory) + const startVersion = chunkRecord.endVersion - history.countChanges() if (!opts.persistedOnly) { const nonPersistedChanges = await getChunkExtension( @@ -200,7 +226,7 @@ async function loadAtTimestamp(projectId, timestamp, opts = {}) { } await lazyLoadHistoryFiles(history, batchBlobStore) - return new Chunk(history, chunkRecord.endVersion - history.countChanges()) + return new Chunk(history, startVersion) } /** diff --git a/services/history-v1/storage/lib/chunk_store/mongo.js b/services/history-v1/storage/lib/chunk_store/mongo.js index 26c1bc48ec..49020c6be4 100644 --- a/services/history-v1/storage/lib/chunk_store/mongo.js +++ b/services/history-v1/storage/lib/chunk_store/mongo.js @@ -286,6 +286,27 @@ async function updateProjectRecord( ) } +/** + * @param {number} historyId + * @return {Promise} + */ +async function lookupMongoProjectIdFromHistoryId(historyId) { + const project = await mongodb.projects.findOne( + // string for Object ids, number for postgres ids + { 'overleaf.history.id': historyId }, + { projection: { _id: 1 } } + ) + if (!project) { + // should not happen: We flush before allowing a project to be soft-deleted. + throw new OError('mongo project not found by history id', { historyId }) + } + return project._id.toString() +} + +async function resolveHistoryIdToMongoProjectId(projectId) { + return projectId +} + /** * Record that a chunk was replaced by a new one. 
* @@ -533,4 +554,6 @@ module.exports = { deleteProjectChunks, getOldChunksBatch, deleteOldChunks, + lookupMongoProjectIdFromHistoryId, + resolveHistoryIdToMongoProjectId, } diff --git a/services/history-v1/storage/lib/chunk_store/postgres.js b/services/history-v1/storage/lib/chunk_store/postgres.js index bfb5c6954a..8906db38e1 100644 --- a/services/history-v1/storage/lib/chunk_store/postgres.js +++ b/services/history-v1/storage/lib/chunk_store/postgres.js @@ -5,7 +5,10 @@ const assert = require('../assert') const knex = require('../knex') const knexReadOnly = require('../knex_read_only') const { ChunkVersionConflictError } = require('./errors') -const { updateProjectRecord } = require('./mongo') +const { + updateProjectRecord, + lookupMongoProjectIdFromHistoryId, +} = require('./mongo') const DUPLICATE_KEY_ERROR_CODE = '23505' @@ -472,6 +475,10 @@ async function generateProjectId() { return record.doc_id.toString() } +async function resolveHistoryIdToMongoProjectId(projectId) { + return await lookupMongoProjectIdFromHistoryId(parseInt(projectId, 10)) +} + module.exports = { getLatestChunk, getFirstChunkBeforeTimestamp, @@ -488,4 +495,5 @@ module.exports = { getOldChunksBatch, deleteOldChunks, generateProjectId, + resolveHistoryIdToMongoProjectId, } diff --git a/services/history-v1/storage/lib/chunk_store/redis.js b/services/history-v1/storage/lib/chunk_store/redis.js index 0ae7cee2e5..59bfd81e39 100644 --- a/services/history-v1/storage/lib/chunk_store/redis.js +++ b/services/history-v1/storage/lib/chunk_store/redis.js @@ -480,11 +480,12 @@ async function getNonPersistedChanges(projectId, baseVersion) { } rclient.defineCommand('set_persisted_version', { - numberOfKeys: 3, + numberOfKeys: 4, lua: ` local headVersionKey = KEYS[1] local persistedVersionKey = KEYS[2] - local changesKey = KEYS[3] + local persistTimeKey = KEYS[3] + local changesKey = KEYS[4] local newPersistedVersion = tonumber(ARGV[1]) local maxPersistedChanges = tonumber(ARGV[2]) @@ -501,9 +502,19 @@ rclient.defineCommand('set_persisted_version', { return 'too_low' end + -- Refuse to set a persisted version that is higher than the head version + if newPersistedVersion > headVersion then + return 'too_high' + end + -- Set the persisted version redis.call('SET', persistedVersionKey, newPersistedVersion) + -- Clear the persist time if the persisted version now matches the head version + if newPersistedVersion == headVersion then + redis.call('DEL', persistTimeKey) + end + -- Calculate the starting index, to keep only maxPersistedChanges beyond the persisted version -- Using negative indexing to count backwards from the end of the list local startIndex = newPersistedVersion - headVersion - maxPersistedChanges @@ -530,6 +541,7 @@ async function setPersistedVersion(projectId, persistedVersion) { const keys = [ keySchema.headVersion({ projectId }), keySchema.persistedVersion({ projectId }), + keySchema.persistTime({ projectId }), keySchema.changes({ projectId }), ] @@ -541,6 +553,13 @@ async function setPersistedVersion(projectId, persistedVersion) { status, }) + if (status === 'too_high') { + throw new VersionOutOfBoundsError( + 'Persisted version cannot be higher than head version', + { projectId, persistedVersion } + ) + } + return status } catch (err) { metrics.inc('chunk_store.redis.set_persisted_version', 1, { @@ -631,6 +650,7 @@ async function expireProject(projectId) { metrics.inc('chunk_store.redis.set_persisted_version', 1, { status, }) + return status } catch (err) { metrics.inc('chunk_store.redis.set_persisted_version', 1, 
{ status: 'error', diff --git a/services/history-v1/storage/lib/commit_changes.js b/services/history-v1/storage/lib/commit_changes.js new file mode 100644 index 0000000000..5749e5fc0e --- /dev/null +++ b/services/history-v1/storage/lib/commit_changes.js @@ -0,0 +1,159 @@ +// @ts-check + +'use strict' + +const metrics = require('@overleaf/metrics') +const redisBackend = require('./chunk_store/redis') +const logger = require('@overleaf/logger') +const queueChanges = require('./queue_changes') +const persistChanges = require('./persist_changes') +const persistBuffer = require('./persist_buffer') + +/** + * @typedef {import('overleaf-editor-core').Change} Change + */ + +/** + * Handle incoming changes by processing them according to the specified options. + * @param {string} projectId + * @param {Change[]} changes + * @param {Object} limits + * @param {number} endVersion + * @param {Object} options + * @param {number} [options.historyBufferLevel] - The history buffer level to use for processing changes. + * @param {Boolean} [options.forcePersistBuffer] - If true, forces the buffer to be persisted before any operation. + * @return {Promise.} + */ + +async function commitChanges( + projectId, + changes, + limits, + endVersion, + options = {} +) { + const { historyBufferLevel, forcePersistBuffer } = options + + // Force the buffer to be persisted if specified. + if (forcePersistBuffer) { + try { + const status = await redisBackend.expireProject(projectId) // clear the project from Redis if it is persisted, returns 'not-persisted' if it was not persisted + if (status === 'not-persisted') { + await persistBuffer(projectId, limits) + await redisBackend.expireProject(projectId) // clear the project from Redis after persisting + metrics.inc('persist_buffer_force', 1, { status: 'persisted' }) + } + } catch (err) { + metrics.inc('persist_buffer_force', 1, { status: 'error' }) + logger.error( + { err, projectId }, + 'failed to persist buffer before committing changes' + ) + } + } + + metrics.inc('commit_changes', 1, { + history_buffer_level: historyBufferLevel || 0, + }) + + // Now handle the changes based on the configured history buffer level. + switch (historyBufferLevel) { + case 4: // Queue changes and only persist them in the background + await queueChanges(projectId, changes, endVersion) + return {} + case 3: // Queue changes and immediately persist with persistBuffer + await queueChanges(projectId, changes, endVersion) + return await persistBuffer(projectId, limits) + case 2: // Equivalent to queueChangesInRedis:true + await queueChangesFake(projectId, changes, endVersion) + return await persistChanges(projectId, changes, limits, endVersion) + case 1: // Queue changes with fake persist only for projects in redis already + await queueChangesFakeOnlyIfExists(projectId, changes, endVersion) + return await persistChanges(projectId, changes, limits, endVersion) + case 0: // Persist changes directly to the chunk store + return await persistChanges(projectId, changes, limits, endVersion) + default: + throw new Error(`Invalid history buffer level: ${historyBufferLevel}`) + } +} + +/** + * Queues a set of changes in redis as if they had been persisted, ignoring any errors. + * @param {string} projectId + * @param {Change[]} changes + * @param {number} endVersion + * @param {Object} [options] + * @param {boolean} [options.onlyIfExists] - If true, only queue changes if the project + * already exists in Redis. 
+ */ + +async function queueChangesFake(projectId, changes, endVersion, options = {}) { + try { + await queueChanges(projectId, changes, endVersion) + await fakePersistRedisChanges(projectId, changes, endVersion) + } catch (err) { + logger.error({ err }, 'Chunk buffer verification failed') + } +} + +/** + * Queues changes in Redis, simulating persistence, but only if the project already exists. + * @param {string} projectId - The ID of the project. + * @param {Change[]} changes - An array of changes to be queued. + * @param {number} endVersion - The expected version of the project before these changes are applied. + */ + +async function queueChangesFakeOnlyIfExists(projectId, changes, endVersion) { + await queueChangesFake(projectId, changes, endVersion, { + onlyIfExists: true, + }) +} + +/** + * Simulates the persistence of changes by verifying a given set of changes against + * what is currently stored as non-persisted in Redis, and then updates the + * persisted version number in Redis. + * + * @async + * @param {string} projectId - The ID of the project. + * @param {Change[]} changesToPersist - An array of changes that are expected to be + * persisted. These are used for verification + * against the changes currently in Redis. + * @param {number} baseVersion - The base version number from which to calculate + * the new persisted version. + * @returns {Promise} A promise that resolves when the persisted version + * in Redis has been updated. + */ +async function fakePersistRedisChanges( + projectId, + changesToPersist, + baseVersion +) { + const nonPersistedChanges = await redisBackend.getNonPersistedChanges( + projectId, + baseVersion + ) + + if ( + serializeChanges(nonPersistedChanges) === serializeChanges(changesToPersist) + ) { + metrics.inc('persist_redis_changes_verification', 1, { status: 'match' }) + } else { + logger.warn({ projectId }, 'mismatch of non-persisted changes from Redis') + metrics.inc('persist_redis_changes_verification', 1, { + status: 'mismatch', + }) + } + + const persistedVersion = baseVersion + nonPersistedChanges.length + await redisBackend.setPersistedVersion(projectId, persistedVersion) +} + +/** + * @param {Change[]} changes + */ +function serializeChanges(changes) { + return JSON.stringify(changes.map(change => change.toRaw())) +} + +module.exports = commitChanges diff --git a/services/history-v1/storage/lib/persist_buffer.js b/services/history-v1/storage/lib/persist_buffer.js new file mode 100644 index 0000000000..d562388f87 --- /dev/null +++ b/services/history-v1/storage/lib/persist_buffer.js @@ -0,0 +1,206 @@ +// @ts-check +'use strict' + +const logger = require('@overleaf/logger') +const metrics = require('@overleaf/metrics') +const OError = require('@overleaf/o-error') +const assert = require('./assert') +const chunkStore = require('./chunk_store') +const { BlobStore } = require('./blob_store') +const BatchBlobStore = require('./batch_blob_store') +const persistChanges = require('./persist_changes') +const resyncProject = require('./resync_project') +const redisBackend = require('./chunk_store/redis') + +/** + * Persist the changes from Redis buffer to the main storage + * + * Algorithm Outline: + * 1. Get the latest chunk's endVersion from the database + * 2. Get non-persisted changes from Redis that are after this endVersion. + * 3. If no such changes, exit. + * 4. Load file blobs for these Redis changes. + * 5. Run the persistChanges() algorithm to store these changes into a new chunk(s) in GCS. + * - This must not decrease the endVersion. 
If changes were processed, it must advance. + * 6. Set the new persisted version (endVersion of the latest persisted chunk) in Redis. + * + * @param {string} projectId + * @param {Object} limits + * @throws {Error | OError} If a critical error occurs during persistence. + */ +async function persistBuffer(projectId, limits) { + assert.projectId(projectId) + logger.debug({ projectId }, 'starting persistBuffer operation') + + // 1. Get the latest chunk's endVersion from GCS/main store + let endVersion + const latestChunkMetadata = await chunkStore.getLatestChunkMetadata(projectId) + + if (latestChunkMetadata) { + endVersion = latestChunkMetadata.endVersion + } else { + endVersion = 0 // No chunks found, start from version 0 + logger.debug({ projectId }, 'no existing chunks found in main storage') + } + + logger.debug({ projectId, endVersion }, 'got latest persisted chunk') + + // 2. Get non-persisted changes from Redis + const changesToPersist = await redisBackend.getNonPersistedChanges( + projectId, + endVersion + ) + + if (changesToPersist.length === 0) { + logger.debug( + { projectId, endVersion }, + 'no new changes in Redis buffer to persist' + ) + metrics.inc('persist_buffer', 1, { status: 'no_changes' }) + // No changes to persist, update the persisted version in Redis + // to match the current endVersion. This shouldn't be needed + // unless a worker failed to update the persisted version. + await redisBackend.setPersistedVersion(projectId, endVersion) + const { chunk } = await chunkStore.loadByChunkRecord( + projectId, + latestChunkMetadata + ) + // Return the result in the same format as persistChanges + // so that the caller can handle it uniformly. + return { + numberOfChangesPersisted: changesToPersist.length, + originalEndVersion: endVersion, + currentChunk: chunk, + } + } + + logger.debug( + { + projectId, + endVersion, + count: changesToPersist.length, + }, + 'found changes in Redis to persist' + ) + + // 4. Load file blobs for these Redis changes. Errors will propagate. + const blobStore = new BlobStore(projectId) + const batchBlobStore = new BatchBlobStore(blobStore) + + const blobHashes = new Set() + for (const change of changesToPersist) { + change.findBlobHashes(blobHashes) + } + if (blobHashes.size > 0) { + await batchBlobStore.preload(Array.from(blobHashes)) + } + for (const change of changesToPersist) { + await change.loadFiles('lazy', blobStore) + } + + // 5. Run the persistChanges() algorithm. Errors will propagate. 
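+  // Note (editorial, inferred from the code below): persistChanges() may write
+  // the buffered changes across one or more new chunks; its result includes the
+  // latest persisted chunk, which is used below to derive the new end version.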
+ logger.debug( + { + projectId, + endVersion, + changeCount: changesToPersist.length, + }, + 'calling persistChanges' + ) + + const persistResult = await persistChanges( + projectId, + changesToPersist, + limits, + endVersion + ) + + if (!persistResult || !persistResult.currentChunk) { + metrics.inc('persist_buffer', 1, { status: 'no-chunk-error' }) + throw new OError( + 'persistChanges did not produce a new chunk for non-empty changes', + { + projectId, + endVersion, + changeCount: changesToPersist.length, + } + ) + } + + const newPersistedChunk = persistResult.currentChunk + const newEndVersion = newPersistedChunk.getEndVersion() + + if (newEndVersion <= endVersion) { + metrics.inc('persist_buffer', 1, { status: 'chunk-version-mismatch' }) + throw new OError( + 'persisted chunk endVersion must be greater than current persisted chunk end version for non-empty changes', + { + projectId, + newEndVersion, + endVersion, + changeCount: changesToPersist.length, + } + ) + } + + logger.debug( + { + projectId, + oldVersion: endVersion, + newVersion: newEndVersion, + }, + 'successfully persisted changes from Redis to main storage' + ) + + // 6. Set the persisted version in Redis. Errors will propagate. + const status = await redisBackend.setPersistedVersion( + projectId, + newEndVersion + ) + + if (status !== 'ok') { + metrics.inc('persist_buffer', 1, { status: 'error-on-persisted-version' }) + throw new OError('failed to update persisted version in Redis', { + projectId, + newEndVersion, + status, + }) + } + + logger.debug( + { projectId, newEndVersion }, + 'updated persisted version in Redis' + ) + + // 7. Resync the project if content hash validation failed + if (limits.autoResync && persistResult.resyncNeeded) { + if ( + changesToPersist.some( + change => change.getOrigin()?.getKind() === 'history-resync' + ) + ) { + // To avoid an infinite loop, do not resync if the current batch of + // changes contains a history resync. 
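+        // (A resync enqueues further history-resync changes, which could fail
+        // the same content hash validation and repeat the cycle.)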
+ logger.warn( + { projectId }, + 'content hash validation failed while persisting a history resync, skipping additional resync' + ) + } else { + const backend = chunkStore.getBackend(projectId) + const mongoProjectId = + await backend.resolveHistoryIdToMongoProjectId(projectId) + await resyncProject(mongoProjectId) + } + } + + logger.debug( + { projectId, finalPersistedVersion: newEndVersion }, + 'persistBuffer operation completed successfully' + ) + + metrics.inc('persist_buffer', 1, { status: 'persisted' }) + + return persistResult +} + +module.exports = persistBuffer diff --git a/services/history-v1/storage/lib/persist_changes.js b/services/history-v1/storage/lib/persist_changes.js index 5b80285eb0..d2ca00053f 100644 --- a/services/history-v1/storage/lib/persist_changes.js +++ b/services/history-v1/storage/lib/persist_changes.js @@ -4,7 +4,6 @@ const _ = require('lodash') const logger = require('@overleaf/logger') -const metrics = require('@overleaf/metrics') const core = require('overleaf-editor-core') const Chunk = core.Chunk @@ -15,7 +14,6 @@ const chunkStore = require('./chunk_store') const { BlobStore } = require('./blob_store') const { InvalidChangeError } = require('./errors') const { getContentHash } = require('./content_hash') -const redisBackend = require('./chunk_store/redis') function countChangeBytes(change) { // Note: This is not quite accurate, because the raw change may contain raw @@ -202,45 +200,6 @@ async function persistChanges(projectId, allChanges, limits, clientEndVersion) { currentSnapshot.applyAll(currentChunk.getChanges()) } - async function queueChangesInRedis() { - const hollowSnapshot = currentSnapshot.clone() - // We're transforming a lazy snapshot to a hollow snapshot, so loadFiles() - // doesn't really need a blobStore, but its signature still requires it. 
- const blobStore = new BlobStore(projectId) - await hollowSnapshot.loadFiles('hollow', blobStore) - hollowSnapshot.applyAll(changesToPersist, { strict: true }) - const baseVersion = currentChunk.getEndVersion() - await redisBackend.queueChanges( - projectId, - hollowSnapshot, - baseVersion, - changesToPersist - ) - } - - async function fakePersistRedisChanges() { - const baseVersion = currentChunk.getEndVersion() - const nonPersistedChanges = await redisBackend.getNonPersistedChanges( - projectId, - baseVersion - ) - - if ( - serializeChanges(nonPersistedChanges) === - serializeChanges(changesToPersist) - ) { - metrics.inc('persist_redis_changes_verification', 1, { status: 'match' }) - } else { - logger.warn({ projectId }, 'mismatch of non-persisted changes from Redis') - metrics.inc('persist_redis_changes_verification', 1, { - status: 'mismatch', - }) - } - - const persistedVersion = baseVersion + nonPersistedChanges.length - await redisBackend.setPersistedVersion(projectId, persistedVersion) - } - async function extendLastChunkIfPossible() { const timer = new Timer() const changesPushed = await fillChunk(currentChunk, changesToPersist) @@ -289,12 +248,6 @@ async function persistChanges(projectId, allChanges, limits, clientEndVersion) { const numberOfChangesToPersist = oldChanges.length await loadLatestChunk() - try { - await queueChangesInRedis() - await fakePersistRedisChanges() - } catch (err) { - logger.error({ err }, 'Chunk buffer verification failed') - } await extendLastChunkIfPossible() await createNewChunksAsNeeded() @@ -309,11 +262,4 @@ async function persistChanges(projectId, allChanges, limits, clientEndVersion) { } } -/** - * @param {core.Change[]} changes - */ -function serializeChanges(changes) { - return JSON.stringify(changes.map(change => change.toRaw())) -} - module.exports = persistChanges diff --git a/services/history-v1/storage/lib/queue_changes.js b/services/history-v1/storage/lib/queue_changes.js new file mode 100644 index 0000000000..6b8d4b22b4 --- /dev/null +++ b/services/history-v1/storage/lib/queue_changes.js @@ -0,0 +1,75 @@ +// @ts-check + +'use strict' + +const redisBackend = require('./chunk_store/redis') +const { BlobStore } = require('./blob_store') +const chunkStore = require('./chunk_store') +const core = require('overleaf-editor-core') +const Chunk = core.Chunk + +/** + * Queues an incoming set of changes after validating them against the current snapshot. + * + * @async + * @function queueChanges + * @param {string} projectId - The project to queue changes for. + * @param {Array} changesToQueue - An array of change objects to be applied and queued. + * @param {number} endVersion - The expected version of the project before these changes are applied. + * This is used for optimistic concurrency control. + * @param {Object} [opts] - Additional options for queuing changes. + * @throws {Chunk.ConflictingEndVersion} If the provided `endVersion` does not match the + * current version of the project. + * @returns {Promise} A promise that resolves with the status returned by the + * `redisBackend.queueChanges` operation. + */ +async function queueChanges(projectId, changesToQueue, endVersion, opts) { + const result = await redisBackend.getHeadSnapshot(projectId) + let currentSnapshot = null + let currentVersion = null + if (result) { + // If we have a snapshot in redis, we can use it to check the current state + // of the project and apply changes to it. 
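+    // The head snapshot stored in Redis is hollow (file hashes only, no
+    // content), which is sufficient for validating and queueing these changes.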
+ currentSnapshot = result.snapshot + currentVersion = result.version + } else { + // Otherwise, load the latest chunk from the chunk store. + const latestChunk = await chunkStore.loadLatest(projectId, { + persistedOnly: true, + }) + // Throw an error if no latest chunk is found, indicating the project has not been initialised. + if (!latestChunk) { + throw new Chunk.NotFoundError(projectId) + } + currentSnapshot = latestChunk.getSnapshot() + currentSnapshot.applyAll(latestChunk.getChanges()) + currentVersion = latestChunk.getEndVersion() + } + + // Ensure the endVersion matches the current version of the project. + if (endVersion !== currentVersion) { + throw new Chunk.ConflictingEndVersion(endVersion, currentVersion) + } + + // Compute the new hollow snapshot to be saved to redis. + const hollowSnapshot = currentSnapshot + const blobStore = new BlobStore(projectId) + await hollowSnapshot.loadFiles('hollow', blobStore) + // Clone the changes to avoid modifying the original ones when computing the hollow snapshot. + const hollowChanges = changesToQueue.map(change => change.clone()) + for (const change of hollowChanges) { + await change.loadFiles('hollow', blobStore) + } + hollowSnapshot.applyAll(hollowChanges, { strict: true }) + const baseVersion = currentVersion + const status = await redisBackend.queueChanges( + projectId, + hollowSnapshot, + baseVersion, + changesToQueue, + opts + ) + return status +} + +module.exports = queueChanges diff --git a/services/history-v1/storage/lib/resync_project.js b/services/history-v1/storage/lib/resync_project.js new file mode 100644 index 0000000000..3ec680bb5b --- /dev/null +++ b/services/history-v1/storage/lib/resync_project.js @@ -0,0 +1,14 @@ +// @ts-check + +const config = require('config') +const { fetchNothing } = require('@overleaf/fetch-utils') + +const PROJECT_HISTORY_URL = `http://${config.projectHistory.host}:${config.projectHistory.port}` + +async function resyncProject(projectId) { + await fetchNothing(`${PROJECT_HISTORY_URL}/project/${projectId}/resync`, { + method: 'POST', + }) +} + +module.exports = resyncProject diff --git a/services/history-v1/storage/lib/scan.js b/services/history-v1/storage/lib/scan.js index fe4b8d514e..d55f5362c1 100644 --- a/services/history-v1/storage/lib/scan.js +++ b/services/history-v1/storage/lib/scan.js @@ -1,5 +1,9 @@ -const logger = require('@overleaf/logger') +// @ts-check +'use strict' + +const logger = require('@overleaf/logger') +const { JobNotFoundError, JobNotReadyError } = require('./chunk_store/errors') const BATCH_SIZE = 1000 // Default batch size for SCAN /** @@ -147,10 +151,24 @@ async function scanAndProcessDueItems( `Successfully performed ${taskName} for project` ) } catch (err) { - logger.error( - { ...logContext, projectId, err }, - `Error performing ${taskName} for project` - ) + if (err instanceof JobNotReadyError) { + // the project has been touched since the job was created + logger.info( + { ...logContext, projectId }, + `Job not ready for ${taskName} for project` + ) + } else if (err instanceof JobNotFoundError) { + // the project has been expired already by another worker + logger.info( + { ...logContext, projectId }, + `Job not found for ${taskName} for project` + ) + } else { + logger.error( + { ...logContext, projectId, err }, + `Error performing ${taskName} for project` + ) + } continue } } diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs index 96dfd79e38..ba3e0d4359 100644 --- 
a/services/history-v1/storage/scripts/back_fill_file_hash.mjs +++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs @@ -89,14 +89,13 @@ ObjectId.cacheHexString = true */ /** - * @return {{PROJECT_IDS_FROM: string, PROCESS_HASHED_FILES: boolean, PROCESS_DELETED_FILES: boolean, LOGGING_IDENTIFIER: string, BATCH_RANGE_START: string, PROCESS_BLOBS: boolean, BATCH_RANGE_END: string, PROCESS_NON_DELETED_PROJECTS: boolean, PROCESS_DELETED_PROJECTS: boolean, COLLECT_BACKED_UP_BLOBS: boolean}} + * @return {{PROJECT_IDS_FROM: string, PROCESS_HASHED_FILES: boolean, LOGGING_IDENTIFIER: string, BATCH_RANGE_START: string, PROCESS_BLOBS: boolean, BATCH_RANGE_END: string, PROCESS_NON_DELETED_PROJECTS: boolean, PROCESS_DELETED_PROJECTS: boolean, COLLECT_BACKED_UP_BLOBS: boolean}} */ function parseArgs() { const PUBLIC_LAUNCH_DATE = new Date('2012-01-01T00:00:00Z') const args = commandLineArgs([ { name: 'processNonDeletedProjects', type: String, defaultValue: 'false' }, { name: 'processDeletedProjects', type: String, defaultValue: 'false' }, - { name: 'processDeletedFiles', type: String, defaultValue: 'false' }, { name: 'processHashedFiles', type: String, defaultValue: 'false' }, { name: 'processBlobs', type: String, defaultValue: 'true' }, { name: 'projectIdsFrom', type: String, defaultValue: '' }, @@ -131,7 +130,6 @@ function parseArgs() { PROCESS_NON_DELETED_PROJECTS: boolVal('processNonDeletedProjects'), PROCESS_DELETED_PROJECTS: boolVal('processDeletedProjects'), PROCESS_BLOBS: boolVal('processBlobs'), - PROCESS_DELETED_FILES: boolVal('processDeletedFiles'), PROCESS_HASHED_FILES: boolVal('processHashedFiles'), COLLECT_BACKED_UP_BLOBS: boolVal('collectBackedUpBlobs'), BATCH_RANGE_START, @@ -145,7 +143,6 @@ const { PROCESS_NON_DELETED_PROJECTS, PROCESS_DELETED_PROJECTS, PROCESS_BLOBS, - PROCESS_DELETED_FILES, PROCESS_HASHED_FILES, COLLECT_BACKED_UP_BLOBS, BATCH_RANGE_START, @@ -188,7 +185,6 @@ const typedProjectsCollection = db.collection('projects') const deletedProjectsCollection = db.collection('deletedProjects') /** @type {DeletedProjectsCollection} */ const typedDeletedProjectsCollection = db.collection('deletedProjects') -const deletedFilesCollection = db.collection('deletedFiles') const concurrencyLimit = pLimit(CONCURRENCY) @@ -647,22 +643,15 @@ async function queueNextBatch(batch, prefix = 'rootFolder.0') { * @return {Promise} */ async function processBatch(batch, prefix = 'rootFolder.0') { - const [deletedFiles, { nBlobs, blobs }, { nBackedUpBlobs, backedUpBlobs }] = - await Promise.all([ - collectDeletedFiles(batch), - collectProjectBlobs(batch), - collectBackedUpBlobs(batch), - ]) - const files = Array.from( - findFileInBatch(batch, prefix, deletedFiles, blobs, backedUpBlobs) - ) + const [{ nBlobs, blobs }, { nBackedUpBlobs, backedUpBlobs }] = + await Promise.all([collectProjectBlobs(batch), collectBackedUpBlobs(batch)]) + const files = Array.from(findFileInBatch(batch, prefix, blobs, backedUpBlobs)) STATS.projects += batch.length STATS.blobs += nBlobs STATS.backedUpBlobs += nBackedUpBlobs // GC batch.length = 0 - deletedFiles.clear() blobs.clear() backedUpBlobs.clear() @@ -713,9 +702,7 @@ async function handleDeletedFileTreeBatch(batch) { * @return {Promise} */ async function tryUpdateFileRefInMongo(entry) { - if (entry.path === MONGO_PATH_DELETED_FILE) { - return await tryUpdateDeletedFileRefInMongo(entry) - } else if (entry.path.startsWith('project.')) { + if (entry.path.startsWith('project.')) { return await tryUpdateFileRefInMongoInDeletedProject(entry) } @@ -732,22 +719,6 @@ 
async function tryUpdateFileRefInMongo(entry) { return result.matchedCount === 1 } -/** - * @param {QueueEntry} entry - * @return {Promise} - */ -async function tryUpdateDeletedFileRefInMongo(entry) { - STATS.mongoUpdates++ - const result = await deletedFilesCollection.updateOne( - { - _id: new ObjectId(entry.fileId), - projectId: entry.ctx.projectId, - }, - { $set: { hash: entry.hash } } - ) - return result.matchedCount === 1 -} - /** * @param {QueueEntry} entry * @return {Promise} @@ -812,7 +783,6 @@ async function updateFileRefInMongo(entry) { break } if (!found) { - if (await tryUpdateDeletedFileRefInMongo(entry)) return STATS.fileHardDeleted++ console.warn('bug: file hard-deleted while processing', projectId, fileId) return @@ -905,49 +875,22 @@ function* findFiles(ctx, folder, path, isInputLoop = false) { /** * @param {Array} projects * @param {string} prefix - * @param {Map>} deletedFiles * @param {Map>} blobs * @param {Map>} backedUpBlobs * @return Generator */ -function* findFileInBatch( - projects, - prefix, - deletedFiles, - blobs, - backedUpBlobs -) { +function* findFileInBatch(projects, prefix, blobs, backedUpBlobs) { for (const project of projects) { const projectIdS = project._id.toString() const historyIdS = project.overleaf.history.id.toString() const projectBlobs = blobs.get(historyIdS) || [] const projectBackedUpBlobs = new Set(backedUpBlobs.get(projectIdS) || []) - const projectDeletedFiles = deletedFiles.get(projectIdS) || [] const ctx = new ProjectContext( project._id, historyIdS, projectBlobs, projectBackedUpBlobs ) - for (const fileRef of projectDeletedFiles) { - const fileId = fileRef._id.toString() - if (fileRef.hash) { - if (ctx.canSkipProcessingHashedFile(fileRef.hash)) continue - ctx.remainingQueueEntries++ - STATS.filesWithHash++ - yield { - ctx, - cacheKey: fileRef.hash, - fileId, - hash: fileRef.hash, - path: MONGO_PATH_SKIP_WRITE_HASH_TO_FILE_TREE, - } - } else { - ctx.remainingQueueEntries++ - STATS.filesWithoutHash++ - yield { ctx, cacheKey: fileId, fileId, path: MONGO_PATH_DELETED_FILE } - } - } for (const blob of projectBlobs) { if (projectBackedUpBlobs.has(blob.getHash())) continue ctx.remainingQueueEntries++ @@ -981,41 +924,6 @@ async function collectProjectBlobs(batch) { return await getProjectBlobsBatch(batch.map(p => p.overleaf.history.id)) } -/** - * @param {Array} projects - * @return {Promise>>} - */ -async function collectDeletedFiles(projects) { - const deletedFiles = new Map() - if (!PROCESS_DELETED_FILES) return deletedFiles - - const cursor = deletedFilesCollection.find( - { - projectId: { $in: projects.map(p => p._id) }, - ...(PROCESS_HASHED_FILES - ? 
{} - : { - hash: { $exists: false }, - }), - }, - { - projection: { _id: 1, projectId: 1, hash: 1 }, - readPreference: READ_PREFERENCE_SECONDARY, - sort: { projectId: 1 }, - } - ) - for await (const deletedFileRef of cursor) { - const projectId = deletedFileRef.projectId.toString() - const found = deletedFiles.get(projectId) - if (found) { - found.push(deletedFileRef) - } else { - deletedFiles.set(projectId, [deletedFileRef]) - } - } - return deletedFiles -} - /** * @param {Array} projects * @return {Promise<{nBackedUpBlobs:number,backedUpBlobs:Map>}>} @@ -1043,7 +951,6 @@ async function collectBackedUpBlobs(projects) { const BATCH_HASH_WRITES = 1_000 const BATCH_FILE_UPDATES = 100 -const MONGO_PATH_DELETED_FILE = 'deleted-file' const MONGO_PATH_SKIP_WRITE_HASH_TO_FILE_TREE = 'skip-write-to-file-tree' class ProjectContext { @@ -1264,9 +1171,7 @@ class ProjectContext { const projectEntries = [] const deletedProjectEntries = [] for (const entry of this.#pendingFileWrites) { - if (entry.path === MONGO_PATH_DELETED_FILE) { - individualUpdates.push(entry) - } else if (entry.path.startsWith('project.')) { + if (entry.path.startsWith('project.')) { deletedProjectEntries.push(entry) } else { projectEntries.push(entry) diff --git a/services/history-v1/storage/scripts/expire_redis_chunks.js b/services/history-v1/storage/scripts/expire_redis_chunks.js index af2be097b6..60ce4c66f6 100644 --- a/services/history-v1/storage/scripts/expire_redis_chunks.js +++ b/services/history-v1/storage/scripts/expire_redis_chunks.js @@ -14,12 +14,9 @@ logger.initialize('expire-redis-chunks') async function expireProjectAction(projectId) { const job = await claimExpireJob(projectId) - try { - await expireProject(projectId) - } finally { - if (job && job.close) { - await job.close() - } + await expireProject(projectId) + if (job && job.close) { + await job.close() } } diff --git a/services/history-v1/storage/scripts/persist_and_expire_queues.sh b/services/history-v1/storage/scripts/persist_and_expire_queues.sh new file mode 100644 index 0000000000..d5789541da --- /dev/null +++ b/services/history-v1/storage/scripts/persist_and_expire_queues.sh @@ -0,0 +1,3 @@ +#!/bin/sh +node storage/scripts/persist_redis_chunks.mjs --queue --max-time 270 +node storage/scripts/expire_redis_chunks.js diff --git a/services/history-v1/storage/scripts/persist_redis_chunks.mjs b/services/history-v1/storage/scripts/persist_redis_chunks.mjs new file mode 100644 index 0000000000..dd7e9f3a51 --- /dev/null +++ b/services/history-v1/storage/scripts/persist_redis_chunks.mjs @@ -0,0 +1,181 @@ +import config from 'config' +import PQueue from 'p-queue' +import { fetchNothing } from '@overleaf/fetch-utils' +import logger from '@overleaf/logger' +import commandLineArgs from 'command-line-args' +import * as redis from '../lib/redis.js' +import knex from '../lib/knex.js' +import knexReadOnly from '../lib/knex_read_only.js' +import { client } from '../lib/mongodb.js' +import { scanAndProcessDueItems } from '../lib/scan.js' +import persistBuffer from '../lib/persist_buffer.js' +import { claimPersistJob } from '../lib/chunk_store/redis.js' +import { loadGlobalBlobs } from '../lib/blob_store/index.js' +import { EventEmitter } from 'node:events' +import { fileURLToPath } from 'node:url' + +// Something is registering 11 listeners, over the limit of 10, which generates +// a lot of warning noise. 
+EventEmitter.defaultMaxListeners = 11 + +const rclient = redis.rclientHistory + +const optionDefinitions = [ + { name: 'dry-run', alias: 'd', type: Boolean }, + { name: 'queue', type: Boolean }, + { name: 'max-time', type: Number }, + { name: 'min-rate', type: Number, defaultValue: 1 }, +] +const options = commandLineArgs(optionDefinitions) +const DRY_RUN = options['dry-run'] || false +const USE_QUEUE = options.queue || false +const MAX_TIME = options['max-time'] || null +const MIN_RATE = options['min-rate'] +const HISTORY_V1_URL = `http://${process.env.HISTORY_V1_HOST || 'localhost'}:${process.env.PORT || 3100}` +let isShuttingDown = false + +logger.initialize('persist-redis-chunks') + +async function persistProjectAction(projectId) { + const job = await claimPersistJob(projectId) + // Set limits to force us to persist all of the changes. + const farFuture = new Date() + farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000) + const limits = { + maxChanges: 0, + minChangeTimestamp: farFuture, + maxChangeTimestamp: farFuture, + autoResync: true, + } + await persistBuffer(projectId, limits) + if (job && job.close) { + await job.close() + } +} + +async function requestProjectFlush(projectId) { + const job = await claimPersistJob(projectId) + logger.debug({ projectId }, 'sending project flush request') + const url = `${HISTORY_V1_URL}/api/projects/${projectId}/flush` + const credentials = Buffer.from( + `staging:${config.get('basicHttpAuth.password')}` + ).toString('base64') + await fetchNothing(url, { + method: 'POST', + headers: { + Authorization: `Basic ${credentials}`, + }, + }) + if (job && job.close) { + await job.close() + } +} + +async function persistQueuedProjects(queuedProjects) { + const totalCount = queuedProjects.size + // Compute the rate at which we need to dispatch requests + const targetRate = MAX_TIME > 0 ? Math.ceil(totalCount / MAX_TIME) : 0 + // Rate limit to spread the requests over the interval. + const queue = new PQueue({ + intervalCap: Math.max(MIN_RATE, targetRate), + interval: 1000, // use a 1 second interval + }) + logger.info( + { totalCount, targetRate, minRate: MIN_RATE, maxTime: MAX_TIME }, + 'dispatching project flush requests' + ) + const startTime = Date.now() + let dispatchedCount = 0 + for (const projectId of queuedProjects) { + if (isShuttingDown) { + logger.info('Shutting down, stopping project flush requests') + queue.clear() + break + } + queue.add(async () => { + try { + await requestProjectFlush(projectId) + } catch (err) { + logger.error({ err, projectId }, 'error while flushing project') + } + }) + dispatchedCount++ + if (dispatchedCount % 1000 === 0) { + logger.info( + { count: dispatchedCount }, + 'dispatched project flush requests' + ) + } + await queue.onEmpty() + } + const elapsedTime = Math.floor((Date.now() - startTime) / 1000) + logger.info( + { count: totalCount, elapsedTime }, + 'dispatched project flush requests' + ) + await queue.onIdle() +} + +async function runPersistChunks() { + const queuedProjects = new Set() + + async function queueProjectAction(projectId) { + queuedProjects.add(projectId) + } + + await loadGlobalBlobs() + await scanAndProcessDueItems( + rclient, + 'persistChunks', + 'persist-time', + USE_QUEUE ? 
queueProjectAction : persistProjectAction, + DRY_RUN + ) + + if (USE_QUEUE) { + if (isShuttingDown) { + logger.info('Shutting down, skipping queued project persistence') + return + } + logger.info( + { count: queuedProjects.size }, + 'queued projects for persistence' + ) + await persistQueuedProjects(queuedProjects) + } +} + +async function main() { + try { + await runPersistChunks() + } catch (err) { + logger.fatal( + { err, taskName: 'persistChunks' }, + 'Unhandled error in runPersistChunks' + ) + process.exit(1) + } finally { + await redis.disconnect() + await client.close() + await knex.destroy() + await knexReadOnly.destroy() + } +} + +function gracefulShutdown() { + if (isShuttingDown) { + return + } + isShuttingDown = true + logger.info({ isShuttingDown }, 'received shutdown signal, cleaning up...') +} + +// Check if the module is being run directly +const currentScriptPath = fileURLToPath(import.meta.url) +if (process.argv[1] === currentScriptPath) { + process.on('SIGINT', gracefulShutdown) + process.on('SIGTERM', gracefulShutdown) + main() +} + +export { runPersistChunks } diff --git a/services/history-v1/storage/scripts/show_buffer.js b/services/history-v1/storage/scripts/show_buffer.js new file mode 100644 index 0000000000..1d80ee227d --- /dev/null +++ b/services/history-v1/storage/scripts/show_buffer.js @@ -0,0 +1,117 @@ +#!/usr/bin/env node +// @ts-check + +const { rclientHistory: rclient } = require('../lib/redis') +const { keySchema } = require('../lib/chunk_store/redis') +const commandLineArgs = require('command-line-args') + +const optionDefinitions = [ + { name: 'historyId', type: String, defaultOption: true }, +] + +// Column width for key display alignment; can be overridden with COL_WIDTH env variable +const COLUMN_WIDTH = process.env.COL_WIDTH + ? parseInt(process.env.COL_WIDTH, 10) + : 45 + +let options +try { + options = commandLineArgs(optionDefinitions) +} catch (e) { + console.error( + 'Error parsing command line arguments:', + e instanceof Error ? 
e.message : String(e) + ) + console.error('Usage: ./show_buffer.js ') + process.exit(1) +} + +const { historyId } = options + +if (!historyId) { + console.error('Usage: ./show_buffer.js ') + process.exit(1) +} + +function format(str, indent = COLUMN_WIDTH + 2) { + const lines = str.split('\n') + for (let i = 1; i < lines.length; i++) { + lines[i] = ' '.repeat(indent) + lines[i] + } + return lines.join('\n') +} + +async function displayKeyValue( + rclient, + key, + { parseJson = false, formatDate = false } = {} +) { + const value = await rclient.get(key) + let displayValue = '(nil)' + if (value) { + if (parseJson) { + try { + displayValue = format(JSON.stringify(JSON.parse(value), null, 2)) + } catch (e) { + displayValue = ` Raw value: ${value}` + } + } else if (formatDate) { + const ts = parseInt(value, 10) + displayValue = `${new Date(ts).toISOString()} (${value})` + } else { + displayValue = value + } + } + console.log(`${key.padStart(COLUMN_WIDTH)}: ${displayValue}`) +} + +async function displayBuffer(projectId) { + console.log(`Buffer for history ID: ${projectId}`) + console.log('--------------------------------------------------') + + try { + const headKey = keySchema.head({ projectId }) + const headVersionKey = keySchema.headVersion({ projectId }) + const persistedVersionKey = keySchema.persistedVersion({ projectId }) + const expireTimeKey = keySchema.expireTime({ projectId }) + const persistTimeKey = keySchema.persistTime({ projectId }) + const changesKey = keySchema.changes({ projectId }) + + await displayKeyValue(rclient, headKey, { parseJson: true }) + await displayKeyValue(rclient, headVersionKey) + await displayKeyValue(rclient, persistedVersionKey) + await displayKeyValue(rclient, expireTimeKey, { formatDate: true }) + await displayKeyValue(rclient, persistTimeKey, { formatDate: true }) + + const changesList = await rclient.lrange(changesKey, 0, -1) + + // 6. changes + let changesListDisplay = '(nil)' + if (changesList) { + changesListDisplay = changesList.length + ? format( + changesList + .map((change, index) => `[${index}]: ${change}`) + .join('\n') + ) + : '(empty list)' + } + console.log(`${changesKey.padStart(COLUMN_WIDTH)}: ${changesListDisplay}`) + } catch (error) { + console.error('Error fetching data from Redis:', error) + throw error + } +} + +;(async () => { + let errorOccurred = false + try { + await displayBuffer(historyId) + } catch (error) { + errorOccurred = true + } finally { + rclient.quit(() => { + process.exit(errorOccurred ? 
1 : 0) + }) + } +})() diff --git a/services/history-v1/test/acceptance/js/api/project_flush.test.js b/services/history-v1/test/acceptance/js/api/project_flush.test.js new file mode 100644 index 0000000000..f8d0b23d8e --- /dev/null +++ b/services/history-v1/test/acceptance/js/api/project_flush.test.js @@ -0,0 +1,66 @@ +'use strict' + +const BPromise = require('bluebird') +const { expect } = require('chai') +const HTTPStatus = require('http-status') +const fetch = require('node-fetch') +const fs = BPromise.promisifyAll(require('node:fs')) + +const cleanup = require('../storage/support/cleanup') +const fixtures = require('../storage/support/fixtures') +const testFiles = require('../storage/support/test_files') +const testProjects = require('./support/test_projects') +const testServer = require('./support/test_server') + +const { Change, File, Operation } = require('overleaf-editor-core') +const queueChanges = require('../../../../storage/lib/queue_changes') +const { getState } = require('../../../../storage/lib/chunk_store/redis') + +describe('project flush', function () { + beforeEach(cleanup.everything) + beforeEach(fixtures.create) + + it('persists queued changes to the chunk store', async function () { + const basicAuthClient = testServer.basicAuthClient + const projectId = await testProjects.createEmptyProject() + + // upload an empty file + const response = await fetch( + testServer.url( + `/api/projects/${projectId}/blobs/${File.EMPTY_FILE_HASH}`, + { qs: { pathname: 'main.tex' } } + ), + { + method: 'PUT', + body: fs.createReadStream(testFiles.path('empty.tex')), + headers: { + Authorization: testServer.basicAuthHeader, + }, + } + ) + expect(response.ok).to.be.true + + const testFile = File.fromHash(File.EMPTY_FILE_HASH) + const testChange = new Change( + [Operation.addFile('main.tex', testFile)], + new Date() + ) + await queueChanges(projectId, [testChange], 0) + + // Verify that the changes are queued and not yet persisted + const initialState = await getState(projectId) + expect(initialState.persistedVersion).to.be.null + expect(initialState.changes).to.have.lengthOf(1) + + const importResponse = + await basicAuthClient.apis.ProjectImport.flushChanges({ + project_id: projectId, + }) + + expect(importResponse.status).to.equal(HTTPStatus.OK) + + // Verify that the changes were persisted to the chunk store + const finalState = await getState(projectId) + expect(finalState.persistedVersion).to.equal(1) + }) +}) diff --git a/services/history-v1/test/acceptance/js/api/rollout.test.js b/services/history-v1/test/acceptance/js/api/rollout.test.js new file mode 100644 index 0000000000..f1a65e5aff --- /dev/null +++ b/services/history-v1/test/acceptance/js/api/rollout.test.js @@ -0,0 +1,115 @@ +const config = require('config') +const sinon = require('sinon') +const { expect } = require('chai') + +const cleanup = require('../storage/support/cleanup') +const Rollout = require('../../../../api/app/rollout') + +describe('rollout', function () { + beforeEach(cleanup.everything) + beforeEach('Set up stubs', function () { + sinon.stub(config, 'has').callThrough() + sinon.stub(config, 'get').callThrough() + }) + afterEach(sinon.restore) + + it('should return a valid history buffer level', function () { + setMockConfig('historyBufferLevel', '2') + setMockConfig('forcePersistBuffer', 'false') + + const rollout = new Rollout(config) + const { historyBufferLevel, forcePersistBuffer } = + rollout.getHistoryBufferLevelOptions('test-project-id') + expect(historyBufferLevel).to.equal(2) + 
expect(forcePersistBuffer).to.be.false + }) + + it('should return a valid history buffer level and force persist buffer options', function () { + setMockConfig('historyBufferLevel', '1') + setMockConfig('forcePersistBuffer', 'true') + const rollout = new Rollout(config) + const { historyBufferLevel, forcePersistBuffer } = + rollout.getHistoryBufferLevelOptions('test-project-id') + expect(historyBufferLevel).to.equal(1) + expect(forcePersistBuffer).to.be.true + }) + + describe('with a higher next history buffer level rollout', function () { + beforeEach(function () { + setMockConfig('historyBufferLevel', '2') + setMockConfig('forcePersistBuffer', 'false') + setMockConfig('nextHistoryBufferLevel', '3') + }) + it('should return the expected history buffer level when the rollout percentage is zero', function () { + setMockConfig('nextHistoryBufferLevelRolloutPercentage', '0') + const rollout = new Rollout(config) + for (let i = 0; i < 1000; i++) { + const { historyBufferLevel, forcePersistBuffer } = + rollout.getHistoryBufferLevelOptions(`test-project-id-${i}`) + expect(historyBufferLevel).to.equal(2) + expect(forcePersistBuffer).to.be.false + } + }) + + it('should return the expected distribution of levels when the rollout percentage is 10%', function () { + setMockConfig('nextHistoryBufferLevelRolloutPercentage', '10') + const rollout = new Rollout(config) + let currentLevel = 0 + let nextLevel = 0 + for (let i = 0; i < 1000; i++) { + const { historyBufferLevel } = rollout.getHistoryBufferLevelOptions( + `test-project-id-${i}` + ) + switch (historyBufferLevel) { + case 2: + currentLevel++ + break + case 3: + nextLevel++ + break + default: + expect.fail( + `Unexpected history buffer level: ${historyBufferLevel}` + ) + } + } + const twoPercentage = (currentLevel / 1000) * 100 + const threePercentage = (nextLevel / 1000) * 100 + expect(twoPercentage).to.be.closeTo(90, 5) // 90% for level 2 + expect(threePercentage).to.be.closeTo(10, 5) // 10% for level 3 + }) + }) + describe('with a next history buffer level lower than the current level', function () { + beforeEach(function () { + setMockConfig('historyBufferLevel', '3') + setMockConfig('forcePersistBuffer', 'false') + setMockConfig('nextHistoryBufferLevel', '2') + }) + it('should always return the current level when the rollout percentage is zero', function () { + setMockConfig('nextHistoryBufferLevelRolloutPercentage', '0') + const rollout = new Rollout(config) + for (let i = 0; i < 1000; i++) { + const { historyBufferLevel, forcePersistBuffer } = + rollout.getHistoryBufferLevelOptions(`test-project-id-${i}`) + expect(historyBufferLevel).to.equal(3) + expect(forcePersistBuffer).to.be.false + } + }) + + it('should always return the current level regardless of the rollout percentage', function () { + setMockConfig('nextHistoryBufferLevelRolloutPercentage', '10') + const rollout = new Rollout(config) + for (let i = 0; i < 1000; i++) { + const { historyBufferLevel } = rollout.getHistoryBufferLevelOptions( + `test-project-id-${i}` + ) + expect(historyBufferLevel).to.equal(3) + } + }) + }) +}) + +function setMockConfig(path, value) { + config.has.withArgs(path).returns(true) + config.get.withArgs(path).returns(value) +} diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs index fad87b4703..fd39369a71 100644 --- a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs +++ 
b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs @@ -35,7 +35,6 @@ const { tieringStorageClass } = config.get('backupPersistor') const projectsCollection = db.collection('projects') const deletedProjectsCollection = db.collection('deletedProjects') -const deletedFilesCollection = db.collection('deletedFiles') const FILESTORE_PERSISTOR = ObjectPersistor({ backend: 'gcs', @@ -130,11 +129,8 @@ describe('back_fill_file_hash script', function () { const fileId7 = objectIdFromTime('2017-02-01T00:07:00Z') const fileId8 = objectIdFromTime('2017-02-01T00:08:00Z') const fileId9 = objectIdFromTime('2017-02-01T00:09:00Z') - const fileIdDeleted1 = objectIdFromTime('2017-03-01T00:01:00Z') - const fileIdDeleted2 = objectIdFromTime('2017-03-01T00:02:00Z') - const fileIdDeleted3 = objectIdFromTime('2017-03-01T00:03:00Z') - const fileIdDeleted4 = objectIdFromTime('2024-03-01T00:04:00Z') - const fileIdDeleted5 = objectIdFromTime('2024-03-01T00:05:00Z') + const fileId10 = objectIdFromTime('2017-02-01T00:10:00Z') + const fileId11 = objectIdFromTime('2017-02-01T00:11:00Z') const contentTextBlob0 = Buffer.from('Hello 0') const hashTextBlob0 = gitBlobHashBuffer(contentTextBlob0) const contentTextBlob1 = Buffer.from('Hello 1') @@ -161,7 +157,6 @@ describe('back_fill_file_hash script', function () { hash: hashFile7, content: contentFile7, }, - { projectId: projectId0, historyId: historyId0, fileId: fileIdDeleted5 }, { projectId: projectId0, historyId: historyId0, @@ -181,7 +176,6 @@ describe('back_fill_file_hash script', function () { content: contentTextBlob2, }, { projectId: projectId1, historyId: historyId1, fileId: fileId1 }, - { projectId: projectId1, historyId: historyId1, fileId: fileIdDeleted1 }, { projectId: projectId2, historyId: historyId2, @@ -189,23 +183,28 @@ describe('back_fill_file_hash script', function () { hasHash: true, }, { projectId: projectId3, historyId: historyId3, fileId: fileId3 }, + // fileId10 is dupe of fileId3, without a hash + { + projectId: projectId3, + historyId: historyId3, + fileId: fileId10, + content: Buffer.from(fileId3.toString()), + hash: gitBlobHash(fileId3), + }, + // fileId11 is dupe of fileId3, but with a hash + { + projectId: projectId3, + historyId: historyId3, + fileId: fileId11, + content: Buffer.from(fileId3.toString()), + hash: gitBlobHash(fileId3), + hasHash: true, + }, { projectId: projectIdDeleted0, historyId: historyIdDeleted0, fileId: fileId4, }, - { - projectId: projectIdDeleted0, - historyId: historyIdDeleted0, - fileId: fileIdDeleted2, - }, - // { historyId: historyIdDeleted0, fileId:fileIdDeleted3 }, // fileIdDeleted3 is dupe of fileIdDeleted2 - { - projectId: projectIdDeleted0, - historyId: historyIdDeleted0, - fileId: fileIdDeleted4, - hasHash: true, - }, { projectId: projectIdDeleted1, historyId: historyIdDeleted1, @@ -233,10 +232,6 @@ describe('back_fill_file_hash script', function () { fileId4, fileId5, fileId6, - fileIdDeleted1, - fileIdDeleted2, - fileIdDeleted3, - fileIdDeleted4, } console.log({ projectId0, @@ -328,7 +323,11 @@ describe('back_fill_file_hash script', function () { fileRefs: [], folders: [ { - fileRefs: [{ _id: fileId3 }], + fileRefs: [ + { _id: fileId3 }, + { _id: fileId10 }, + { _id: fileId11, hash: gitBlobHash(fileId3) }, + ], folders: [], }, ], @@ -446,17 +445,6 @@ describe('back_fill_file_hash script', function () { }, }, ]) - await deletedFilesCollection.insertMany([ - { _id: fileIdDeleted1, projectId: projectId1 }, - { _id: fileIdDeleted2, projectId: projectIdDeleted0 }, - { _id: fileIdDeleted3, 
projectId: projectIdDeleted0 }, - { - _id: fileIdDeleted4, - projectId: projectIdDeleted0, - hash: gitBlobHash(fileIdDeleted4), - }, - { _id: fileIdDeleted5, projectId: projectId0 }, - ]) } async function populateHistoryV1() { @@ -499,11 +487,6 @@ describe('back_fill_file_hash script', function () { `${projectId0}/${fileId7}`, Stream.Readable.from([contentFile7]) ) - await FILESTORE_PERSISTOR.sendStream( - USER_FILES_BUCKET_NAME, - `${projectId0}/${fileIdDeleted5}`, - Stream.Readable.from([fileIdDeleted5.toString()]) - ) await FILESTORE_PERSISTOR.sendStream( USER_FILES_BUCKET_NAME, `${projectId1}/${fileId1}`, @@ -519,6 +502,18 @@ describe('back_fill_file_hash script', function () { `${projectId3}/${fileId3}`, Stream.Readable.from([fileId3.toString()]) ) + await FILESTORE_PERSISTOR.sendStream( + USER_FILES_BUCKET_NAME, + `${projectId3}/${fileId10}`, + // fileId10 is dupe of fileId3 + Stream.Readable.from([fileId3.toString()]) + ) + await FILESTORE_PERSISTOR.sendStream( + USER_FILES_BUCKET_NAME, + `${projectId3}/${fileId11}`, + // fileId11 is dupe of fileId3 + Stream.Readable.from([fileId3.toString()]) + ) await FILESTORE_PERSISTOR.sendStream( USER_FILES_BUCKET_NAME, `${projectIdDeleted0}/${fileId4}`, @@ -529,27 +524,6 @@ describe('back_fill_file_hash script', function () { `${projectIdDeleted1}/${fileId5}`, Stream.Readable.from([fileId5.toString()]) ) - await FILESTORE_PERSISTOR.sendStream( - USER_FILES_BUCKET_NAME, - `${projectId1}/${fileIdDeleted1}`, - Stream.Readable.from([fileIdDeleted1.toString()]) - ) - await FILESTORE_PERSISTOR.sendStream( - USER_FILES_BUCKET_NAME, - `${projectIdDeleted0}/${fileIdDeleted2}`, - Stream.Readable.from([fileIdDeleted2.toString()]) - ) - await FILESTORE_PERSISTOR.sendStream( - USER_FILES_BUCKET_NAME, - `${projectIdDeleted0}/${fileIdDeleted3}`, - // same content as 2, deduplicate - Stream.Readable.from([fileIdDeleted2.toString()]) - ) - await FILESTORE_PERSISTOR.sendStream( - USER_FILES_BUCKET_NAME, - `${projectIdDeleted0}/${fileIdDeleted4}`, - Stream.Readable.from([fileIdDeleted4.toString()]) - ) await FILESTORE_PERSISTOR.sendStream( USER_FILES_BUCKET_NAME, `${projectIdBadFileTree3}/${fileId9}`, @@ -579,7 +553,6 @@ describe('back_fill_file_hash script', function () { 'storage/scripts/back_fill_file_hash.mjs', '--processNonDeletedProjects=true', '--processDeletedProjects=true', - '--processDeletedFiles=true', ...args, ], { @@ -741,6 +714,8 @@ describe('back_fill_file_hash script', function () { { fileRefs: [ { _id: fileId3, hash: gitBlobHash(fileId3) }, + { _id: fileId10, hash: gitBlobHash(fileId3) }, + { _id: fileId11, hash: gitBlobHash(fileId3) }, ], folders: [], }, @@ -868,34 +843,6 @@ describe('back_fill_file_hash script', function () { }, }, ]) - expect(await deletedFilesCollection.find({}).toArray()).to.deep.equal([ - { - _id: fileIdDeleted1, - projectId: projectId1, - hash: gitBlobHash(fileIdDeleted1), - }, - { - _id: fileIdDeleted2, - projectId: projectIdDeleted0, - hash: gitBlobHash(fileIdDeleted2), - }, - { - _id: fileIdDeleted3, - projectId: projectIdDeleted0, - // uses the same content as fileIdDeleted2 - hash: gitBlobHash(fileIdDeleted2), - }, - { - _id: fileIdDeleted4, - projectId: projectIdDeleted0, - hash: gitBlobHash(fileIdDeleted4), - }, - { - _id: fileIdDeleted5, - projectId: projectId0, - hash: gitBlobHash(fileIdDeleted5), - }, - ]) expect( (await backedUpBlobs.find({}, { sort: { _id: 1 } }).toArray()).map( entry => { @@ -910,7 +857,6 @@ describe('back_fill_file_hash script', function () { blobs: [ 
binaryForGitBlobHash(gitBlobHash(fileId0)), binaryForGitBlobHash(hashFile7), - binaryForGitBlobHash(gitBlobHash(fileIdDeleted5)), binaryForGitBlobHash(hashTextBlob0), ].sort(), }, @@ -918,7 +864,6 @@ describe('back_fill_file_hash script', function () { _id: projectId1, blobs: [ binaryForGitBlobHash(gitBlobHash(fileId1)), - binaryForGitBlobHash(gitBlobHash(fileIdDeleted1)), binaryForGitBlobHash(hashTextBlob1), ].sort(), }, @@ -934,16 +879,7 @@ describe('back_fill_file_hash script', function () { }, { _id: projectIdDeleted0, - blobs: [ - binaryForGitBlobHash(gitBlobHash(fileId4)), - binaryForGitBlobHash(gitBlobHash(fileIdDeleted2)), - ] - .concat( - processHashedFiles - ? [binaryForGitBlobHash(gitBlobHash(fileIdDeleted4))] - : [] - ) - .sort(), + blobs: [binaryForGitBlobHash(gitBlobHash(fileId4))].sort(), }, { _id: projectId3, @@ -971,11 +907,15 @@ describe('back_fill_file_hash script', function () { expect(tieringStorageClass).to.exist const blobs = await listS3Bucket(projectBlobsBucket, tieringStorageClass) expect(blobs.sort()).to.deep.equal( - writtenBlobs - .map(({ historyId, fileId, hash }) => - makeProjectKey(historyId, hash || gitBlobHash(fileId)) + Array.from( + new Set( + writtenBlobs + .map(({ historyId, fileId, hash }) => + makeProjectKey(historyId, hash || gitBlobHash(fileId)) + ) + .sort() ) - .sort() + ) ) for (let { historyId, fileId, hash, content } of writtenBlobs) { hash = hash || gitBlobHash(fileId.toString()) @@ -1037,15 +977,15 @@ describe('back_fill_file_hash script', function () { ...STATS_ALL_ZERO, // We still need to iterate over all the projects and blobs. projects: 10, - blobs: 13, - backedUpBlobs: 13, + blobs: 10, + backedUpBlobs: 10, badFileTrees: 4, } if (processHashedFiles) { stats = sumStats(stats, { ...STATS_ALL_ZERO, - blobs: 3, - backedUpBlobs: 3, + blobs: 2, + backedUpBlobs: 2, }) } expect(rerun.stats).deep.equal(stats) @@ -1101,7 +1041,7 @@ describe('back_fill_file_hash script', function () { blobs: 2, backedUpBlobs: 0, filesWithHash: 0, - filesWithoutHash: 7, + filesWithoutHash: 5, filesDuplicated: 1, filesRetries: 0, filesFailed: 0, @@ -1112,24 +1052,24 @@ describe('back_fill_file_hash script', function () { projectHardDeleted: 0, fileHardDeleted: 0, badFileTrees: 0, - mongoUpdates: 6, + mongoUpdates: 4, deduplicatedWriteToAWSLocalCount: 0, deduplicatedWriteToAWSLocalEgress: 0, deduplicatedWriteToAWSRemoteCount: 0, deduplicatedWriteToAWSRemoteEgress: 0, - readFromGCSCount: 8, - readFromGCSIngress: 4000134, - writeToAWSCount: 7, - writeToAWSEgress: 4086, - writeToGCSCount: 5, - writeToGCSEgress: 4000096, + readFromGCSCount: 6, + readFromGCSIngress: 4000086, + writeToAWSCount: 5, + writeToAWSEgress: 4026, + writeToGCSCount: 3, + writeToGCSEgress: 4000048, } const STATS_UP_FROM_PROJECT1_ONWARD = { projects: 8, blobs: 2, backedUpBlobs: 0, filesWithHash: 0, - filesWithoutHash: 5, + filesWithoutHash: 4, filesDuplicated: 0, filesRetries: 0, filesFailed: 0, @@ -1140,28 +1080,28 @@ describe('back_fill_file_hash script', function () { projectHardDeleted: 0, fileHardDeleted: 0, badFileTrees: 4, - mongoUpdates: 10, + mongoUpdates: 8, deduplicatedWriteToAWSLocalCount: 1, deduplicatedWriteToAWSLocalEgress: 30, deduplicatedWriteToAWSRemoteCount: 0, deduplicatedWriteToAWSRemoteEgress: 0, - readFromGCSCount: 7, - readFromGCSIngress: 134, - writeToAWSCount: 6, - writeToAWSEgress: 173, - writeToGCSCount: 4, - writeToGCSEgress: 96, + readFromGCSCount: 6, + readFromGCSIngress: 110, + writeToAWSCount: 5, + writeToAWSEgress: 143, + writeToGCSCount: 3, + writeToGCSEgress: 72, 
} const STATS_FILES_HASHED_EXTRA = { ...STATS_ALL_ZERO, - filesWithHash: 3, - mongoUpdates: 1, - readFromGCSCount: 3, - readFromGCSIngress: 72, - writeToAWSCount: 3, - writeToAWSEgress: 89, - writeToGCSCount: 3, - writeToGCSEgress: 72, + filesWithHash: 2, + mongoUpdates: 2, + readFromGCSCount: 2, + readFromGCSIngress: 48, + writeToAWSCount: 2, + writeToAWSEgress: 60, + writeToGCSCount: 2, + writeToGCSEgress: 48, } function sumStats(a, b) { @@ -1331,10 +1271,9 @@ describe('back_fill_file_hash script', function () { expect(output2.stats).deep.equal({ ...STATS_FILES_HASHED_EXTRA, projects: 10, - blobs: 13, - backedUpBlobs: 13, + blobs: 10, + backedUpBlobs: 10, badFileTrees: 4, - mongoUpdates: 3, }) }) commonAssertions(true) @@ -1376,7 +1315,15 @@ describe('back_fill_file_hash script', function () { }) it('should print stats', function () { expect(output.stats).deep.equal( - sumStats(STATS_ALL, STATS_FILES_HASHED_EXTRA) + sumStats(STATS_ALL, { + ...STATS_FILES_HASHED_EXTRA, + readFromGCSCount: 3, + readFromGCSIngress: 72, + deduplicatedWriteToAWSLocalCount: 1, + deduplicatedWriteToAWSLocalEgress: 30, + mongoUpdates: 1, + filesWithHash: 3, + }) ) }) commonAssertions(true) diff --git a/services/history-v1/test/acceptance/js/storage/chunk_store.test.js b/services/history-v1/test/acceptance/js/storage/chunk_store.test.js index da70467934..8b06b8e412 100644 --- a/services/history-v1/test/acceptance/js/storage/chunk_store.test.js +++ b/services/history-v1/test/acceptance/js/storage/chunk_store.test.js @@ -470,6 +470,8 @@ describe('chunkStore', function () { describe('with changes queued in the Redis buffer', function () { let queuedChanges + const firstQueuedChangeTimestamp = new Date('2017-01-01T00:01:00') + const lastQueuedChangeTimestamp = new Date('2017-01-01T00:02:00') beforeEach(async function () { const snapshot = thirdChunk.getSnapshot() @@ -481,7 +483,15 @@ describe('chunkStore', function () { 'in-redis.tex', File.createLazyFromBlobs(blob) ), - new Date() + firstQueuedChangeTimestamp + ), + makeChange( + // Add a second change to make the buffer more interesting + Operation.editFile( + 'in-redis.tex', + TextOperation.fromJSON({ textOperation: ['hello'] }) + ), + lastQueuedChangeTimestamp ), ] await redisBackend.queueChanges( @@ -498,6 +508,15 @@ describe('chunkStore', function () { .getChanges() .concat(queuedChanges) expect(chunk.getChanges()).to.deep.equal(expectedChanges) + expect(chunk.getStartVersion()).to.equal( + thirdChunk.getStartVersion() + ) + expect(chunk.getEndVersion()).to.equal( + thirdChunk.getEndVersion() + queuedChanges.length + ) + expect(chunk.getEndTimestamp()).to.deep.equal( + lastQueuedChangeTimestamp + ) }) it('includes the queued changes when getting the latest chunk by timestamp', async function () { @@ -509,6 +528,12 @@ describe('chunkStore', function () { .getChanges() .concat(queuedChanges) expect(chunk.getChanges()).to.deep.equal(expectedChanges) + expect(chunk.getStartVersion()).to.equal( + thirdChunk.getStartVersion() + ) + expect(chunk.getEndVersion()).to.equal( + thirdChunk.getEndVersion() + queuedChanges.length + ) }) it("doesn't include the queued changes when getting another chunk by timestamp", async function () { @@ -518,6 +543,11 @@ describe('chunkStore', function () { ) const expectedChanges = secondChunk.getChanges() expect(chunk.getChanges()).to.deep.equal(expectedChanges) + expect(chunk.getStartVersion()).to.equal( + secondChunk.getStartVersion() + ) + expect(chunk.getEndVersion()).to.equal(secondChunk.getEndVersion()) + 
expect(chunk.getEndTimestamp()).to.deep.equal(secondChunkTimestamp) }) it('includes the queued changes when getting the latest chunk by version', async function () { @@ -529,6 +559,15 @@ describe('chunkStore', function () { .getChanges() .concat(queuedChanges) expect(chunk.getChanges()).to.deep.equal(expectedChanges) + expect(chunk.getStartVersion()).to.equal( + thirdChunk.getStartVersion() + ) + expect(chunk.getEndVersion()).to.equal( + thirdChunk.getEndVersion() + queuedChanges.length + ) + expect(chunk.getEndTimestamp()).to.deep.equal( + lastQueuedChangeTimestamp + ) }) it("doesn't include the queued changes when getting another chunk by version", async function () { @@ -538,6 +577,47 @@ describe('chunkStore', function () { ) const expectedChanges = secondChunk.getChanges() expect(chunk.getChanges()).to.deep.equal(expectedChanges) + expect(chunk.getStartVersion()).to.equal( + secondChunk.getStartVersion() + ) + expect(chunk.getEndVersion()).to.equal(secondChunk.getEndVersion()) + expect(chunk.getEndTimestamp()).to.deep.equal(secondChunkTimestamp) + }) + + it('loads a version that is only in the Redis buffer', async function () { + const versionInRedis = thirdChunk.getEndVersion() + 1 // the first change in Redis + const chunk = await chunkStore.loadAtVersion( + projectId, + versionInRedis + ) + // The chunk should contain changes from the thirdChunk and the queuedChanges + const expectedChanges = thirdChunk + .getChanges() + .concat(queuedChanges) + expect(chunk.getChanges()).to.deep.equal(expectedChanges) + expect(chunk.getStartVersion()).to.equal( + thirdChunk.getStartVersion() + ) + expect(chunk.getEndVersion()).to.equal( + thirdChunk.getEndVersion() + queuedChanges.length + ) + expect(chunk.getEndTimestamp()).to.deep.equal( + lastQueuedChangeTimestamp + ) + }) + + it('throws an error when loading a version beyond the Redis buffer', async function () { + const versionBeyondRedis = + thirdChunk.getEndVersion() + queuedChanges.length + 1 + await expect( + chunkStore.loadAtVersion(projectId, versionBeyondRedis) + ) + .to.be.rejectedWith(chunkStore.VersionOutOfBoundsError) + .and.eventually.satisfy(err => { + expect(err.info).to.have.property('projectId', projectId) + expect(err.info).to.have.property('version', versionBeyondRedis) + return true + }) }) }) diff --git a/services/history-v1/test/acceptance/js/storage/chunk_store_redis_backend.test.js b/services/history-v1/test/acceptance/js/storage/chunk_store_redis_backend.test.js index 2b13343fc4..d34cd701d0 100644 --- a/services/history-v1/test/acceptance/js/storage/chunk_store_redis_backend.test.js +++ b/services/history-v1/test/acceptance/js/storage/chunk_store_redis_backend.test.js @@ -699,6 +699,8 @@ describe('chunk buffer Redis backend', function () { }) describe('setPersistedVersion', function () { + const persistTime = Date.now() + 60 * 1000 // 1 minute from now + it('should return not_found when project does not exist', async function () { const result = await redisBackend.setPersistedVersion(projectId, 5) expect(result).to.equal('not_found') @@ -709,15 +711,41 @@ describe('chunk buffer Redis backend', function () { await setupState(projectId, { headVersion: 5, persistedVersion: null, + persistTime, changes: 5, }) }) it('should set the persisted version', async function () { - await redisBackend.setPersistedVersion(projectId, 3) + const status = await redisBackend.setPersistedVersion(projectId, 3) + expect(status).to.equal('ok') const state = await redisBackend.getState(projectId) expect(state.persistedVersion).to.equal(3) }) + 
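+      // The tests below pin down the side effects of setPersistedVersion on
+      // persistTime, as asserted here: persistTime is cleared only once the
+      // persisted version catches up with the head version, is left unchanged
+      // while it still lags behind, and the call is rejected with
+      // VersionOutOfBoundsError when the requested version exceeds the head.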
+ it('should leave the persist time if the persisted version is not current', async function () { + const status = await redisBackend.setPersistedVersion(projectId, 3) + expect(status).to.equal('ok') + const state = await redisBackend.getState(projectId) + expect(state.persistTime).to.deep.equal(persistTime) // Persist time should remain unchanged + }) + + it('should refuse to set a persisted version greater than the head version', async function () { + await expect( + redisBackend.setPersistedVersion(projectId, 10) + ).to.be.rejectedWith(VersionOutOfBoundsError) + // Ensure persisted version remains unchanged + const state = await redisBackend.getState(projectId) + expect(state.persistedVersion).to.be.null + }) + + it('should clear the persist time when the persisted version is current', async function () { + const status = await redisBackend.setPersistedVersion(projectId, 5) + expect(status).to.equal('ok') + const state = await redisBackend.getState(projectId) + expect(state.persistedVersion).to.equal(5) + expect(state.persistTime).to.be.null // Persist time should be cleared + }) }) describe('when the persisted version is set', function () { @@ -725,18 +753,46 @@ describe('chunk buffer Redis backend', function () { await setupState(projectId, { headVersion: 5, persistedVersion: 3, + persistTime, changes: 5, }) }) it('should set the persisted version', async function () { - await redisBackend.setPersistedVersion(projectId, 5) + const status = await redisBackend.setPersistedVersion(projectId, 5) + expect(status).to.equal('ok') const state = await redisBackend.getState(projectId) expect(state.persistedVersion).to.equal(5) }) + it('should clear the persist time when the persisted version is current', async function () { + const status = await redisBackend.setPersistedVersion(projectId, 5) + expect(status).to.equal('ok') + const state = await redisBackend.getState(projectId) + expect(state.persistedVersion).to.equal(5) + expect(state.persistTime).to.be.null // Persist time should be cleared + }) + + it('should leave the persist time if the persisted version is not current', async function () { + const status = await redisBackend.setPersistedVersion(projectId, 4) + expect(status).to.equal('ok') + const state = await redisBackend.getState(projectId) + expect(state.persistedVersion).to.equal(4) + expect(state.persistTime).to.deep.equal(persistTime) // Persist time should remain unchanged + }) + it('should not decrease the persisted version', async function () { - await redisBackend.setPersistedVersion(projectId, 2) + const status = await redisBackend.setPersistedVersion(projectId, 2) + expect(status).to.equal('too_low') + const state = await redisBackend.getState(projectId) + expect(state.persistedVersion).to.equal(3) + }) + + it('should refuse to set a persisted version greater than the head version', async function () { + await expect( + redisBackend.setPersistedVersion(projectId, 10) + ).to.be.rejectedWith(VersionOutOfBoundsError) + // Ensure persisted version remains unchanged const state = await redisBackend.getState(projectId) expect(state.persistedVersion).to.equal(3) }) @@ -1162,6 +1218,8 @@ function makeChange() { * @param {object} params * @param {number} params.headVersion * @param {number | null} params.persistedVersion + * @param {number | null} params.persistTime - time when the project should be persisted + * @param {number | null} params.expireTime - time when the project should expire * @param {number} params.changes - number of changes to create * @return {Promise} dummy 
changes that have been created */ @@ -1173,7 +1231,12 @@ async function setupState(projectId, params) { params.persistedVersion ) } - + if (params.persistTime) { + await rclient.set(keySchema.persistTime({ projectId }), params.persistTime) + } + if (params.expireTime) { + await rclient.set(keySchema.expireTime({ projectId }), params.expireTime) + } const changes = [] for (let i = 1; i <= params.changes; i++) { const change = new Change( diff --git a/services/history-v1/test/acceptance/js/storage/expire_redis_chunks.test.js b/services/history-v1/test/acceptance/js/storage/expire_redis_chunks.test.js index b657991dda..f8a5943c43 100644 --- a/services/history-v1/test/acceptance/js/storage/expire_redis_chunks.test.js +++ b/services/history-v1/test/acceptance/js/storage/expire_redis_chunks.test.js @@ -1,91 +1,13 @@ 'use strict' const { expect } = require('chai') -const { promisify } = require('node:util') -const { execFile } = require('node:child_process') -const { Snapshot, Author, Change } = require('overleaf-editor-core') +const { Author, Change } = require('overleaf-editor-core') const cleanup = require('./support/cleanup') -const redisBackend = require('../../../../storage/lib/chunk_store/redis') -const redis = require('../../../../storage/lib/redis') -const rclient = redis.rclientHistory -const keySchema = redisBackend.keySchema +const { setupProjectState, rclient, keySchema } = require('./support/redis') +const { runScript } = require('./support/runscript') const SCRIPT_PATH = 'storage/scripts/expire_redis_chunks.js' -async function runExpireScript() { - const TIMEOUT = 10 * 1000 // 10 seconds - let result - try { - result = await promisify(execFile)('node', [SCRIPT_PATH], { - encoding: 'utf-8', - timeout: TIMEOUT, - env: { - ...process.env, - LOG_LEVEL: 'debug', // Override LOG_LEVEL for script output - }, - }) - result.status = 0 - } catch (err) { - const { stdout, stderr, code } = err - if (typeof code !== 'number') { - console.error('Error running expire script:', err) - throw err - } - result = { stdout, stderr, status: code } - } - // The script might exit with status 1 if it finds no keys to process, which is ok - if (result.status !== 0 && result.status !== 1) { - console.error('Expire script failed:', result.stderr) - throw new Error(`expire script failed with status ${result.status}`) - } - return result -} - -// Helper to set up a basic project state in Redis -async function setupProjectState( - projectId, - { - headVersion = 0, - persistedVersion = null, - expireTime = null, - persistTime = null, - changes = [], - } -) { - const headSnapshot = new Snapshot() - await rclient.set( - keySchema.head({ projectId }), - JSON.stringify(headSnapshot.toRaw()) - ) - await rclient.set( - keySchema.headVersion({ projectId }), - headVersion.toString() - ) - - if (persistedVersion !== null) { - await rclient.set( - keySchema.persistedVersion({ projectId }), - persistedVersion.toString() - ) - } - if (expireTime !== null) { - await rclient.set( - keySchema.expireTime({ projectId }), - expireTime.toString() - ) - } - if (persistTime !== null) { - await rclient.set( - keySchema.persistTime({ projectId }), - persistTime.toString() - ) - } - if (changes.length > 0) { - const rawChanges = changes.map(c => JSON.stringify(c.toRaw())) - await rclient.rpush(keySchema.changes({ projectId }), ...rawChanges) - } -} - function makeChange() { const timestamp = new Date() const author = new Author(123, 'test@example.com', 'Test User') @@ -150,7 +72,7 @@ describe('expire_redis_chunks script', function () { 
}) // Run the expire script once after all projects are set up - await runExpireScript() + await runScript(SCRIPT_PATH) }) async function checkProjectStatus(projectId) { diff --git a/services/history-v1/test/acceptance/js/storage/persist_buffer.test.mjs b/services/history-v1/test/acceptance/js/storage/persist_buffer.test.mjs new file mode 100644 index 0000000000..138a70e626 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/persist_buffer.test.mjs @@ -0,0 +1,519 @@ +'use strict' + +import fs from 'node:fs' +import { expect } from 'chai' +import { + Change, + Snapshot, + File, + TextOperation, + AddFileOperation, + EditFileOperation, // Added EditFileOperation +} from 'overleaf-editor-core' +import persistBuffer from '../../../../storage/lib/persist_buffer.js' +import chunkStore from '../../../../storage/lib/chunk_store/index.js' +import redisBackend from '../../../../storage/lib/chunk_store/redis.js' +import persistChanges from '../../../../storage/lib/persist_changes.js' +import cleanup from './support/cleanup.js' +import fixtures from './support/fixtures.js' +import testFiles from './support/test_files.js' + +describe('persistBuffer', function () { + let projectId + const initialVersion = 0 + let limitsToPersistImmediately + + before(function () { + const farFuture = new Date() + farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000) + limitsToPersistImmediately = { + minChangeTimestamp: farFuture, + maxChangeTimestamp: farFuture, + maxChunkChanges: 10, + } + }) + + beforeEach(cleanup.everything) + beforeEach(fixtures.create) + + beforeEach(async function () { + projectId = fixtures.docs.uninitializedProject.id + await chunkStore.initializeProject(projectId) + }) + + describe('with an empty initial chunk (new project)', function () { + it('should persist changes from Redis to a new chunk', async function () { + // create an initial snapshot and add the empty file `main.tex` + const HELLO_TXT = fs.readFileSync(testFiles.path('hello.txt')).toString() + + const createFile = new Change( + [new AddFileOperation('main.tex', File.fromString(HELLO_TXT))], + new Date(), + [] + ) + + await persistChanges( + projectId, + [createFile], + limitsToPersistImmediately, + 0 + ) + // Now queue some changes in Redis + const op1 = new TextOperation().insert('Hello').retain(HELLO_TXT.length) + const change1 = new Change( + [new EditFileOperation('main.tex', op1)], + new Date() + ) + + const op2 = new TextOperation() + .retain('Hello'.length) + .insert(' World') + .retain(HELLO_TXT.length) + const change2 = new Change( + [new EditFileOperation('main.tex', op2)], + new Date() + ) + + const changesToQueue = [change1, change2] + + const finalHeadVersion = initialVersion + 1 + changesToQueue.length + + const now = Date.now() + await redisBackend.queueChanges( + projectId, + new Snapshot(), // dummy snapshot + 1, + changesToQueue, + { + persistTime: now + redisBackend.MAX_PERSIST_DELAY_MS, + expireTime: now + redisBackend.PROJECT_TTL_MS, + } + ) + await redisBackend.setPersistedVersion(projectId, initialVersion) + + // Persist the changes from Redis to the chunk store + const persistResult = await persistBuffer( + projectId, + limitsToPersistImmediately + ) + + // Check the return value of persistBuffer + expect(persistResult).to.exist + expect(persistResult).to.have.property('numberOfChangesPersisted') + expect(persistResult).to.have.property('originalEndVersion') + expect(persistResult).to.have.property('currentChunk') + expect(persistResult).to.have.property('resyncNeeded') + 
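+      // Both queued changes should have been persisted on top of the initial
+      // change written via persistChanges, so originalEndVersion is 1 here.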
expect(persistResult.numberOfChangesPersisted).to.equal( + changesToQueue.length + ) + expect(persistResult.originalEndVersion).to.equal(initialVersion + 1) + expect(persistResult.resyncNeeded).to.be.false + + const latestChunk = await chunkStore.loadLatest(projectId, { + persistedOnly: true, + }) + expect(latestChunk).to.exist + expect(latestChunk.getStartVersion()).to.equal(initialVersion) + expect(latestChunk.getEndVersion()).to.equal(finalHeadVersion) + expect(latestChunk.getChanges().length).to.equal( + changesToQueue.length + 1 + ) + // Check that chunk returned by persistBuffer matches the latest chunk + expect(latestChunk).to.deep.equal(persistResult.currentChunk) + + const chunkSnapshot = latestChunk.getSnapshot() + expect(Object.keys(chunkSnapshot.getFileMap()).length).to.equal(1) + + const persistedVersionInRedis = (await redisBackend.getState(projectId)) + .persistedVersion + expect(persistedVersionInRedis).to.equal(finalHeadVersion) + + const nonPersisted = await redisBackend.getNonPersistedChanges( + projectId, + finalHeadVersion + ) + expect(nonPersisted).to.be.an('array').that.is.empty + }) + }) + + describe('with an existing chunk and new changes in Redis', function () { + it('should persist new changes from Redis, appending to existing history', async function () { + const initialContent = 'Initial document content.\n' + + const addInitialFileChange = new Change( + [new AddFileOperation('main.tex', File.fromString(initialContent))], + new Date(), + [] + ) + + await persistChanges( + projectId, + [addInitialFileChange], + limitsToPersistImmediately, + initialVersion + ) + const versionAfterInitialSetup = initialVersion + 1 // Now version is 1 + + const opForChunk1 = new TextOperation() + .retain(initialContent.length) + .insert(' First addition.') + const changesForChunk1 = [ + new Change( + [new EditFileOperation('main.tex', opForChunk1)], + new Date(), + [] + ), + ] + + await persistChanges( + projectId, + changesForChunk1, + limitsToPersistImmediately, // Original limits for this step + versionAfterInitialSetup // Correct clientEndVersion + ) + // Update persistedChunkEndVersion: 1 (from setup) + 1 (from changesForChunk1) = 2 + const persistedChunkEndVersion = + versionAfterInitialSetup + changesForChunk1.length + const contentAfterChunk1 = initialContent + ' First addition.' + + const opVersion2 = new TextOperation() + .retain(contentAfterChunk1.length) + .insert(' Second addition.') + const changeVersion2 = new Change( + [new EditFileOperation('main.tex', opVersion2)], + new Date(), + [] + ) + + const contentAfterChange2 = contentAfterChunk1 + ' Second addition.' 
+ const opVersion3 = new TextOperation() + .retain(contentAfterChange2.length) + .insert(' Third addition.') + const changeVersion3 = new Change( + [new EditFileOperation('main.tex', opVersion3)], + new Date(), + [] + ) + + const redisChangesToPush = [changeVersion2, changeVersion3] + const finalHeadVersionAfterRedisPush = + persistedChunkEndVersion + redisChangesToPush.length + const now = Date.now() + + await redisBackend.queueChanges( + projectId, + new Snapshot(), // Use new Snapshot() like in the first test + persistedChunkEndVersion, + redisChangesToPush, + { + persistTime: now + redisBackend.MAX_PERSIST_DELAY_MS, + expireTime: now + redisBackend.PROJECT_TTL_MS, + } + ) + await redisBackend.setPersistedVersion( + projectId, + persistedChunkEndVersion + ) + + const persistResult = await persistBuffer( + projectId, + limitsToPersistImmediately + ) + + // Check the return value of persistBuffer + expect(persistResult).to.exist + expect(persistResult).to.have.property('numberOfChangesPersisted') + expect(persistResult).to.have.property('originalEndVersion') + expect(persistResult).to.have.property('currentChunk') + expect(persistResult).to.have.property('resyncNeeded') + expect(persistResult.numberOfChangesPersisted).to.equal( + redisChangesToPush.length + ) + expect(persistResult.originalEndVersion).to.equal( + persistedChunkEndVersion + ) + expect(persistResult.resyncNeeded).to.be.false + + const latestChunk = await chunkStore.loadLatest(projectId, { + persistedOnly: true, + }) + expect(latestChunk).to.exist + expect(latestChunk.getStartVersion()).to.equal(0) + expect(latestChunk.getEndVersion()).to.equal( + finalHeadVersionAfterRedisPush + ) + expect(latestChunk.getChanges().length).to.equal( + persistedChunkEndVersion + redisChangesToPush.length + ) + + const persistedVersionInRedisAfter = ( + await redisBackend.getState(projectId) + ).persistedVersion + expect(persistedVersionInRedisAfter).to.equal( + finalHeadVersionAfterRedisPush + ) + + // Check that chunk returned by persistBuffer matches the latest chunk + expect(persistResult.currentChunk).to.deep.equal(latestChunk) + + const nonPersisted = await redisBackend.getNonPersistedChanges( + projectId, + finalHeadVersionAfterRedisPush + ) + expect(nonPersisted).to.be.an('array').that.is.empty + }) + }) + + describe('when Redis has no new changes', function () { + let persistedChunkEndVersion + let changesForChunk1 + + beforeEach(async function () { + const initialContent = 'Content.' + + const addInitialFileChange = new Change( + [new AddFileOperation('main.tex', File.fromString(initialContent))], + new Date(), + [] + ) + + // Replace chunkStore.create with persistChanges + // clientEndVersion is initialVersion (0). This advances version to 1. 
+ await persistChanges( + projectId, + [addInitialFileChange], + limitsToPersistImmediately, + initialVersion + ) + const versionAfterInitialSetup = initialVersion + 1 // Now version is 1 + + const opForChunk1 = new TextOperation() + .retain(initialContent.length) + .insert(' More.') + changesForChunk1 = [ + new Change( + [new EditFileOperation('main.tex', opForChunk1)], + new Date(), + [] + ), + ] + // Corrected persistChanges call: clientEndVersion is versionAfterInitialSetup (1) + await persistChanges( + projectId, + changesForChunk1, + limitsToPersistImmediately, // Original limits for this step + versionAfterInitialSetup // Correct clientEndVersion + ) + // Update persistedChunkEndVersion: 1 (from setup) + 1 (from changesForChunk1) = 2 + persistedChunkEndVersion = + versionAfterInitialSetup + changesForChunk1.length + }) + + it('should leave the persisted version and stored chunks unchanged', async function () { + const now = Date.now() + await redisBackend.queueChanges( + projectId, + new Snapshot(), + persistedChunkEndVersion - 1, + changesForChunk1, + { + persistTime: now + redisBackend.MAX_PERSIST_DELAY_MS, + expireTime: now + redisBackend.PROJECT_TTL_MS, + } + ) + await redisBackend.setPersistedVersion( + projectId, + persistedChunkEndVersion + ) + + const chunksBefore = await chunkStore.getProjectChunks(projectId) + + const persistResult = await persistBuffer( + projectId, + limitsToPersistImmediately + ) + + const currentChunk = await chunkStore.loadLatest(projectId, { + persistedOnly: true, + }) + expect(persistResult).to.deep.equal({ + numberOfChangesPersisted: 0, + originalEndVersion: persistedChunkEndVersion, + currentChunk, + }) + + const chunksAfter = await chunkStore.getProjectChunks(projectId) + expect(chunksAfter.length).to.equal(chunksBefore.length) + expect(chunksAfter).to.deep.equal(chunksBefore) + + const finalPersistedVersionInRedis = ( + await redisBackend.getState(projectId) + ).persistedVersion + expect(finalPersistedVersionInRedis).to.equal(persistedChunkEndVersion) + }) + + it('should update the persisted version if it is behind the chunk store end version', async function () { + const now = Date.now() + + await redisBackend.queueChanges( + projectId, + new Snapshot(), + persistedChunkEndVersion - 1, + changesForChunk1, + { + persistTime: now + redisBackend.MAX_PERSIST_DELAY_MS, + expireTime: now + redisBackend.PROJECT_TTL_MS, + } + ) + // Force the persisted version in Redis to lag behind the chunk store, + // simulating the situation where a worker has persisted changes to the + // chunk store but failed to update the version in redis. 
+ await redisBackend.setPersistedVersion( + projectId, + persistedChunkEndVersion - 1 + ) + + const chunksBefore = await chunkStore.getProjectChunks(projectId) + + // Persist buffer (which should do nothing as there are no new changes) + const persistResult = await persistBuffer( + projectId, + limitsToPersistImmediately + ) + + // Check the return value + const currentChunk = await chunkStore.loadLatest(projectId, { + persistedOnly: true, + }) + expect(persistResult).to.deep.equal({ + numberOfChangesPersisted: 0, + originalEndVersion: persistedChunkEndVersion, + currentChunk, + }) + + const chunksAfter = await chunkStore.getProjectChunks(projectId) + expect(chunksAfter.length).to.equal(chunksBefore.length) + expect(chunksAfter).to.deep.equal(chunksBefore) + + const finalPersistedVersionInRedis = ( + await redisBackend.getState(projectId) + ).persistedVersion + expect(finalPersistedVersionInRedis).to.equal(persistedChunkEndVersion) + }) + }) + + describe('when limits restrict the number of changes to persist', function () { + it('should persist only a subset of changes and update persistedVersion accordingly', async function () { + const now = Date.now() + const oneDayAgo = now - 1000 * 60 * 60 * 24 + const oneHourAgo = now - 1000 * 60 * 60 + const twoHoursAgo = now - 1000 * 60 * 60 * 2 + const threeHoursAgo = now - 1000 * 60 * 60 * 3 + + // Create an initial file with some content + const initialContent = 'Initial content.' + const addInitialFileChange = new Change( + [new AddFileOperation('main.tex', File.fromString(initialContent))], + new Date(oneDayAgo), + [] + ) + + await persistChanges( + projectId, + [addInitialFileChange], + limitsToPersistImmediately, + initialVersion + ) + const versionAfterInitialSetup = initialVersion + 1 // Version is 1 + + // Queue three additional changes in Redis + const op1 = new TextOperation() + .retain(initialContent.length) + .insert(' Change 1.') + const change1 = new Change( + [new EditFileOperation('main.tex', op1)], + new Date(threeHoursAgo) + ) + const contentAfterC1 = initialContent + ' Change 1.' + + const op2 = new TextOperation() + .retain(contentAfterC1.length) + .insert(' Change 2.') + const change2 = new Change( + [new EditFileOperation('main.tex', op2)], + new Date(twoHoursAgo) + ) + const contentAfterC2 = contentAfterC1 + ' Change 2.' + + const op3 = new TextOperation() + .retain(contentAfterC2.length) + .insert(' Change 3.') + const change3 = new Change( + [new EditFileOperation('main.tex', op3)], + new Date(oneHourAgo) + ) + + const changesToQueue = [change1, change2, change3] + await redisBackend.queueChanges( + projectId, + new Snapshot(), // dummy snapshot + versionAfterInitialSetup, // startVersion for queued changes + changesToQueue, + { + persistTime: now + redisBackend.MAX_PERSIST_DELAY_MS, + expireTime: now + redisBackend.PROJECT_TTL_MS, + } + ) + await redisBackend.setPersistedVersion( + projectId, + versionAfterInitialSetup + ) + + // Define limits to only persist 2 additional changes (on top of the initial file creation), + // which should leave the final change (change3) in the redis buffer. 
+ const restrictiveLimits = { + minChangeTimestamp: new Date(oneHourAgo), // only changes more than 1 hour old are considered + maxChangeTimestamp: new Date(twoHoursAgo), // they will be persisted if any change is older than 2 hours + } + + const persistResult = await persistBuffer(projectId, restrictiveLimits) + + // Check the return value of persistBuffer + expect(persistResult).to.exist + expect(persistResult).to.have.property('numberOfChangesPersisted') + expect(persistResult).to.have.property('originalEndVersion') + expect(persistResult).to.have.property('currentChunk') + expect(persistResult).to.have.property('resyncNeeded') + expect(persistResult.numberOfChangesPersisted).to.equal(2) // change1 + change2 + expect(persistResult.originalEndVersion).to.equal( + versionAfterInitialSetup + ) + expect(persistResult.resyncNeeded).to.be.false + + // Check the latest persisted chunk, it should only have the initial file and the first two changes + const latestChunk = await chunkStore.loadLatest(projectId, { + persistedOnly: true, + }) + expect(latestChunk).to.exist + expect(latestChunk.getChanges().length).to.equal(3) // addInitialFileChange + change1 + change2 + expect(latestChunk.getStartVersion()).to.equal(initialVersion) + const expectedEndVersion = versionAfterInitialSetup + 2 // Persisted two changes from the queue + expect(latestChunk.getEndVersion()).to.equal(expectedEndVersion) + + // Check that chunk returned by persistBuffer matches the latest chunk + expect(persistResult.currentChunk).to.deep.equal(latestChunk) + + // Check persisted version in Redis + const state = await redisBackend.getState(projectId) + expect(state.persistedVersion).to.equal(expectedEndVersion) + + // Check non-persisted changes in Redis + const nonPersisted = await redisBackend.getNonPersistedChanges( + projectId, + expectedEndVersion + ) + expect(nonPersisted).to.be.an('array').with.lengthOf(1) // change3 should remain + expect(nonPersisted).to.deep.equal([change3]) + }) + }) +}) diff --git a/services/history-v1/test/acceptance/js/storage/persist_redis_chunks.test.js b/services/history-v1/test/acceptance/js/storage/persist_redis_chunks.test.js new file mode 100644 index 0000000000..58261703bb --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/persist_redis_chunks.test.js @@ -0,0 +1,262 @@ +'use strict' + +const { expect } = require('chai') +const { + Change, + AddFileOperation, + EditFileOperation, + TextOperation, + File, +} = require('overleaf-editor-core') +const cleanup = require('./support/cleanup') +const fixtures = require('./support/fixtures') +const chunkStore = require('../../../../storage/lib/chunk_store') +const { getState } = require('../../../../storage/lib/chunk_store/redis') +const { setupProjectState } = require('./support/redis') +const { runScript } = require('./support/runscript') +const persistChanges = require('../../../../storage/lib/persist_changes') + +const SCRIPT_PATH = 'storage/scripts/persist_redis_chunks.mjs' + +describe('persist_redis_chunks script', function () { + before(cleanup.everything) + + let now, past, future + let projectIdsStore // To store the generated project IDs, keyed by scenario name + let limitsToPersistImmediately + + before(async function () { + const farFuture = new Date() + farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000) + limitsToPersistImmediately = { + minChangeTimestamp: farFuture, + maxChangeTimestamp: farFuture, + maxChunkChanges: 100, // Allow enough changes for setup + } + + await fixtures.create() + + now = 
Date.now() + past = now - 10000 // 10 seconds ago + future = now + 60000 // 1 minute in the future + + projectIdsStore = {} + + // Scenario 1: project_due_for_persistence + // Goal: Has initial persisted content (v1), Redis has new changes (v1->v2) due for persistence. + // Expected: Script persists Redis changes, persistedVersion becomes 2. + { + const dueProjectId = await chunkStore.initializeProject() + projectIdsStore.project_due_for_persistence = dueProjectId + const initialContent = 'Initial content for due project.' + const initialChange = new Change( + [new AddFileOperation('main.tex', File.fromString(initialContent))], + new Date(now - 30000), // 30 seconds ago + [] + ) + await persistChanges( + dueProjectId, + [initialChange], + limitsToPersistImmediately, + 0 + ) + const secondChangeDue = new Change( + [ + new EditFileOperation( + 'main.tex', + new TextOperation() + .retain(initialContent.length) + .insert(' More content.') + ), + ], + new Date(now - 20000), // 20 seconds ago + [] + ) + await setupProjectState(dueProjectId, { + persistTime: past, + headVersion: 2, // After secondChangeDue + persistedVersion: 1, // Initial content is at v1 + changes: [secondChangeDue], // New changes in Redis (v1->v2) + expireTimeFuture: true, + }) + } + + // Scenario 2: project_not_due_for_persistence + // Goal: Has initial persisted content (v1), Redis has no new changes, not due. + // Expected: Script does nothing, persistedVersion remains 1. + { + const notDueProjectId = await chunkStore.initializeProject() + projectIdsStore.project_not_due_for_persistence = notDueProjectId + const initialContent = 'Initial content for not_due project.' + const initialChange = new Change( + [new AddFileOperation('main.tex', File.fromString(initialContent))], + new Date(now - 30000), // 30 seconds ago + [] + ) + await persistChanges( + notDueProjectId, + [initialChange], + limitsToPersistImmediately, + 0 + ) // Persisted: v0 -> v1 + await setupProjectState(notDueProjectId, { + persistTime: future, + headVersion: 1, // Matches persisted version + persistedVersion: 1, + changes: [], // No new changes in Redis + expireTimeFuture: true, + }) + } + + // Scenario 3: project_no_persist_time + // Goal: Has initial persisted content (v1), Redis has no new changes, no persistTime. + // Expected: Script does nothing, persistedVersion remains 1. + { + const noPersistTimeProjectId = await chunkStore.initializeProject() + projectIdsStore.project_no_persist_time = noPersistTimeProjectId + const initialContent = 'Initial content for no_persist_time project.' + const initialChange = new Change( + [new AddFileOperation('main.tex', File.fromString(initialContent))], + new Date(now - 30000), // 30 seconds ago + [] + ) + await persistChanges( + noPersistTimeProjectId, + [initialChange], + limitsToPersistImmediately, + 0 + ) // Persisted: v0 -> v1 + await setupProjectState(noPersistTimeProjectId, { + persistTime: null, + headVersion: 1, // Matches persisted version + persistedVersion: 1, + changes: [], // No new changes in Redis + expireTimeFuture: true, + }) + } + + // Scenario 4: project_due_fully_persisted + // Goal: Has content persisted up to v2, Redis reflects this (head=2, persisted=2), due for check. + // Expected: Script clears persistTime, persistedVersion remains 2. + { + const dueFullyPersistedId = await chunkStore.initializeProject() + projectIdsStore.project_due_fully_persisted = dueFullyPersistedId + const initialContent = 'Content part 1 for fully persisted.' 
+ const change1 = new Change( + [new AddFileOperation('main.tex', File.fromString(initialContent))], + new Date(now - 40000), // 40 seconds ago + [] + ) + const change2 = new Change( + [ + new EditFileOperation( + 'main.tex', + new TextOperation() + .retain(initialContent.length) + .insert(' Content part 2.') + ), + ], + new Date(now - 30000), // 30 seconds ago + [] + ) + await persistChanges( + dueFullyPersistedId, + [change1, change2], + limitsToPersistImmediately, + 0 + ) + await setupProjectState(dueFullyPersistedId, { + persistTime: past, + headVersion: 2, + persistedVersion: 2, + changes: [], // No new unpersisted changes in Redis + expireTimeFuture: true, + }) + } + + // Scenario 5: project_fails_to_persist + // Goal: Has initial persisted content (v1), Redis has new changes (v1->v2) due for persistence, but these changes will cause an error. + // Expected: Script attempts to persist, fails, and persistTime is NOT cleared. + { + const failsToPersistProjectId = await chunkStore.initializeProject() + projectIdsStore.project_fails_to_persist = failsToPersistProjectId + const initialContent = 'Initial content for failure case.' + const initialChange = new Change( + [new AddFileOperation('main.tex', File.fromString(initialContent))], + new Date(now - 30000), // 30 seconds ago + [] + ) + await persistChanges( + failsToPersistProjectId, + [initialChange], + limitsToPersistImmediately, + 0 + ) + // This change will fail because it tries to insert at a non-existent offset + // assuming the initial content is shorter than 1000 characters. + const conflictingChange = new Change( + [ + new EditFileOperation( + 'main.tex', + new TextOperation().retain(1000).insert('This will fail.') + ), + ], + new Date(now - 20000), // 20 seconds ago + [] + ) + await setupProjectState(failsToPersistProjectId, { + persistTime: past, // Due for persistence + headVersion: 2, // After conflictingChange + persistedVersion: 1, // Initial content is at v1 + changes: [conflictingChange], // New changes in Redis (v1->v2) + expireTimeFuture: true, + }) + } + + await runScript(SCRIPT_PATH) + }) + + describe('when the buffer has new changes', function () { + it('should update persisted-version when the persist-time is in the past', async function () { + const projectId = projectIdsStore.project_due_for_persistence + const state = await getState(projectId) + // console.log('State after running script (project_due_for_persistence):', state) + expect(state.persistTime).to.be.null + expect(state.persistedVersion).to.equal(2) + }) + + it('should not perform any operations when the persist-time is in the future', async function () { + const projectId = projectIdsStore.project_not_due_for_persistence + const state = await getState(projectId) + expect(state.persistTime).to.equal(future) + expect(state.persistedVersion).to.equal(1) + }) + }) + + describe('when the changes in the buffer are already persisted', function () { + it('should delete persist-time for a project when the persist-time is in the past', async function () { + const projectId = projectIdsStore.project_due_fully_persisted + const state = await getState(projectId) + expect(state.persistTime).to.be.null + expect(state.persistedVersion).to.equal(2) + }) + }) + + describe('when there is no persist-time set', function () { + it('should not change redis when there is no persist-time set initially', async function () { + const projectId = projectIdsStore.project_no_persist_time + const state = await getState(projectId) + expect(state.persistTime).to.be.null + 
expect(state.persistedVersion).to.equal(1) + }) + }) + + describe('when persistence fails due to conflicting changes', function () { + it('should not clear persist-time and not update persisted-version', async function () { + const projectId = projectIdsStore.project_fails_to_persist + const state = await getState(projectId) + expect(state.persistTime).to.be.greaterThan(now) // persistTime should be pushed to the future by RETRY_DELAY_MS + expect(state.persistedVersion).to.equal(1) // persistedVersion should not change + }) + }) +}) diff --git a/services/history-v1/test/acceptance/js/storage/queue_changes.test.js b/services/history-v1/test/acceptance/js/storage/queue_changes.test.js new file mode 100644 index 0000000000..dbfe8c7e56 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/queue_changes.test.js @@ -0,0 +1,416 @@ +'use strict' + +const { expect } = require('chai') +const sinon = require('sinon') + +const cleanup = require('./support/cleanup') +const fixtures = require('./support/fixtures') +const testFiles = require('./support/test_files.js') +const storage = require('../../../../storage') +const chunkStore = storage.chunkStore +const queueChanges = storage.queueChanges +const redisBackend = require('../../../../storage/lib/chunk_store/redis') + +const core = require('overleaf-editor-core') +const AddFileOperation = core.AddFileOperation +const EditFileOperation = core.EditFileOperation +const TextOperation = core.TextOperation +const Change = core.Change +const Chunk = core.Chunk +const File = core.File +const Snapshot = core.Snapshot +const BlobStore = storage.BlobStore +const persistChanges = storage.persistChanges + +describe('queueChanges', function () { + let limitsToPersistImmediately + before(function () { + // Used to provide a limit which forces us to persist all of the changes + const farFuture = new Date() + farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000) + limitsToPersistImmediately = { + minChangeTimestamp: farFuture, + maxChangeTimestamp: farFuture, + maxChanges: 10, + maxChunkChanges: 10, + } + }) + + beforeEach(cleanup.everything) + beforeEach(fixtures.create) + afterEach(function () { + sinon.restore() + }) + + it('queues changes when redis has no snapshot (falls back to chunkStore with an empty chunk)', async function () { + // Start with an empty chunk store for the project + const projectId = fixtures.docs.uninitializedProject.id + await chunkStore.initializeProject(projectId) + + // Ensure that the initial state in redis is empty + const initialRedisState = await redisBackend.getState(projectId) + expect(initialRedisState.headVersion).to.be.null + expect(initialRedisState.headSnapshot).to.be.null + expect(initialRedisState.changes).to.be.an('array').that.is.empty + + // Add a test file to the blob store + const blobStore = new BlobStore(projectId) + await blobStore.putFile(testFiles.path('hello.txt')) + + // Prepare an initial change to add a single file to an empty project + const change = new Change( + [ + new AddFileOperation( + 'test.tex', + File.fromHash(testFiles.HELLO_TXT_HASH) + ), + ], + new Date(), + [] + ) + const changesToQueue = [change] + const endVersion = 0 + + // Queue the changes to add the test file + const status = await queueChanges(projectId, changesToQueue, endVersion) + expect(status).to.equal('ok') + + // Verify that we now have some state in redis + const redisState = await redisBackend.getState(projectId) + expect(redisState).to.not.be.null + + // Compute the expected snapshot after applying the changes 
+ const expectedSnapshot = new Snapshot() + await expectedSnapshot.loadFiles('hollow', blobStore) + for (const change of changesToQueue) { + const hollowChange = change.clone() + await hollowChange.loadFiles('hollow', blobStore) + hollowChange.applyTo(expectedSnapshot, { strict: true }) + } + + // Confirm that state in redis matches the expected snapshot and changes queue + const expectedVersionInRedis = endVersion + changesToQueue.length + expect(redisState.headVersion).to.equal(expectedVersionInRedis) + expect(redisState.headSnapshot).to.deep.equal(expectedSnapshot.toRaw()) + expect(redisState.changes).to.deep.equal(changesToQueue.map(c => c.toRaw())) + }) + + it('queues changes when redis has no snapshot (falls back to chunkStore with an existing chunk)', async function () { + const projectId = fixtures.docs.uninitializedProject.id + + // Initialise the project in the chunk store using the "Hello World" test file + await chunkStore.initializeProject(projectId) + const blobStore = new BlobStore(projectId) + await blobStore.putFile(testFiles.path('hello.txt')) + const change = new Change( + [ + new AddFileOperation( + 'hello.tex', + File.fromHash(testFiles.HELLO_TXT_HASH) + ), + ], + new Date(), + [] + ) + const initialChanges = [change] + const initialVersion = 0 + + const result = await persistChanges( + projectId, + initialChanges, + limitsToPersistImmediately, + initialVersion + ) + // Compute the state after the initial changes are persisted for later comparison + const endVersion = initialVersion + initialChanges.length + const { currentChunk } = result + const originalSnapshot = result.currentChunk.getSnapshot() + await originalSnapshot.loadFiles('hollow', blobStore) + originalSnapshot.applyAll(currentChunk.getChanges()) + + // Ensure that the initial state in redis is empty + const initialRedisState = await redisBackend.getState(projectId) + expect(initialRedisState.headVersion).to.be.null + expect(initialRedisState.headSnapshot).to.be.null + expect(initialRedisState.changes).to.be.an('array').that.is.empty + + // Prepare a change to edit the existing file + const editFileOp = new EditFileOperation( + 'hello.tex', + new TextOperation() + .insert('Hello') + .retain(testFiles.HELLO_TXT_UTF8_LENGTH) + ) + const editFileChange = new Change([editFileOp], new Date(), []) + const changesToQueue = [editFileChange] + + // Queue the changes to edit the existing file + const status = await queueChanges(projectId, changesToQueue, endVersion) + expect(status).to.equal('ok') + + // Verify that we now have some state in redis + const redisState = await redisBackend.getState(projectId) + expect(redisState).to.not.be.null + + // Compute the expected snapshot after applying the changes + const expectedSnapshot = originalSnapshot.clone() + await expectedSnapshot.loadFiles('hollow', blobStore) + expectedSnapshot.applyAll(changesToQueue) + + // Confirm that state in redis matches the expected snapshot and changes queue + const expectedVersionInRedis = endVersion + changesToQueue.length + expect(redisState.headVersion).to.equal(expectedVersionInRedis) + expect(redisState.headSnapshot).to.deep.equal(expectedSnapshot.toRaw()) + expect(redisState.changes).to.deep.equal(changesToQueue.map(c => c.toRaw())) + }) + + it('queues changes when redis has a snapshot with existing changes', async function () { + const projectId = fixtures.docs.uninitializedProject.id + + // Initialise the project in redis using the "Hello World" test file + await chunkStore.initializeProject(projectId) + const blobStore = new 
BlobStore(projectId) + await blobStore.putFile(testFiles.path('hello.txt')) + const initialChangeOp = new AddFileOperation( + 'existing.tex', + File.fromHash(testFiles.HELLO_TXT_HASH) + ) + const initialChange = new Change([initialChangeOp], new Date(), []) + const initialChangesToQueue = [initialChange] + const versionBeforeInitialQueue = 0 + + // Queue the initial changes + const status = await queueChanges( + projectId, + initialChangesToQueue, + versionBeforeInitialQueue + ) + // Confirm that the initial changes were queued successfully + expect(status).to.equal('ok') + const versionAfterInitialQueue = + versionBeforeInitialQueue + initialChangesToQueue.length + + // Compute the snapshot after the initial changes for later use + const initialSnapshot = new Snapshot() + await initialSnapshot.loadFiles('hollow', blobStore) + for (const change of initialChangesToQueue) { + const hollowChange = change.clone() + await hollowChange.loadFiles('hollow', blobStore) + hollowChange.applyTo(initialSnapshot, { strict: true }) + } + + // Now prepare some subsequent changes for the queue + await blobStore.putFile(testFiles.path('graph.png')) + const addFileOp = new AddFileOperation( + 'graph.png', + File.fromHash(testFiles.GRAPH_PNG_HASH) + ) + const addFileChange = new Change([addFileOp], new Date(), []) + const editFileOp = new EditFileOperation( + 'existing.tex', + new TextOperation() + .insert('Hello') + .retain(testFiles.HELLO_TXT_UTF8_LENGTH) + ) + const editFileChange = new Change([editFileOp], new Date(), []) + + const subsequentChangesToQueue = [addFileChange, editFileChange] + const versionBeforeSubsequentQueue = versionAfterInitialQueue + + // Queue the subsequent changes + const subsequentStatus = await queueChanges( + projectId, + subsequentChangesToQueue, + versionBeforeSubsequentQueue + ) + expect(subsequentStatus).to.equal('ok') + + // Compute the expected snapshot after applying all changes + const expectedSnapshot = initialSnapshot.clone() + await expectedSnapshot.loadFiles('hollow', blobStore) + for (const change of subsequentChangesToQueue) { + const hollowChange = change.clone() + await hollowChange.loadFiles('hollow', blobStore) + hollowChange.applyTo(expectedSnapshot, { strict: true }) + } + + // Confirm that state in redis matches the expected snapshot and changes queue + const finalRedisState = await redisBackend.getState(projectId) + expect(finalRedisState).to.not.be.null + const expectedFinalVersion = + versionBeforeSubsequentQueue + subsequentChangesToQueue.length + expect(finalRedisState.headVersion).to.equal(expectedFinalVersion) + expect(finalRedisState.headSnapshot).to.deep.equal(expectedSnapshot.toRaw()) + const allQueuedChangesRaw = initialChangesToQueue + .concat(subsequentChangesToQueue) + .map(c => c.toRaw()) + expect(finalRedisState.changes).to.deep.equal(allQueuedChangesRaw) + }) + + it('skips queuing changes when there is no snapshot and the onlyIfExists flag is set', async function () { + // Start with an empty chunk store for the project + const projectId = fixtures.docs.uninitializedProject.id + await chunkStore.initializeProject(projectId) + + // Ensure that the initial state in redis is empty + const initialRedisState = await redisBackend.getState(projectId) + expect(initialRedisState.headVersion).to.be.null + expect(initialRedisState.headSnapshot).to.be.null + expect(initialRedisState.changes).to.be.an('array').that.is.empty + + // Add a test file to the blob store + const blobStore = new BlobStore(projectId) + await 
blobStore.putFile(testFiles.path('hello.txt')) + + // Prepare an initial change to add a single file to an empty project + const change = new Change( + [ + new AddFileOperation( + 'test.tex', + File.fromHash(testFiles.HELLO_TXT_HASH) + ), + ], + new Date(), + [] + ) + const changesToQueue = [change] + const endVersion = 0 + + // Queue the changes to add the test file + const status = await queueChanges(projectId, changesToQueue, endVersion, { + onlyIfExists: true, + }) + expect(status).to.equal('ignore') + + // Verify that the state in redis has not changed + const redisState = await redisBackend.getState(projectId) + expect(redisState).to.deep.equal(initialRedisState) + }) + + it('creates an initial hollow snapshot when redis has no snapshot (falls back to chunkStore with an empty chunk)', async function () { + // Start with an empty chunk store for the project + const projectId = fixtures.docs.uninitializedProject.id + await chunkStore.initializeProject(projectId) + const blobStore = new BlobStore(projectId) + await blobStore.putFile(testFiles.path('hello.txt')) + + // Prepare an initial change to add a single file to an empty project + const change = new Change( + [ + new AddFileOperation( + 'test.tex', + File.fromHash(testFiles.HELLO_TXT_HASH) + ), + ], + new Date(), + [] + ) + const changesToQueue = [change] + const endVersion = 0 + + // Queue the changes to add the test file + const status = await queueChanges(projectId, changesToQueue, endVersion) + expect(status).to.equal('ok') + + // Verify that we now have some state in redis + const redisState = await redisBackend.getState(projectId) + expect(redisState).to.not.be.null + expect(redisState.headSnapshot.files['test.tex']).to.deep.equal({ + stringLength: testFiles.HELLO_TXT_UTF8_LENGTH, + }) + }) + + it('throws ConflictingEndVersion if endVersion does not match current version (from chunkStore)', async function () { + const projectId = fixtures.docs.uninitializedProject.id + // Initialise an empty project in the chunk store + await chunkStore.initializeProject(projectId) + + // Ensure that the initial state in redis is empty + const initialRedisState = await redisBackend.getState(projectId) + expect(initialRedisState.headVersion).to.be.null + + // Prepare a change to add a file + const change = new Change( + [new AddFileOperation('test.tex', File.fromString(''))], + new Date(), + [] + ) + const changesToQueue = [change] + const incorrectEndVersion = 1 + + // Attempt to queue the changes with an incorrect endVersion (1 instead of 0) + await expect(queueChanges(projectId, changesToQueue, incorrectEndVersion)) + .to.be.rejectedWith(Chunk.ConflictingEndVersion) + .and.eventually.satisfies(err => { + expect(err.info).to.have.property( + 'clientEndVersion', + incorrectEndVersion + ) + expect(err.info).to.have.property('latestEndVersion', 0) + return true + }) + + // Verify that the state in redis has not changed + const redisStateAfterError = await redisBackend.getState(projectId) + expect(redisStateAfterError).to.deep.equal(initialRedisState) + }) + + it('throws ConflictingEndVersion if endVersion does not match current version (from redis snapshot)', async function () { + const projectId = fixtures.docs.uninitializedProject.id + + // Initialise the project in the redis with a test file + await chunkStore.initializeProject(projectId) + const initialChange = new Change( + [new AddFileOperation('initial.tex', File.fromString('content'))], + new Date(), + [] + ) + const initialChangesToQueue = [initialChange] + const 
versionBeforeInitialQueue = 0 + + // Queue the initial changes + await queueChanges( + projectId, + initialChangesToQueue, + versionBeforeInitialQueue + ) + const versionInRedisAfterSetup = + versionBeforeInitialQueue + initialChangesToQueue.length + + // Confirm that the initial changes were queued successfully + const initialRedisState = await redisBackend.getState(projectId) + expect(initialRedisState).to.not.be.null + expect(initialRedisState.headVersion).to.equal(versionInRedisAfterSetup) + + // Now prepare a subsequent change for the queue + const subsequentChange = new Change( + [new AddFileOperation('another.tex', File.fromString(''))], + new Date(), + [] + ) + const subsequentChangesToQueue = [subsequentChange] + const incorrectEndVersion = 0 + + // Attempt to queue the changes with an incorrect endVersion (0 instead of 1) + await expect( + queueChanges(projectId, subsequentChangesToQueue, incorrectEndVersion) + ) + .to.be.rejectedWith(Chunk.ConflictingEndVersion) + .and.eventually.satisfies(err => { + expect(err.info).to.have.property( + 'clientEndVersion', + incorrectEndVersion + ) + expect(err.info).to.have.property( + 'latestEndVersion', + versionInRedisAfterSetup + ) + return true + }) + + // Verify that the state in redis has not changed + const redisStateAfterError = await redisBackend.getState(projectId) + expect(redisStateAfterError).to.not.be.null + expect(redisStateAfterError).to.deep.equal(initialRedisState) + }) +}) diff --git a/services/history-v1/test/acceptance/js/storage/support/cleanup.js b/services/history-v1/test/acceptance/js/storage/support/cleanup.js index 632cc96c04..4df985d613 100644 --- a/services/history-v1/test/acceptance/js/storage/support/cleanup.js +++ b/services/history-v1/test/acceptance/js/storage/support/cleanup.js @@ -17,7 +17,6 @@ const MONGO_COLLECTIONS = [ 'projectHistoryChunks', // back_fill_file_hash.test.mjs - 'deletedFiles', 'deletedProjects', 'projects', 'projectHistoryBackedUpBlobs', diff --git a/services/history-v1/test/acceptance/js/storage/support/redis.js b/services/history-v1/test/acceptance/js/storage/support/redis.js new file mode 100644 index 0000000000..3f5b9cda27 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/support/redis.js @@ -0,0 +1,75 @@ +'use strict' + +const { Snapshot } = require('overleaf-editor-core') +const redis = require('../../../../../storage/lib/redis') +const redisBackend = require('../../../../../storage/lib/chunk_store/redis') +const rclient = redis.rclientHistory +const keySchema = redisBackend.keySchema + +// Helper to set up a basic project state in Redis +async function setupProjectState( + projectId, + { + headVersion = 0, + persistedVersion = null, + expireTime = null, + persistTime = null, + changes = [], + expireTimeFuture = false, // Default to not setting future expire time unless specified + } +) { + const headSnapshot = new Snapshot() + await rclient.set( + keySchema.head({ projectId }), + JSON.stringify(headSnapshot.toRaw()) + ) + await rclient.set( + keySchema.headVersion({ projectId }), + headVersion.toString() + ) + + if (persistedVersion !== null) { + await rclient.set( + keySchema.persistedVersion({ projectId }), + persistedVersion.toString() + ) + } else { + await rclient.del(keySchema.persistedVersion({ projectId })) + } + + if (expireTime !== null) { + await rclient.set( + keySchema.expireTime({ projectId }), + expireTime.toString() + ) + } else { + // If expireTimeFuture is true, set it to a future time, otherwise delete it if null + if (expireTimeFuture) { + const 
futureExpireTime = Date.now() + 5 * 60 * 1000 // 5 minutes in the future + await rclient.set( + keySchema.expireTime({ projectId }), + futureExpireTime.toString() + ) + } else { + await rclient.del(keySchema.expireTime({ projectId })) + } + } + + if (persistTime !== null) { + await rclient.set( + keySchema.persistTime({ projectId }), + persistTime.toString() + ) + } else { + await rclient.del(keySchema.persistTime({ projectId })) + } + + if (changes.length > 0) { + const rawChanges = changes.map(c => JSON.stringify(c.toRaw())) + await rclient.rpush(keySchema.changes({ projectId }), ...rawChanges) + } else { + await rclient.del(keySchema.changes({ projectId })) + } +} + +module.exports = { setupProjectState, rclient, keySchema } diff --git a/services/history-v1/test/acceptance/js/storage/support/runscript.js b/services/history-v1/test/acceptance/js/storage/support/runscript.js new file mode 100644 index 0000000000..7ff8355566 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/support/runscript.js @@ -0,0 +1,35 @@ +'use strict' + +const { promisify } = require('node:util') +const { execFile } = require('node:child_process') + +async function runScript(scriptPath, options = {}) { + const TIMEOUT = options.timeout || 10 * 1000 // 10 seconds default + let result + try { + result = await promisify(execFile)('node', [scriptPath], { + encoding: 'utf-8', + timeout: TIMEOUT, + env: { + ...process.env, + LOG_LEVEL: 'debug', // Override LOG_LEVEL for script output + }, + }) + result.status = 0 + } catch (err) { + const { stdout, stderr, code } = err + if (typeof code !== 'number') { + console.error(`Error running script ${scriptPath}:`, err) + throw err + } + result = { stdout, stderr, status: code } + } + // The script might exit with status 1 if it finds no keys to process, which is ok + if (result.status !== 0 && result.status !== 1) { + console.error(`Script ${scriptPath} failed:`, result.stderr) + throw new Error(`Script ${scriptPath} failed with status ${result.status}`) + } + return result +} + +module.exports = { runScript } diff --git a/services/notifications/docker-compose.ci.yml b/services/notifications/docker-compose.ci.yml index 8fd86c1fbb..24b57ab084 100644 --- a/services/notifications/docker-compose.ci.yml +++ b/services/notifications/docker-compose.ci.yml @@ -24,10 +24,13 @@ services: MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" + volumes: + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started user: node + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance diff --git a/services/notifications/docker-compose.yml b/services/notifications/docker-compose.yml index 090742ff6d..167e45fdb1 100644 --- a/services/notifications/docker-compose.yml +++ b/services/notifications/docker-compose.yml @@ -26,6 +26,7 @@ services: - .:/overleaf/services/notifications - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/notifications environment: ELASTIC_SEARCH_DSN: es:9200 @@ -39,6 +40,7 @@ services: depends_on: mongo: condition: service_started + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance mongo: diff --git a/services/project-history/app/js/FlushManager.js b/services/project-history/app/js/FlushManager.js index 6df3b20a87..455a4f56f7 100644 --- 
a/services/project-history/app/js/FlushManager.js +++ b/services/project-history/app/js/FlushManager.js @@ -11,6 +11,7 @@ import async from 'async' import logger from '@overleaf/logger' import OError from '@overleaf/o-error' import metrics from '@overleaf/metrics' +import Settings from '@overleaf/settings' import _ from 'lodash' import * as RedisManager from './RedisManager.js' import * as UpdatesProcessor from './UpdatesProcessor.js' @@ -37,6 +38,13 @@ export function flushIfOld(projectId, cutoffTime, callback) { ) metrics.inc('flush-old-updates', 1, { status: 'flushed' }) return UpdatesProcessor.processUpdatesForProject(projectId, callback) + } else if (Settings.shortHistoryQueues.includes(projectId)) { + logger.debug( + { projectId, firstOpTimestamp, cutoffTime }, + 'flushing project with short queue' + ) + metrics.inc('flush-old-updates', 1, { status: 'short-queue' }) + return UpdatesProcessor.processUpdatesForProject(projectId, callback) } else { metrics.inc('flush-old-updates', 1, { status: 'skipped' }) return callback() diff --git a/services/project-history/app/js/HistoryStoreManager.js b/services/project-history/app/js/HistoryStoreManager.js index bb41dfb3c0..38658bdf5b 100644 --- a/services/project-history/app/js/HistoryStoreManager.js +++ b/services/project-history/app/js/HistoryStoreManager.js @@ -35,7 +35,10 @@ class StringStream extends stream.Readable { _mocks.getMostRecentChunk = (projectId, historyId, callback) => { const path = `projects/${historyId}/latest/history` logger.debug({ projectId, historyId }, 'getting chunk from history service') - _requestChunk({ path, json: true }, callback) + _requestChunk({ path, json: true }, (err, chunk) => { + if (err) return callback(OError.tag(err)) + callback(null, chunk) + }) } /** @@ -54,7 +57,10 @@ export function getChunkAtVersion(projectId, historyId, version, callback) { { projectId, historyId, version }, 'getting chunk from history service for version' ) - _requestChunk({ path, json: true }, callback) + _requestChunk({ path, json: true }, (err, chunk) => { + if (err) return callback(OError.tag(err)) + callback(null, chunk) + }) } export function getMostRecentVersion(projectId, historyId, callback) { @@ -68,8 +74,10 @@ export function getMostRecentVersion(projectId, historyId, callback) { _.sortBy(chunk.chunk.history.changes || [], x => x.timestamp) ) // find the latest project and doc versions in the chunk - _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) => + _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) => { + if (err1) err1 = OError.tag(err1) _getLatestV2DocVersions(projectId, chunk, (err2, v2DocVersions) => { + if (err2) err2 = OError.tag(err2) // return the project and doc versions const projectStructureAndDocVersions = { project: projectVersion, @@ -83,7 +91,7 @@ export function getMostRecentVersion(projectId, historyId, callback) { chunk ) }) - ) + }) }) } @@ -211,7 +219,10 @@ export function getProjectBlob(historyId, blobHash, callback) { logger.debug({ historyId, blobHash }, 'getting blob from history service') _requestHistoryService( { path: `projects/${historyId}/blobs/${blobHash}` }, - callback + (err, blob) => { + if (err) return callback(OError.tag(err)) + callback(null, blob) + } ) } @@ -277,7 +288,10 @@ function createBlobFromString(historyId, data, fileId, callback) { (fsPath, cb) => { _createBlob(historyId, fsPath, cb) }, - callback + (err, hash) => { + if (err) return callback(OError.tag(err)) + callback(null, hash) + } ) } @@ -330,7 +344,7 @@ export function 
createBlobForUpdate(projectId, historyId, update, callback) { try { ranges = HistoryBlobTranslator.createRangeBlobDataFromUpdate(update) } catch (error) { - return callback(error) + return callback(OError.tag(error)) } createBlobFromString( historyId, @@ -338,7 +352,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { `project-${projectId}-doc-${update.doc}`, (err, fileHash) => { if (err) { - return callback(err) + return callback(OError.tag(err)) } if (ranges) { createBlobFromString( @@ -347,7 +361,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { `project-${projectId}-doc-${update.doc}-ranges`, (err, rangesHash) => { if (err) { - return callback(err) + return callback(OError.tag(err)) } logger.debug( { fileHash, rangesHash }, @@ -415,7 +429,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { }, (err, fileHash) => { if (err) { - return callback(err) + return callback(OError.tag(err)) } if (update.hash && update.hash !== fileHash) { logger.warn( @@ -447,7 +461,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) { }, (err, fileHash) => { if (err) { - return callback(err) + return callback(OError.tag(err)) } logger.debug({ fileHash }, 'created empty blob for file') callback(null, { file: fileHash }) @@ -520,7 +534,10 @@ export function initializeProject(historyId, callback) { export function deleteProject(projectId, callback) { _requestHistoryService( { method: 'DELETE', path: `projects/${projectId}` }, - callback + err => { + if (err) return callback(OError.tag(err)) + callback(null) + } ) } diff --git a/services/project-history/app/js/SyncManager.js b/services/project-history/app/js/SyncManager.js index ef8caf69eb..43cb61be9f 100644 --- a/services/project-history/app/js/SyncManager.js +++ b/services/project-history/app/js/SyncManager.js @@ -23,6 +23,7 @@ import { isInsert, isDelete } from './Utils.js' /** * @import { Comment as HistoryComment, TrackedChange as HistoryTrackedChange } from 'overleaf-editor-core' + * @import { CommentRawData, TrackedChangeRawData } from 'overleaf-editor-core/lib/types' * @import { Comment, Entity, ResyncDocContentUpdate, RetainOp, TrackedChange } from './types' * @import { TrackedChangeTransition, TrackingDirective, TrackingType, Update } from './types' * @import { ProjectStructureUpdate } from './types' @@ -764,11 +765,19 @@ class SyncUpdateExpander { } const persistedComments = file.getComments().toArray() - await this.queueUpdatesForOutOfSyncComments( - update, - pathname, - persistedComments - ) + if (update.resyncDocContent.historyOTRanges) { + this.queueUpdatesForOutOfSyncCommentsHistoryOT( + update, + pathname, + file.getComments().toRaw() + ) + } else { + await this.queueUpdatesForOutOfSyncComments( + update, + pathname, + persistedComments + ) + } const persistedChanges = file.getTrackedChanges().asSorted() await this.queueUpdatesForOutOfSyncTrackedChanges( @@ -825,6 +834,91 @@ class SyncUpdateExpander { return expandedUpdate } + /** + * Queue updates for out of sync comments + * + * @param {ResyncDocContentUpdate} update + * @param {string} pathname + * @param {CommentRawData[]} persistedComments + */ + queueUpdatesForOutOfSyncCommentsHistoryOT( + update, + pathname, + persistedComments + ) { + const expectedComments = + update.resyncDocContent.historyOTRanges?.comments ?? 
[] + const expectedCommentsById = new Map( + expectedComments.map(comment => [comment.id, comment]) + ) + const persistedCommentsById = new Map( + persistedComments.map(comment => [comment.id, comment]) + ) + + // Delete any persisted comment that is not in the expected comment list. + for (const persistedComment of persistedComments) { + if (!expectedCommentsById.has(persistedComment.id)) { + this.expandedUpdates.push({ + doc: update.doc, + op: [{ deleteComment: persistedComment.id }], + meta: { + pathname, + resync: true, + origin: this.origin, + ts: update.meta.ts, + }, + }) + } + } + + for (const expectedComment of expectedComments) { + const persistedComment = persistedCommentsById.get(expectedComment.id) + if ( + persistedComment && + commentRangesAreInSyncHistoryOT(persistedComment, expectedComment) + ) { + if (expectedComment.resolved === persistedComment.resolved) { + // Both comments are identical; do nothing + } else { + // Only the resolved state differs + this.expandedUpdates.push({ + doc: update.doc, + op: [ + { + commentId: expectedComment.id, + resolved: expectedComment.resolved, + }, + ], + meta: { + pathname, + resync: true, + origin: this.origin, + ts: update.meta.ts, + }, + }) + } + } else { + // New comment or ranges differ + this.expandedUpdates.push({ + doc: update.doc, + op: [ + { + commentId: expectedComment.id, + ranges: expectedComment.ranges, + resolved: expectedComment.resolved, + }, + ], + meta: { + pathname, + resync: true, + origin: this.origin, + ts: update.meta.ts, + }, + }) + } + } + } + /** * Queue updates for out of sync comments * @@ -951,6 +1045,7 @@ class SyncUpdateExpander { for (const transition of getTrackedChangesTransitions( persistedChanges, expectedChanges, + update.resyncDocContent.historyOTRanges?.trackedChanges || [], expectedContent.length )) { if (transition.pos > cursor) { @@ -1018,6 +1113,25 @@ class SyncUpdateExpander { } } +/** + * Compares the ranges in the persisted and expected comments + * + * @param {CommentRawData} persistedComment + * @param {CommentRawData} expectedComment + */ +function commentRangesAreInSyncHistoryOT(persistedComment, expectedComment) { + if (persistedComment.ranges.length !== expectedComment.ranges.length) { + return false + } + for (let i = 0; i < persistedComment.ranges.length; i++) { + const persistedRange = persistedComment.ranges[i] + const expectedRange = expectedComment.ranges[i] + if (persistedRange.pos !== expectedRange.pos) return false + if (persistedRange.length !== expectedRange.length) return false + } + return true +} + /** * Compares the ranges in the persisted and expected comments * @@ -1049,11 +1163,13 @@ function commentRangesAreInSync(persistedComment, expectedComment) { * * @param {readonly HistoryTrackedChange[]} persistedChanges * @param {TrackedChange[]} expectedChanges + * @param {TrackedChangeRawData[]} persistedChangesHistoryOT * @param {number} docLength */ function getTrackedChangesTransitions( persistedChanges, expectedChanges, + persistedChangesHistoryOT, docLength ) { /** @type {TrackedChangeTransition[]} */ @@ -1076,6 +1192,19 @@ function getTrackedChangesTransitions( }) } + for (const change of persistedChangesHistoryOT) { + transitions.push({ + stage: 'expected', + pos: change.range.pos, + tracking: change.tracking, + }) + transitions.push({ + stage: 'expected', + pos: change.range.pos + change.range.length, + tracking: { type: 'none' }, + }) + } + for (const change of expectedChanges) { const op = change.op const pos = op.hpos ?? 
op.p diff --git a/services/project-history/app/js/UpdateCompressor.js b/services/project-history/app/js/UpdateCompressor.js index 471fc791ab..5ae7591a7f 100644 --- a/services/project-history/app/js/UpdateCompressor.js +++ b/services/project-history/app/js/UpdateCompressor.js @@ -1,8 +1,15 @@ // @ts-check +import Metrics from '@overleaf/metrics' import OError from '@overleaf/o-error' import DMP from 'diff-match-patch' import { EditOperationBuilder } from 'overleaf-editor-core' +import zlib from 'node:zlib' +import { ReadableString, WritableBuffer } from '@overleaf/stream-utils' +import Stream from 'node:stream' +import logger from '@overleaf/logger' +import { callbackify } from '@overleaf/promise-utils' +import Settings from '@overleaf/settings' /** * @import { DeleteOp, InsertOp, Op, Update } from './types' @@ -162,7 +169,9 @@ export function concatUpdatesWithSameVersion(updates) { lastUpdate.op != null && lastUpdate.v === update.v && lastUpdate.doc === update.doc && - lastUpdate.pathname === update.pathname + lastUpdate.pathname === update.pathname && + EditOperationBuilder.isValid(update.op[0]) === + EditOperationBuilder.isValid(lastUpdate.op[0]) ) { lastUpdate.op = lastUpdate.op.concat(update.op) if (update.meta.doc_hash == null) { @@ -180,6 +189,66 @@ export function concatUpdatesWithSameVersion(updates) { return concattedUpdates } +async function estimateStorage(updates) { + const blob = JSON.stringify(updates) + const bytes = Buffer.from(blob).byteLength + const read = new ReadableString(blob) + const compress = zlib.createGzip() + const write = new WritableBuffer() + await Stream.promises.pipeline(read, compress, write) + const bytesGz = write.size() + return { bytes, bytesGz, nUpdates: updates.length } +} + +/** + * @param {Update[]} rawUpdates + * @param {string} projectId + * @param {import("./Profiler").Profiler} profile + * @return {Promise} + */ +async function compressRawUpdatesWithMetrics(rawUpdates, projectId, profile) { + if (100 * Math.random() > Settings.estimateCompressionSample) { + return compressRawUpdatesWithProfile(rawUpdates, projectId, profile) + } + const before = await estimateStorage(rawUpdates) + profile.log('estimateRawUpdatesSize') + const updates = compressRawUpdatesWithProfile(rawUpdates, projectId, profile) + const after = await estimateStorage(updates) + for (const [path, values] of Object.entries({ before, after })) { + for (const [method, v] of Object.entries(values)) { + Metrics.summary('updates_compression_estimate', v, { path, method }) + } + } + for (const method of Object.keys(before)) { + const percentage = Math.ceil(100 * (after[method] / before[method])) + Metrics.summary('updates_compression_percentage', percentage, { method }) + } + profile.log('estimateCompressedUpdatesSize') + return updates +} + +export const compressRawUpdatesWithMetricsCb = callbackify( + compressRawUpdatesWithMetrics +) + +/** + * @param {Update[]} rawUpdates + * @param {string} projectId + * @param {import("./Profiler").Profiler} profile + * @return {Update[]} + */ +function compressRawUpdatesWithProfile(rawUpdates, projectId, profile) { + const updates = compressRawUpdates(rawUpdates) + const timeTaken = profile.log('compressRawUpdates').getTimeDelta() + if (timeTaken >= 1000) { + logger.debug( + { projectId, updates: rawUpdates, timeTaken }, + 'slow compression of raw updates' + ) + } + return updates +} + export function compressRawUpdates(rawUpdates) { let updates = convertToSingleOpUpdates(rawUpdates) updates = compressUpdates(updates) diff --git 
a/services/project-history/app/js/UpdatesProcessor.js b/services/project-history/app/js/UpdatesProcessor.js index a76241d7ca..b4895c012d 100644 --- a/services/project-history/app/js/UpdatesProcessor.js +++ b/services/project-history/app/js/UpdatesProcessor.js @@ -546,7 +546,10 @@ export function _processUpdates( } if (filteredUpdates.length === 0) { // return early if there are no updates to apply - return SyncManager.setResyncState(projectId, newSyncState, callback) + return SyncManager.setResyncState(projectId, newSyncState, err => { + if (err) return callback(err) + callback(null, { resyncNeeded: false }) + }) } // only make request to history service if we have actual updates to process _getMostRecentVersionWithDebug( @@ -593,17 +596,17 @@ export function _processUpdates( return cb(err) } profile.log('skipAlreadyAppliedUpdates') - const compressedUpdates = - UpdateCompressor.compressRawUpdates(unappliedUpdates) - const timeTaken = profile - .log('compressRawUpdates') - .getTimeDelta() - if (timeTaken >= 1000) { - logger.debug( - { projectId, updates: unappliedUpdates, timeTaken }, - 'slow compression of raw updates' - ) - } + cb(null, unappliedUpdates) + }, + (unappliedUpdates, cb) => { + UpdateCompressor.compressRawUpdatesWithMetricsCb( + unappliedUpdates, + projectId, + profile, + cb + ) + }, + (compressedUpdates, cb) => { cb = profile.wrap('createBlobs', cb) BlobManager.createBlobsForUpdates( projectId, diff --git a/services/project-history/app/js/types.ts b/services/project-history/app/js/types.ts index 96701e587f..c11b7741e3 100644 --- a/services/project-history/app/js/types.ts +++ b/services/project-history/app/js/types.ts @@ -3,6 +3,8 @@ import { LinkedFileData, RawEditOperation, RawOrigin, + CommentRawData, + TrackedChangeRawData, } from 'overleaf-editor-core/lib/types' export type Update = @@ -118,6 +120,10 @@ export type ResyncDocContentUpdate = { content: string version: number ranges?: Ranges + historyOTRanges?: { + comments: CommentRawData[] + trackedChanges: TrackedChangeRawData[] + } resolvedCommentIds?: string[] } projectHistoryId: string diff --git a/services/project-history/config/settings.defaults.cjs b/services/project-history/config/settings.defaults.cjs index 9e5a39868a..d767cddd96 100644 --- a/services/project-history/config/settings.defaults.cjs +++ b/services/project-history/config/settings.defaults.cjs @@ -106,4 +106,12 @@ module.exports = { }, maxFileSizeInBytes: 100 * 1024 * 1024, // 100 megabytes + + shortHistoryQueues: (process.env.SHORT_HISTORY_QUEUES || '') + .split(',') + .filter(s => !!s), + estimateCompressionSample: parseInt( + process.env.ESTIMATE_COMPRESSION_SAMPLE || '0', + 10 + ), } diff --git a/services/project-history/docker-compose.ci.yml b/services/project-history/docker-compose.ci.yml index 2fe97bd9b3..ca15f35fef 100644 --- a/services/project-history/docker-compose.ci.yml +++ b/services/project-history/docker-compose.ci.yml @@ -28,12 +28,15 @@ services: MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" + volumes: + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it depends_on: mongo: condition: service_started redis: condition: service_healthy user: node + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance @@ -45,7 +48,7 @@ services: command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . 
user: root redis: - image: redis + image: redis:7.4.3 healthcheck: test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/project-history/docker-compose.yml b/services/project-history/docker-compose.yml index 68360baf44..95a36b5fcb 100644 --- a/services/project-history/docker-compose.yml +++ b/services/project-history/docker-compose.yml @@ -26,6 +26,7 @@ services: - .:/overleaf/services/project-history - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/project-history environment: ELASTIC_SEARCH_DSN: es:9200 @@ -45,10 +46,11 @@ services: condition: service_started redis: condition: service_healthy + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance redis: - image: redis + image: redis:7.4.3 healthcheck: test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/project-history/package.json b/services/project-history/package.json index 2a54a807d3..4160f36f6f 100644 --- a/services/project-history/package.json +++ b/services/project-history/package.json @@ -9,8 +9,8 @@ "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", "start": "node app.js", "nodemon": "node --watch app.js", - "test:acceptance:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", - "test:unit:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec $@ test/unit/js", + "test:acceptance:_run": "mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:unit:_run": "mocha --loader=esmock --recursive --reporter spec $@ test/unit/js", "lint": "eslint --max-warnings 0 --format unix .", "format": "prettier --list-different $PWD/'**/*.*js'", "format:fix": "prettier --write $PWD/'**/*.*js'", @@ -25,6 +25,7 @@ "@overleaf/promise-utils": "*", "@overleaf/redis-wrapper": "*", "@overleaf/settings": "*", + "@overleaf/stream-utils": "*", "async": "^3.2.5", "aws-sdk": "^2.650.0", "body-parser": "^1.20.3", diff --git a/services/project-history/scripts/flush_old.js b/services/project-history/scripts/flush_old.js index 6dc140196e..7ac13b757a 100644 --- a/services/project-history/scripts/flush_old.js +++ b/services/project-history/scripts/flush_old.js @@ -124,11 +124,14 @@ async function main() { .map((projectId, idx) => { return { projectId, timestamp: timestamps[idx] } }) - .filter(({ timestamp }) => { + .filter(({ projectId, timestamp }) => { if (!timestamp) { nullCount++ + return true // Unknown age } - return timestamp ? 
olderThan(maxAge, timestamp) : true + if (olderThan(maxAge, timestamp)) return true // Older than threshold + if (Settings.shortHistoryQueues.includes(projectId)) return true // Short queue + return false // Do not flush }) collectedProjects.push(...newProjects) } diff --git a/services/project-history/scripts/retry_failures.js b/services/project-history/scripts/retry_failures.js new file mode 100755 index 0000000000..85ee21faf4 --- /dev/null +++ b/services/project-history/scripts/retry_failures.js @@ -0,0 +1,26 @@ +import * as RetryManager from '../app/js/RetryManager.js' +import minimist from 'minimist' + +const args = minimist(process.argv.slice(2), { + string: ['failureType', 'timeout', 'limit'], + default: { + failureType: 'soft', + timeout: (60 * 60 * 1000).toString(), + limit: (100_000).toString(), + }, +}) + +const failureType = args.failureType +const timeout = parseInt(args.timeout, 10) +const limit = parseInt(args.limit, 10) + +RetryManager.retryFailures({ failureType, timeout, limit }, (err, result) => { + if (err) { + console.error(err) + process.exit(1) + } else { + console.log(JSON.stringify(result)) + console.log('Done.') + } + process.exit(0) +}) diff --git a/services/project-history/test/acceptance/js/FlushManagerTests.js b/services/project-history/test/acceptance/js/FlushManagerTests.js index d11346d9a3..8d4432d3ef 100644 --- a/services/project-history/test/acceptance/js/FlushManagerTests.js +++ b/services/project-history/test/acceptance/js/FlushManagerTests.js @@ -6,6 +6,7 @@ import assert from 'node:assert' import mongodb from 'mongodb-legacy' import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js' import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js' +import Settings from '@overleaf/settings' const { ObjectId } = mongodb const MockHistoryStore = () => nock('http://127.0.0.1:3100') @@ -127,7 +128,7 @@ describe('Flushing old queues', function () { 'made calls to history service to store updates in the background' ) done() - }, 100) + }, 1_000) } ) }) @@ -183,6 +184,88 @@ describe('Flushing old queues', function () { }) }) + describe('when the update is newer than the cutoff and project has short queue', function () { + beforeEach(function () { + Settings.shortHistoryQueues.push(this.projectId) + }) + afterEach(function () { + Settings.shortHistoryQueues.length = 0 + }) + beforeEach(function (done) { + this.flushCall = MockHistoryStore() + .put( + `/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb` + ) + .reply(201) + .post(`/api/projects/${historyId}/legacy_changes?end_version=0`) + .reply(200) + const update = { + pathname: '/main.tex', + docLines: 'a\nb', + doc: this.docId, + meta: { user_id: this.user_id, ts: new Date() }, + } + async.series( + [ + cb => + ProjectHistoryClient.pushRawUpdate(this.projectId, update, cb), + cb => + ProjectHistoryClient.setFirstOpTimestamp( + this.projectId, + Date.now() - 60 * 1000, + cb + ), + ], + done + ) + }) + + it('flushes the project history queue', function (done) { + request.post( + { + url: `http://127.0.0.1:3054/flush/old?maxAge=${3 * 3600}`, + }, + (error, res, body) => { + if (error) { + return done(error) + } + expect(res.statusCode).to.equal(200) + assert( + this.flushCall.isDone(), + 'made calls to history service to store updates' + ) + done() + } + ) + }) + + it('flushes the project history queue in the background when requested', function (done) { + request.post( + { + url: `http://127.0.0.1:3054/flush/old?maxAge=${3 * 3600}&background=1`, + }, + (error, res, 
body) => { + if (error) { + return done(error) + } + expect(res.statusCode).to.equal(200) + expect(body).to.equal('{"message":"running flush in background"}') + assert( + !this.flushCall.isDone(), + 'did not make calls to history service to store updates in the foreground' + ) + setTimeout(() => { + assert( + this.flushCall.isDone(), + 'made calls to history service to store updates in the background' + ) + done() + }, 1_000) + } + ) + }) + }) + describe('when the update does not have a timestamp', function () { beforeEach(function (done) { this.flushCall = MockHistoryStore() diff --git a/services/project-history/test/acceptance/js/SyncTests.js b/services/project-history/test/acceptance/js/SyncTests.js index 89e002d4dd..f7420e6cdb 100644 --- a/services/project-history/test/acceptance/js/SyncTests.js +++ b/services/project-history/test/acceptance/js/SyncTests.js @@ -1225,7 +1225,7 @@ describe('Syncing with web and doc-updater', function () { ) }) - it('should fix comments in the history store', function (done) { + it('should add comments in the history store', function (done) { const commentId = 'comment-id' const addComment = MockHistoryStore() .post(`/api/projects/${historyId}/legacy_changes`, body => { @@ -1315,6 +1315,1195 @@ describe('Syncing with web and doc-updater', function () { } ) }) + + it('should add comments in the history store (history-ot)', function (done) { + const commentId = 'comment-id' + const addComment = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + commentId, + ranges: [{ pos: 1, length: 10 }], + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + historyOTRanges: { + comments: [ + { + id: commentId, + ranges: [ + { + pos: 1, + length: 10, + }, + ], + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + addComment.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should add tracked changes in the history store', function (done) { + const fixTrackedChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + textOperation: [ + { + r: 1, + tracking: { + ts: this.timestamp.toJSON(), + type: 'delete', + userId: 'user-id', + }, + }, + { + r: 1, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + 1, + ], + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + 
ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + ranges: { + changes: [ + { + id: 'id1', + op: { + d: 'a', + p: 0, + }, + metadata: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + { + id: 'id2', + op: { + i: '\n', + p: 0, + hpos: 1, + }, + metadata: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + fixTrackedChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should add tracked changes in the history store (history-ot)', function (done) { + const fixTrackedChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + textOperation: [ + { + r: 1, + tracking: { + ts: this.timestamp.toJSON(), + type: 'delete', + userId: 'user-id', + }, + }, + { + r: 1, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + 1, + ], + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + historyOTRanges: { + trackedChanges: [ + { + range: { pos: 0, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'delete', + userId: 'user-id', + }, + }, + { + range: { pos: 1, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + fixTrackedChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + }) + + describe("when a doc's ranges are out of sync", function () { + const commentId = 'comment-id' + beforeEach(function () { + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + 'main.tex': { + hash: '0a207c060e61f3b88eaee0a8cd0696f46fb155eb', + rangesHash: '0a207c060e61f3b88eaee0a8cd0696f46fb155ec', + stringLength: 3, + }, + }, + }, + changes: [], + }, + startVersion: 0, + }, + }) + + MockHistoryStore() + .get( + 
`/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb` + ) + .reply(200, 'a\nb') + + MockHistoryStore() + .get( + `/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155ec` + ) + .reply( + 200, + JSON.stringify({ + comments: [{ id: commentId, ranges: [{ pos: 0, length: 3 }] }], + trackedChanges: [ + { + range: { pos: 0, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'delete', + userId: 'user-id', + }, + }, + { + range: { pos: 2, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + ], + }) + ) + }) + + it('should fix comments in the history store', function (done) { + const addComment = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + commentId, + ranges: [{ pos: 1, length: 2 }], + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + ranges: { + comments: [ + { + id: commentId, + op: { + c: 'a', + p: 0, + hpos: 1, + hlen: 2, + t: commentId, + }, + meta: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + ], + changes: [ + { + id: 'id1', + op: { + d: 'a', + p: 0, + }, + metadata: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + { + id: 'id2', + op: { + i: '\n', + p: 1, + hpos: 2, + }, + metadata: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + addComment.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should fix resolved state for comments in the history store', function (done) { + const addComment = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + commentId, + resolved: true, + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + resolvedCommentIds: [commentId], + ranges: { + comments: [ + { + id: commentId, + op: { + c: 'a', + p: 0, + hpos: 0, + hlen: 3, + t: commentId, + }, + meta: { + user_id: 'user-id', + 
ts: this.timestamp, + }, + }, + ], + changes: [ + { + id: 'id1', + op: { + d: 'a', + p: 0, + }, + metadata: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + { + id: 'id2', + op: { + i: '\n', + p: 1, + hpos: 2, + }, + metadata: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + addComment.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should fix comments in the history store (history-ot)', function (done) { + const addComment = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + commentId, + ranges: [{ pos: 1, length: 2 }], + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + historyOTRanges: { + comments: [ + { + id: commentId, + ranges: [ + { + pos: 1, + length: 2, + }, + ], + }, + ], + trackedChanges: [ + { + range: { pos: 0, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'delete', + userId: 'user-id', + }, + }, + { + range: { pos: 2, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + addComment.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should fix resolved state for comments in the history store (history-ot)', function (done) { + const addComment = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + commentId, + resolved: true, + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + historyOTRanges: { + comments: [ + { + id: commentId, + ranges: [ + { + pos: 0, + length: 3, + }, + ], + resolved: true, + }, + ], 
+ trackedChanges: [ + { + range: { pos: 0, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'delete', + userId: 'user-id', + }, + }, + { + range: { pos: 2, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + addComment.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should fix tracked changes in the history store', function (done) { + const fixTrackedChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + textOperation: [ + 1, + { + r: 1, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + { + r: 1, + tracking: { + type: 'none', + }, + }, + ], + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + ranges: { + comments: [ + { + id: commentId, + op: { + c: 'a', + p: 0, + hpos: 0, + hlen: 3, + t: commentId, + }, + meta: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + ], + changes: [ + { + id: 'id1', + op: { + d: 'a', + p: 0, + }, + metadata: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + { + id: 'id2', + op: { + i: '\n', + p: 0, + hpos: 1, + }, + metadata: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + fixTrackedChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should fix tracked changes in the history store (history-ot)', function (done) { + const fixTrackedChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + textOperation: [ + 1, + { + r: 1, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + { + r: 1, + tracking: { + type: 'none', + }, + }, + ], + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, 
+ } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + historyOTRanges: { + comments: [ + { + id: commentId, + ranges: [ + { + pos: 0, + length: 3, + }, + ], + }, + ], + trackedChanges: [ + { + range: { pos: 0, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'delete', + userId: 'user-id', + }, + }, + { + range: { pos: 1, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + fixTrackedChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should fix both comments and tracked changes in the history store (history-ot)', function (done) { + const fixTrackedChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + // not merged due to comment operation using history-ot and tracked-changes operation using sharejs ot + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + commentId, + ranges: [{ pos: 1, length: 2 }], + }, + ], + origin: { kind: 'test-origin' }, + }, + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + textOperation: [ + 1, + { + r: 1, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + { + r: 1, + tracking: { + type: 'none', + }, + }, + ], + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + historyOTRanges: { + comments: [ + { + id: commentId, + ranges: [ + { + pos: 1, + length: 2, + }, + ], + }, + ], + trackedChanges: [ + { + range: { pos: 0, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'delete', + userId: 'user-id', + }, + }, + { + range: { pos: 1, length: 1 }, + tracking: { + ts: this.timestamp.toJSON(), + type: 'insert', + userId: 'user-id', + }, + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + fixTrackedChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) }) describe('resyncProjectStructureOnly', function () { diff --git a/services/project-history/test/acceptance/js/helpers/ProjectHistoryApp.js b/services/project-history/test/acceptance/js/helpers/ProjectHistoryApp.js index ae453b74f9..6a81221840 100644 --- 
a/services/project-history/test/acceptance/js/helpers/ProjectHistoryApp.js +++ b/services/project-history/test/acceptance/js/helpers/ProjectHistoryApp.js @@ -9,6 +9,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ import { app } from '../../../../app/js/server.js' +import { mongoClient } from '../../../../app/js/mongodb.js' let running = false let initing = false @@ -29,13 +30,16 @@ export function ensureRunning(callback) { if (error != null) { throw error } - running = true - return (() => { - const result = [] - for (callback of Array.from(callbacks)) { - result.push(callback()) + + // Wait for mongo + mongoClient.connect(error => { + if (error != null) { + throw error } - return result - })() + running = true + for (callback of Array.from(callbacks)) { + callback() + } + }) }) } diff --git a/services/project-history/test/unit/js/UpdatesManager/UpdatesProcessorTests.js b/services/project-history/test/unit/js/UpdatesManager/UpdatesProcessorTests.js index 6f148e5a8d..fcc0918e11 100644 --- a/services/project-history/test/unit/js/UpdatesManager/UpdatesProcessorTests.js +++ b/services/project-history/test/unit/js/UpdatesManager/UpdatesProcessorTests.js @@ -6,14 +6,14 @@ import * as Errors from '../../../../app/js/Errors.js' const MODULE_PATH = '../../../../app/js/UpdatesProcessor.js' describe('UpdatesProcessor', function () { - before(async function () { + beforeEach(async function () { this.extendLock = sinon.stub() this.BlobManager = { createBlobsForUpdates: sinon.stub(), } this.HistoryStoreManager = { getMostRecentVersion: sinon.stub(), - sendChanges: sinon.stub().yields(null, {}), + sendChanges: sinon.stub().yields(null, { resyncNeeded: true }), } this.LockManager = { runWithLock: sinon.spy((key, runner, callback) => @@ -22,7 +22,7 @@ describe('UpdatesProcessor', function () { } this.RedisManager = {} this.UpdateCompressor = { - compressRawUpdates: sinon.stub(), + compressRawUpdatesWithMetricsCb: sinon.stub(), } this.UpdateTranslator = { convertToChanges: sinon.stub(), @@ -299,7 +299,10 @@ describe('UpdatesProcessor', function () { null, this.expandedUpdates ) - this.UpdateCompressor.compressRawUpdates.returns(this.compressedUpdates) + this.UpdateCompressor.compressRawUpdatesWithMetricsCb.yields( + null, + this.compressedUpdates + ) this.BlobManager.createBlobsForUpdates.callsArgWith( 4, null, @@ -315,8 +318,8 @@ describe('UpdatesProcessor', function () { this.ol_project_id, this.rawUpdates, this.extendLock, - err => { - this.callback(err) + (err, flushResponse) => { + this.callback(err, flushResponse) done() } ) @@ -347,7 +350,7 @@ describe('UpdatesProcessor', function () { }) it('should compress updates', function () { - this.UpdateCompressor.compressRawUpdates.should.have.been.calledWith( + this.UpdateCompressor.compressRawUpdatesWithMetricsCb.should.have.been.calledWith( this.expandedUpdates ) }) @@ -382,8 +385,74 @@ describe('UpdatesProcessor', function () { ) }) - it('should call the callback with no error', function () { - this.callback.should.have.been.called + it('should call the callback with no error and flush response', function () { + this.callback.should.have.been.calledWith(null, { resyncNeeded: true }) + }) + }) + + describe('no updates', function () { + beforeEach(function (done) { + this.SyncManager.skipUpdatesDuringSync.yields( + null, + [], + this.newSyncState + ) + this.UpdatesProcessor._processUpdates( + this.project_id, + this.ol_project_id, + this.rawUpdates, + this.extendLock, + (err, flushResponse) => { + 
this.callback(err, flushResponse) + done() + } + ) + }) + + it('should not get the latest version id', function () { + this.HistoryStoreManager.getMostRecentVersion.should.not.have.been.calledWith( + this.project_id, + this.ol_project_id + ) + }) + + it('should skip updates when resyncing', function () { + this.SyncManager.skipUpdatesDuringSync.should.have.been.calledWith( + this.project_id, + this.rawUpdates + ) + }) + + it('should not expand sync updates', function () { + this.SyncManager.expandSyncUpdates.should.not.have.been.called + }) + + it('should not compress updates', function () { + this.UpdateCompressor.compressRawUpdatesWithMetricsCb.should.not.have + .been.called + }) + + it('should not create any blobs for the updates', function () { + this.BlobManager.createBlobsForUpdates.should.not.have.been.called + }) + + it('should not convert the updates into a change request', function () { + this.UpdateTranslator.convertToChanges.should.not.have.been.called + }) + + it('should not send the change request to the history store', function () { + this.HistoryStoreManager.sendChanges.should.not.have.been.called + }) + + it('should set the sync state', function () { + this.SyncManager.setResyncState.should.have.been.calledWith( + this.project_id, + this.newSyncState + ) + }) + + it('should call the callback with fake flush response', function () { + this.callback.should.have.been.calledWith(null, { resyncNeeded: false }) + }) + }) @@ -412,7 +481,7 @@ describe('UpdatesProcessor', function () { }) describe('_skipAlreadyAppliedUpdates', function () { - before(function () { + beforeEach(function () { this.UpdateTranslator.isProjectStructureUpdate.callsFake( update => update.version != null ) @@ -420,7 +489,7 @@ describe('UpdatesProcessor', function () { }) describe('with all doc ops in order', function () { - before(function () { + beforeEach(function () { this.updates = [ { doc: 'id', v: 1 }, { doc: 'id', v: 2 }, @@ -440,7 +509,7 @@ describe('UpdatesProcessor', function () { }) describe('with all project ops in order', function () { - before(function () { + beforeEach(function () { this.updates = [ { version: 1 }, { version: 2 }, @@ -460,7 +529,7 @@ describe('UpdatesProcessor', function () { }) describe('with all multiple doc and ops in order', function () { - before(function () { + beforeEach(function () { this.updates = [ { doc: 'id1', v: 1 }, { doc: 'id1', v: 2 }, @@ -488,64 +557,47 @@ describe('UpdatesProcessor', function () { }) describe('with doc ops out of order', function () { - before(function () { + beforeEach(function () { this.updates = [ { doc: 'id', v: 1 }, { doc: 'id', v: 2 }, { doc: 'id', v: 4 }, { doc: 'id', v: 3 }, ] - this.skipFn = sinon.spy( - this.UpdatesProcessor._mocks, - '_skipAlreadyAppliedUpdates' - ) - try { - this.updatesToApply = - this.UpdatesProcessor._skipAlreadyAppliedUpdates( - this.project_id, - this.updates, - { docs: {} } - ) - } catch (error) {} - }) - - after(function () { - this.skipFn.restore() }) it('should throw an exception', function () { - this.skipFn.threw('OpsOutOfOrderError').should.equal(true) + expect(() => { + this.UpdatesProcessor._skipAlreadyAppliedUpdates( + this.project_id, + this.updates, + { docs: {} } + ) + }).to.throw(Errors.OpsOutOfOrderError) }) }) describe('with project ops out of order', function () { - before(function () { + beforeEach(function () { + this.UpdateTranslator.isProjectStructureUpdate.callsFake( + update => update.version != null + ) this.updates = [ { version: 1 }, { version: 2 }, { version: 4 }, { version: 3 }, ] -
this.skipFn = sinon.spy( - this.UpdatesProcessor._mocks, - '_skipAlreadyAppliedUpdates' - ) - try { - this.updatesToApply = - this.UpdatesProcessor._skipAlreadyAppliedUpdates( - this.project_id, - this.updates, - { docs: {} } - ) - } catch (error) {} - }) - - after(function () { - this.skipFn.restore() }) it('should throw an exception', function () { - this.skipFn.threw('OpsOutOfOrderError').should.equal(true) + expect(() => { + this.UpdatesProcessor._skipAlreadyAppliedUpdates( + this.project_id, + this.updates, + { docs: {} } + ) + }).to.throw(Errors.OpsOutOfOrderError) }) }) }) diff --git a/services/real-time/docker-compose.ci.yml b/services/real-time/docker-compose.ci.yml index 9011627c06..a5a2292e72 100644 --- a/services/real-time/docker-compose.ci.yml +++ b/services/real-time/docker-compose.ci.yml @@ -43,7 +43,7 @@ services: command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . user: root redis: - image: redis + image: redis:7.4.3 healthcheck: test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/real-time/docker-compose.yml b/services/real-time/docker-compose.yml index 9333271dcf..f1041164bc 100644 --- a/services/real-time/docker-compose.yml +++ b/services/real-time/docker-compose.yml @@ -46,7 +46,7 @@ services: command: npm run --silent test:acceptance redis: - image: redis + image: redis:7.4.3 healthcheck: test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ] interval: 1s diff --git a/services/web/.eslintrc.js b/services/web/.eslintrc.js index 3c672de7e7..ef3cf11de5 100644 --- a/services/web/.eslintrc.js +++ b/services/web/.eslintrc.js @@ -64,6 +64,10 @@ module.exports = { { // Test specific rules files: ['**/test/**/*.*'], + excludedFiles: [ + '**/test/unit/src/**/*.test.mjs', + 'test/unit/vitest_bootstrap.mjs', + ], // exclude vitest files plugins: ['mocha', 'chai-expect', 'chai-friendly'], env: { mocha: true, @@ -95,6 +99,30 @@ module.exports = { '@typescript-eslint/no-unused-expressions': 'off', }, }, + { + files: [ + '**/test/unit/src/**/*.test.mjs', + 'test/unit/vitest_bootstrap.mjs', + ], + env: { + jest: true, // best match for vitest API etc. 
+ }, + plugins: ['@vitest', 'chai-expect', 'chai-friendly'], // still using chai for now + rules: { + // vitest-specific rules + '@vitest/no-focused-tests': 'error', + '@vitest/no-disabled-tests': 'error', + + // Swap the no-unused-expressions rule with a more chai-friendly one + 'no-unused-expressions': 'off', + 'chai-friendly/no-unused-expressions': 'error', + + // chai-specific rules + 'chai-expect/missing-assertion': 'error', + 'chai-expect/terminating-properties': 'error', + '@typescript-eslint/no-unused-expressions': 'off', + }, + }, { // ES specific rules files: [ @@ -355,6 +383,18 @@ module.exports = { 'Modify location via customLocalStorage instead of calling window.localStorage methods directly', }, ], + 'no-unused-vars': 'off', + '@typescript-eslint/no-unused-vars': [ + 'error', + { + args: 'after-used', + argsIgnorePattern: '^_', + ignoreRestSiblings: false, + caughtErrors: 'none', + vars: 'all', + varsIgnorePattern: '^_', + }, + ], }, }, { diff --git a/services/web/.prettierignore b/services/web/.prettierignore index f4be187b87..94ab5579c2 100644 --- a/services/web/.prettierignore +++ b/services/web/.prettierignore @@ -6,6 +6,7 @@ frontend/js/vendor modules/**/frontend/js/vendor public/js public/minjs +frontend/stylesheets/bootstrap-5/modules/metrics/nvd3.scss frontend/stylesheets/components/nvd3.less frontend/js/features/source-editor/lezer-latex/latex.mjs frontend/js/features/source-editor/lezer-latex/latex.terms.mjs diff --git a/services/web/.storybook/preview.tsx b/services/web/.storybook/preview.tsx index e3838a6f97..320caac144 100644 --- a/services/web/.storybook/preview.tsx +++ b/services/web/.storybook/preview.tsx @@ -122,6 +122,12 @@ const preview: Preview = { // render stories in iframes, to isolate modals inlineStories: false, }, + options: { + storySort: { + method: 'alphabetical', + order: ['Shared'], + }, + }, }, globalTypes: { theme: { diff --git a/services/web/Makefile b/services/web/Makefile index c6916048d6..6ebbc357c6 100644 --- a/services/web/Makefile +++ b/services/web/Makefile @@ -83,6 +83,21 @@ test_unit_app: $(DOCKER_COMPOSE) run --name unit_test_$(BUILD_DIR_NAME) --rm test_unit $(DOCKER_COMPOSE) down -v -t 0 +test_unit_mocha: export COMPOSE_PROJECT_NAME=unit_test_mocha_$(BUILD_DIR_NAME) +test_unit_mocha: + $(DOCKER_COMPOSE) run --rm test_unit npm run test:unit:mocha + $(DOCKER_COMPOSE) down -v -t 0 + +test_unit_esm: export COMPOSE_PROJECT_NAME=unit_test_esm_$(BUILD_DIR_NAME) +test_unit_esm: + $(DOCKER_COMPOSE) run --rm test_unit npm run test:unit:esm + $(DOCKER_COMPOSE) down -v -t 0 + +test_unit_esm_watch: export COMPOSE_PROJECT_NAME=unit_test_esm_watch_$(BUILD_DIR_NAME) +test_unit_esm_watch: + $(DOCKER_COMPOSE) run --rm test_unit npm run test:unit:esm:watch + $(DOCKER_COMPOSE) down -v -t 0 + TEST_SUITES = $(sort $(filter-out \ $(wildcard test/unit/src/helpers/*), \ $(wildcard test/unit/src/*/*))) diff --git a/services/web/app/src/Features/Authorization/AuthorizationManager.js b/services/web/app/src/Features/Authorization/AuthorizationManager.js index 2f339de83d..22d92ea9d9 100644 --- a/services/web/app/src/Features/Authorization/AuthorizationManager.js +++ b/services/web/app/src/Features/Authorization/AuthorizationManager.js @@ -88,9 +88,54 @@ async function getPrivilegeLevelForProject( opts = {} ) { if (userId) { - return getPrivilegeLevelForProjectWithUser(userId, projectId, opts) + return await getPrivilegeLevelForProjectWithUser( + userId, + projectId, + null, + opts + ) } else { - return getPrivilegeLevelForProjectWithoutUser(projectId, token, 
opts) + return await getPrivilegeLevelForProjectWithoutUser(projectId, token, opts) + } +} + +/** + * Get the privilege level that the user has for the project. + * + * @param userId - The id of the user that wants to access the project. + * @param projectId - The id of the project to be accessed. + * @param {string} token + * @param {ProjectAccess} projectAccess + * @param {Object} opts + * @param {boolean} opts.ignoreSiteAdmin - Do not consider whether the user is + * a site admin. + * @param {boolean} opts.ignorePublicAccess - Do not consider the project is + * publicly accessible. + * + * @returns {string|boolean} The privilege level. One of "owner", + * "readAndWrite", "readOnly" or false. + */ +async function getPrivilegeLevelForProjectWithProjectAccess( + userId, + projectId, + token, + projectAccess, + opts = {} +) { + if (userId) { + return await getPrivilegeLevelForProjectWithUser( + userId, + projectId, + projectAccess, + opts + ) + } else { + return await _getPrivilegeLevelForProjectWithoutUserWithPublicAccessLevel( + projectId, + token, + projectAccess.publicAccessLevel(), + opts + ) } } @@ -98,6 +143,7 @@ async function getPrivilegeLevelForProject( async function getPrivilegeLevelForProjectWithUser( userId, projectId, + projectAccess, opts = {} ) { if (!opts.ignoreSiteAdmin) { @@ -106,11 +152,11 @@ async function getPrivilegeLevelForProjectWithUser( } } - const privilegeLevel = - await CollaboratorsGetter.promises.getMemberIdPrivilegeLevel( - userId, - projectId - ) + projectAccess = + projectAccess || + (await CollaboratorsGetter.promises.getProjectAccess(projectId)) + + const privilegeLevel = projectAccess.privilegeLevelForUser(userId) if (privilegeLevel && privilegeLevel !== PrivilegeLevels.NONE) { // The user has direct access return privilegeLevel @@ -119,7 +165,7 @@ async function getPrivilegeLevelForProjectWithUser( if (!opts.ignorePublicAccess) { // Legacy public-access system // User is present (not anonymous), but does not have direct access - const publicAccessLevel = await getPublicAccessLevel(projectId) + const publicAccessLevel = projectAccess.publicAccessLevel() if (publicAccessLevel === PublicAccessLevels.READ_ONLY) { return PrivilegeLevels.READ_ONLY } @@ -137,7 +183,21 @@ async function getPrivilegeLevelForProjectWithoutUser( token, opts = {} ) { - const publicAccessLevel = await getPublicAccessLevel(projectId) + return await _getPrivilegeLevelForProjectWithoutUserWithPublicAccessLevel( + projectId, + token, + await getPublicAccessLevel(projectId), + opts + ) +} + +// User is Anonymous, Try Token-based access +async function _getPrivilegeLevelForProjectWithoutUserWithPublicAccessLevel( + projectId, + token, + publicAccessLevel, + opts = {} +) { if (!opts.ignorePublicAccess) { if (publicAccessLevel === PublicAccessLevels.READ_ONLY) { // Legacy public read-only access for anonymous user @@ -149,7 +209,7 @@ async function getPrivilegeLevelForProjectWithoutUser( } } if (publicAccessLevel === PublicAccessLevels.TOKEN_BASED) { - return getPrivilegeLevelForProjectWithToken(projectId, token) + return await getPrivilegeLevelForProjectWithToken(projectId, token) } // Deny anonymous user access @@ -309,6 +369,7 @@ module.exports = { canUserRenameProject, canUserAdminProject, getPrivilegeLevelForProject, + getPrivilegeLevelForProjectWithProjectAccess, isRestrictedUserForProject, isUserSiteAdmin, }, diff --git a/services/web/app/src/Features/Chat/ChatManager.js b/services/web/app/src/Features/Chat/ChatManager.js index 9625881dd8..7eab6039d8 100644 --- 
a/services/web/app/src/Features/Chat/ChatManager.js +++ b/services/web/app/src/Features/Chat/ChatManager.js @@ -1,61 +1,46 @@ -const async = require('async') -const UserInfoManager = require('../User/UserInfoManager') const UserInfoController = require('../User/UserInfoController') -const { promisify } = require('@overleaf/promise-utils') +const UserGetter = require('../User/UserGetter') +const { callbackify } = require('@overleaf/promise-utils') -function injectUserInfoIntoThreads(threads, callback) { - // There will be a lot of repitition of user_ids, so first build a list - // of unique ones to perform db look ups on, then use these to populate the - // user fields - let message, thread, threadId, userId - if (callback == null) { - callback = function () {} - } - const userIds = {} - for (threadId in threads) { - thread = threads[threadId] +async function injectUserInfoIntoThreads(threads) { + const userIds = new Set() + for (const thread of Object.values(threads)) { if (thread.resolved) { - userIds[thread.resolved_by_user_id] = true + userIds.add(thread.resolved_by_user_id) } - for (message of Array.from(thread.messages)) { - userIds[message.user_id] = true + for (const message of thread.messages) { + userIds.add(message.user_id) } } - const jobs = [] - const users = {} - for (userId in userIds) { - ;(userId => - jobs.push(cb => - UserInfoManager.getPersonalInfo(userId, function (error, user) { - if (error != null) return cb(error) - user = UserInfoController.formatPersonalInfo(user) - users[userId] = user - cb() - }) - ))(userId) + const projection = { + _id: true, + first_name: true, + last_name: true, + email: true, } - - return async.series(jobs, function (error) { - if (error != null) { - return callback(error) + const users = await UserGetter.promises.getUsers(userIds, projection) + const usersById = new Map() + for (const user of users) { + usersById.set( + user._id.toString(), + UserInfoController.formatPersonalInfo(user) + ) + } + for (const thread of Object.values(threads)) { + if (thread.resolved) { + thread.resolved_by_user = usersById.get(thread.resolved_by_user_id) } - for (threadId in threads) { - thread = threads[threadId] - if (thread.resolved) { - thread.resolved_by_user = users[thread.resolved_by_user_id] - } - for (message of Array.from(thread.messages)) { - message.user = users[message.user_id] - } + for (const message of thread.messages) { + message.user = usersById.get(message.user_id) } - return callback(null, threads) - }) + } + return threads } module.exports = { - injectUserInfoIntoThreads, + injectUserInfoIntoThreads: callbackify(injectUserInfoIntoThreads), promises: { - injectUserInfoIntoThreads: promisify(injectUserInfoIntoThreads), + injectUserInfoIntoThreads, }, } diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsGetter.js b/services/web/app/src/Features/Collaborators/CollaboratorsGetter.js index caa6ef159d..a3543ae614 100644 --- a/services/web/app/src/Features/Collaborators/CollaboratorsGetter.js +++ b/services/web/app/src/Features/Collaborators/CollaboratorsGetter.js @@ -1,3 +1,4 @@ +// @ts-check const { callbackify } = require('util') const pLimit = require('p-limit') const { ObjectId } = require('mongodb-legacy') @@ -15,9 +16,6 @@ module.exports = { getMemberIdsWithPrivilegeLevels: callbackify(getMemberIdsWithPrivilegeLevels), getMemberIds: callbackify(getMemberIds), getInvitedMemberIds: callbackify(getInvitedMemberIds), - getInvitedMembersWithPrivilegeLevels: callbackify( - getInvitedMembersWithPrivilegeLevels - ), 
getInvitedMembersWithPrivilegeLevelsFromFields: callbackify( getInvitedMembersWithPrivilegeLevelsFromFields ), @@ -31,10 +29,10 @@ module.exports = { userIsTokenMember: callbackify(userIsTokenMember), getAllInvitedMembers: callbackify(getAllInvitedMembers), promises: { + getProjectAccess, getMemberIdsWithPrivilegeLevels, getMemberIds, getInvitedMemberIds, - getInvitedMembersWithPrivilegeLevels, getInvitedMembersWithPrivilegeLevelsFromFields, getMemberIdPrivilegeLevel, getInvitedEditCollaboratorCount, @@ -50,7 +48,202 @@ module.exports = { }, } -async function getMemberIdsWithPrivilegeLevels(projectId) { +/** + * @typedef ProjectMember + * @property {string} id + * @property {typeof PrivilegeLevels[keyof PrivilegeLevels]} privilegeLevel + * @property {typeof Sources[keyof Sources]} source + * @property {boolean} [pendingEditor] + * @property {boolean} [pendingReviewer] + */ + +/** + * @typedef LoadedProjectMember + * @property {typeof PrivilegeLevels[keyof PrivilegeLevels]} privilegeLevel + * @property {{_id: ObjectId, email: string, features: any, first_name: string, last_name: string, signUpDate: Date}} user + * @property {boolean} [pendingEditor] + * @property {boolean} [pendingReviewer] + */ + +// Wrapper for determining multiple dimensions of project access. +class ProjectAccess { + /** @type {ProjectMember[]} */ + #members + + /** @type {typeof PublicAccessLevels[keyof PublicAccessLevels]} */ + #publicAccessLevel + + /** + * @param {{ owner_ref: ObjectId; collaberator_refs: ObjectId[]; readOnly_refs: ObjectId[]; tokenAccessReadAndWrite_refs: ObjectId[]; tokenAccessReadOnly_refs: ObjectId[]; publicAccesLevel: typeof PublicAccessLevels[keyof PublicAccessLevels]; pendingEditor_refs: ObjectId[]; reviewer_refs: ObjectId[]; pendingReviewer_refs: ObjectId[]; }} project + */ + constructor(project) { + this.#members = _getMemberIdsWithPrivilegeLevelsFromFields( + project.owner_ref, + project.collaberator_refs, + project.readOnly_refs, + project.tokenAccessReadAndWrite_refs, + project.tokenAccessReadOnly_refs, + project.publicAccesLevel, + project.pendingEditor_refs, + project.reviewer_refs, + project.pendingReviewer_refs + ) + this.#publicAccessLevel = project.publicAccesLevel + } + + /** + * @return {Promise<{ownerMember: LoadedProjectMember|undefined, members: LoadedProjectMember[]}>} + */ + async loadOwnerAndInvitedMembers() { + const all = await _loadMembers( + this.#members.filter(m => m.source !== Sources.TOKEN) + ) + return { + ownerMember: all.find(m => m.privilegeLevel === PrivilegeLevels.OWNER), + members: all.filter(m => m.privilegeLevel !== PrivilegeLevels.OWNER), + } + } + + /** + * @return {Promise} + */ + async loadInvitedMembers() { + return _loadMembers( + this.#members.filter( + m => + m.source !== Sources.TOKEN && + m.privilegeLevel !== PrivilegeLevels.OWNER + ) + ) + } + + /** + * @return {Promise} + */ + async loadOwner() { + const [owner] = await _loadMembers( + this.#members.filter(m => m.privilegeLevel === PrivilegeLevels.OWNER) + ) + return owner + } + + /** + * @return {ProjectMember[]} + */ + allMembers() { + return this.#members + } + + /** + * @return {typeof PublicAccessLevels[keyof PublicAccessLevels]} + */ + publicAccessLevel() { + return this.#publicAccessLevel + } + + /** + * @return {string[]} + */ + memberIds() { + return this.#members.map(m => m.id) + } + + /** + * @return {string[]} + */ + invitedMemberIds() { + return this.#members.filter(m => m.source !== Sources.TOKEN).map(m => m.id) + } + + /** + * @param {string | ObjectId} userId + * @return {typeof 
PrivilegeLevels[keyof PrivilegeLevels]} + */ + privilegeLevelForUser(userId) { + if (!userId) return PrivilegeLevels.NONE + for (const member of this.#members) { + if (member.id === userId.toString()) { + return member.privilegeLevel + } + } + return PrivilegeLevels.NONE + } + + /** + * @param {string | ObjectId} userId + * @return {boolean} + */ + isUserTokenMember(userId) { + if (!userId) return false + for (const member of this.#members) { + if (member.id === userId.toString() && member.source === Sources.TOKEN) { + return true + } + } + return false + } + + /** + * @param {string | ObjectId} userId + * @return {boolean} + */ + isUserInvitedMember(userId) { + if (!userId) return false + for (const member of this.#members) { + if (member.id === userId.toString() && member.source !== Sources.TOKEN) { + return true + } + } + return false + } + + /** + * @param {string | ObjectId} userId + * @return {boolean} + */ + isUserInvitedReadWriteMember(userId) { + for (const member of this.#members) { + if ( + member.id.toString() === userId.toString() && + member.source !== Sources.TOKEN && + member.privilegeLevel === PrivilegeLevels.READ_AND_WRITE + ) { + return true + } + } + return false + } + + /** + * Counts invited members with editor or reviewer roles + * @return {number} + */ + countInvitedEditCollaborators() { + return this.#members.filter( + m => + m.source === Sources.INVITE && + (m.privilegeLevel === PrivilegeLevels.READ_AND_WRITE || + m.privilegeLevel === PrivilegeLevels.REVIEW) + ).length + } + + /** + * Counts invited members that are readonly pending editors or pending reviewers + * @return {number} + */ + countInvitedPendingEditors() { + return this.#members.filter( + m => + m.source === Sources.INVITE && + m.privilegeLevel === PrivilegeLevels.READ_ONLY && + (m.pendingEditor || m.pendingReviewer) + ).length + } +} + +module.exports.ProjectAccess = ProjectAccess + +async function getProjectAccess(projectId) { const project = await ProjectGetter.promises.getProject(projectId, { owner_ref: 1, collaberator_refs: 1, @@ -65,34 +258,19 @@ async function getMemberIdsWithPrivilegeLevels(projectId) { if (!project) { throw new Errors.NotFoundError(`no project found with id ${projectId}`) } - const memberIds = _getMemberIdsWithPrivilegeLevelsFromFields( - project.owner_ref, - project.collaberator_refs, - project.readOnly_refs, - project.tokenAccessReadAndWrite_refs, - project.tokenAccessReadOnly_refs, - project.publicAccesLevel, - project.pendingEditor_refs, - project.reviewer_refs, - project.pendingReviewer_refs - ) - return memberIds + return new ProjectAccess(project) +} + +async function getMemberIdsWithPrivilegeLevels(projectId) { + return (await getProjectAccess(projectId)).allMembers() } async function getMemberIds(projectId) { - const members = await getMemberIdsWithPrivilegeLevels(projectId) - return members.map(m => m.id) + return (await getProjectAccess(projectId)).memberIds() } async function getInvitedMemberIds(projectId) { - const members = await getMemberIdsWithPrivilegeLevels(projectId) - return members.filter(m => m.source !== Sources.TOKEN).map(m => m.id) -} - -async function getInvitedMembersWithPrivilegeLevels(projectId) { - let members = await getMemberIdsWithPrivilegeLevels(projectId) - members = members.filter(m => m.source !== Sources.TOKEN) - return _loadMembers(members) + return (await getProjectAccess(projectId)).invitedMemberIds() } async function getInvitedMembersWithPrivilegeLevelsFromFields( @@ -107,7 +285,7 @@ async function 
getInvitedMembersWithPrivilegeLevelsFromFields( readOnlyIds, [], [], - null, + 'private', [], reviewerIds, [] @@ -121,69 +299,31 @@ async function getMemberIdPrivilegeLevel(userId, projectId) { if (userId == null) { return PrivilegeLevels.NONE } - const members = await getMemberIdsWithPrivilegeLevels(projectId) - for (const member of members) { - if (member.id === userId.toString()) { - return member.privilegeLevel - } - } - return PrivilegeLevels.NONE + return (await getProjectAccess(projectId)).privilegeLevelForUser(userId) } async function getInvitedEditCollaboratorCount(projectId) { - // Counts invited members with editor or reviewer roles - const members = await getMemberIdsWithPrivilegeLevels(projectId) - return members.filter( - m => - m.source === Sources.INVITE && - (m.privilegeLevel === PrivilegeLevels.READ_AND_WRITE || - m.privilegeLevel === PrivilegeLevels.REVIEW) - ).length + return (await getProjectAccess(projectId)).countInvitedEditCollaborators() } async function getInvitedPendingEditorCount(projectId) { - // Only counts invited members that are readonly pending editors or pending - // reviewers - const members = await getMemberIdsWithPrivilegeLevels(projectId) - return members.filter( - m => - m.source === Sources.INVITE && - m.privilegeLevel === PrivilegeLevels.READ_ONLY && - (m.pendingEditor || m.pendingReviewer) - ).length + return (await getProjectAccess(projectId)).countInvitedPendingEditors() } async function isUserInvitedMemberOfProject(userId, projectId) { if (!userId) { return false } - const members = await getMemberIdsWithPrivilegeLevels(projectId) - for (const member of members) { - if ( - member.id.toString() === userId.toString() && - member.source !== Sources.TOKEN - ) { - return true - } - } - return false + return (await getProjectAccess(projectId)).isUserInvitedMember(userId) } async function isUserInvitedReadWriteMemberOfProject(userId, projectId) { if (!userId) { return false } - const members = await getMemberIdsWithPrivilegeLevels(projectId) - for (const member of members) { - if ( - member.id.toString() === userId.toString() && - member.source !== Sources.TOKEN && - member.privilegeLevel === PrivilegeLevels.READ_AND_WRITE - ) { - return true - } - } - return false + return (await getProjectAccess(projectId)).isUserInvitedReadWriteMember( + userId + ) } async function getPublicShareTokens(userId, projectId) { @@ -209,10 +349,13 @@ async function getPublicShareTokens(userId, projectId) { return null } + // @ts-ignore if (memberInfo.isOwner) { return memberInfo.tokens + // @ts-ignore } else if (memberInfo.hasTokenReadOnlyAccess) { return { + // @ts-ignore readOnly: memberInfo.tokens.readOnly, } } else { @@ -224,6 +367,7 @@ async function getPublicShareTokens(userId, projectId) { // excluding projects where the user is listed in the token access fields when // token access has been disabled. 
async function getProjectsUserIsMemberOf(userId, fields) { + // @ts-ignore const limit = pLimit(2) const [readAndWrite, review, readOnly, tokenReadAndWrite, tokenReadOnly] = await Promise.all([ @@ -274,10 +418,9 @@ async function dangerouslyGetAllProjectsUserIsMemberOf(userId, fields) { async function getAllInvitedMembers(projectId) { try { - const rawMembers = await getInvitedMembersWithPrivilegeLevels(projectId) - const { members } = - ProjectEditorHandler.buildOwnerAndMembersViews(rawMembers) - return members + const projectAccess = await getProjectAccess(projectId) + const invitedMembers = await projectAccess.loadInvitedMembers() + return invitedMembers.map(ProjectEditorHandler.buildUserModelView) } catch (err) { throw OError.tag(err, 'error getting members for project', { projectId }) } @@ -316,6 +459,19 @@ async function userIsReadWriteTokenMember(userId, projectId) { return project != null } +/** + * @param {ObjectId} ownerId + * @param {ObjectId[]} collaboratorIds + * @param {ObjectId[]} readOnlyIds + * @param {ObjectId[]} tokenAccessIds + * @param {ObjectId[]} tokenAccessReadOnlyIds + * @param {typeof PublicAccessLevels[keyof PublicAccessLevels]} publicAccessLevel + * @param {ObjectId[]} pendingEditorIds + * @param {ObjectId[]} reviewerIds + * @param {ObjectId[]} pendingReviewerIds + * @return {ProjectMember[]} + * @private + */ function _getMemberIdsWithPrivilegeLevelsFromFields( ownerId, collaboratorIds, @@ -384,7 +540,13 @@ function _getMemberIdsWithPrivilegeLevelsFromFields( return members } +/** + * @param {ProjectMember[]} members + * @return {Promise} + * @private + */ async function _loadMembers(members) { + if (members.length === 0) return [] const userIds = Array.from(new Set(members.map(m => m.id))) const users = new Map() for (const user of await UserGetter.promises.getUsers(userIds, { diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js b/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js index 96b4cd6e37..8b5b1bc3c2 100644 --- a/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js +++ b/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js @@ -161,6 +161,7 @@ async function addUserIdToProject( }) let level let existingUsers = project.collaberator_refs || [] + existingUsers = existingUsers.concat(project.reviewer_refs || []) existingUsers = existingUsers.concat(project.readOnly_refs || []) existingUsers = existingUsers.map(u => u.toString()) if (existingUsers.includes(userId.toString())) { diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.mjs b/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.mjs index 4c2d911709..db853afac3 100644 --- a/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.mjs +++ b/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.mjs @@ -16,7 +16,6 @@ import ProjectAuditLogHandler from '../Project/ProjectAuditLogHandler.js' import Errors from '../Errors/Errors.js' import AuthenticationController from '../Authentication/AuthenticationController.js' import PrivilegeLevels from '../Authorization/PrivilegeLevels.js' -import SplitTestHandler from '../SplitTests/SplitTestHandler.js' // This rate limiter allows a different number of requests depending on the // number of callaborators a user is allowed. 
This is implemented by providing @@ -246,9 +245,6 @@ async function viewInvite(req, res) { const projectId = req.params.Project_id const { token } = req.params - // Read split test assignment so that it's available for Pug to read - await SplitTestHandler.promises.getAssignment(req, res, 'core-pug-bs5') - const _renderInvalidPage = function () { res.status(404) logger.debug({ projectId }, 'invite not valid, rendering not-valid page') diff --git a/services/web/app/src/Features/Docstore/DocstoreManager.js b/services/web/app/src/Features/Docstore/DocstoreManager.js index 5fe0f27dc9..4074b90605 100644 --- a/services/web/app/src/Features/Docstore/DocstoreManager.js +++ b/services/web/app/src/Features/Docstore/DocstoreManager.js @@ -1,10 +1,11 @@ const { promisify } = require('util') -const { promisifyMultiResult } = require('@overleaf/promise-utils') +const { promisifyMultiResult, callbackify } = require('@overleaf/promise-utils') const request = require('request').defaults({ jar: false }) const OError = require('@overleaf/o-error') const logger = require('@overleaf/logger') const settings = require('@overleaf/settings') const Errors = require('../Errors/Errors') +const { fetchJson } = require('@overleaf/fetch-utils') const TIMEOUT = 30 * 1000 // request timeout @@ -86,6 +87,22 @@ function getAllDeletedDocs(projectId, callback) { }) } +/** + * @param {string} projectId + */ +async function getCommentThreadIds(projectId) { + const url = `${settings.apis.docstore.url}/project/${projectId}/comment-thread-ids` + return fetchJson(url, { signal: AbortSignal.timeout(TIMEOUT) }) +} + +/** + * @param {string} projectId + */ +async function getTrackedChangesUserIds(projectId) { + const url = `${settings.apis.docstore.url}/project/${projectId}/tracked-changes-user-ids` + return fetchJson(url, { signal: AbortSignal.timeout(TIMEOUT) }) +} + /** * @param {string} projectId * @param {Callback} callback @@ -292,6 +309,8 @@ module.exports = { getAllDeletedDocs, getAllRanges, getDoc, + getCommentThreadIds: callbackify(getCommentThreadIds), + getTrackedChangesUserIds: callbackify(getTrackedChangesUserIds), isDocDeleted, updateDoc, projectHasRanges, @@ -304,6 +323,8 @@ module.exports = { getAllDeletedDocs: promisify(getAllDeletedDocs), getAllRanges: promisify(getAllRanges), getDoc: promisifyMultiResult(getDoc, ['lines', 'rev', 'version', 'ranges']), + getCommentThreadIds, + getTrackedChangesUserIds, isDocDeleted: promisify(isDocDeleted), updateDoc: promisifyMultiResult(updateDoc, ['modified', 'rev']), projectHasRanges: promisify(projectHasRanges), diff --git a/services/web/app/src/Features/Editor/EditorHttpController.js b/services/web/app/src/Features/Editor/EditorHttpController.js index 8128a95b26..f44b57f069 100644 --- a/services/web/app/src/Features/Editor/EditorHttpController.js +++ b/services/web/app/src/Features/Editor/EditorHttpController.js @@ -4,14 +4,13 @@ const ProjectGetter = require('../Project/ProjectGetter') const AuthorizationManager = require('../Authorization/AuthorizationManager') const ProjectEditorHandler = require('../Project/ProjectEditorHandler') const Metrics = require('@overleaf/metrics') -const CollaboratorsGetter = require('../Collaborators/CollaboratorsGetter') const CollaboratorsInviteGetter = require('../Collaborators/CollaboratorsInviteGetter') -const CollaboratorsHandler = require('../Collaborators/CollaboratorsHandler') const PrivilegeLevels = require('../Authorization/PrivilegeLevels') const SessionManager = require('../Authentication/SessionManager') const Errors = 
require('../Errors/Errors') const { expressify } = require('@overleaf/promise-utils') const Settings = require('@overleaf/settings') +const { ProjectAccess } = require('../Collaborators/CollaboratorsGetter') module.exports = { joinProject: expressify(joinProject), @@ -43,12 +42,6 @@ async function joinProject(req, res, next) { if (!project) { return res.sendStatus(403) } - // Hide sensitive data if the user is restricted - if (isRestrictedUser) { - project.owner = { _id: project.owner._id } - project.members = [] - project.invites = [] - } // Only show the 'renamed or deleted' message once if (project.deletedByExternalDataSource) { await ProjectDeleter.promises.unmarkAsDeletedByExternalSource(projectId) @@ -75,42 +68,43 @@ async function _buildJoinProjectView(req, projectId, userId) { if (project == null) { throw new Errors.NotFoundError('project not found') } - const members = - await CollaboratorsGetter.promises.getInvitedMembersWithPrivilegeLevels( - projectId - ) + const projectAccess = new ProjectAccess(project) const token = req.body.anonymousAccessToken const privilegeLevel = - await AuthorizationManager.promises.getPrivilegeLevelForProject( + await AuthorizationManager.promises.getPrivilegeLevelForProjectWithProjectAccess( userId, projectId, - token + token, + projectAccess ) if (privilegeLevel == null || privilegeLevel === PrivilegeLevels.NONE) { return { project: null, privilegeLevel: null, isRestrictedUser: false } } - const invites = - await CollaboratorsInviteGetter.promises.getAllInvites(projectId) - const isTokenMember = await CollaboratorsHandler.promises.userIsTokenMember( - userId, - projectId - ) - const isInvitedMember = - await CollaboratorsGetter.promises.isUserInvitedMemberOfProject( - userId, - projectId - ) + const isTokenMember = projectAccess.isUserTokenMember(userId) + const isInvitedMember = projectAccess.isUserInvitedMember(userId) const isRestrictedUser = AuthorizationManager.isRestrictedUser( userId, privilegeLevel, isTokenMember, isInvitedMember ) + let ownerMember + let members = [] + let invites = [] + if (isRestrictedUser) { + ownerMember = await projectAccess.loadOwner() + } else { + ;({ ownerMember, members } = + await projectAccess.loadOwnerAndInvitedMembers()) + invites = await CollaboratorsInviteGetter.promises.getAllInvites(projectId) + } return { project: ProjectEditorHandler.buildProjectModelView( project, + ownerMember, members, - invites + invites, + isRestrictedUser ), privilegeLevel, isTokenMember, diff --git a/services/web/app/src/Features/Email/EmailBuilder.js b/services/web/app/src/Features/Email/EmailBuilder.js index 01565201ac..4741838b15 100644 --- a/services/web/app/src/Features/Email/EmailBuilder.js +++ b/services/web/app/src/Features/Email/EmailBuilder.js @@ -949,6 +949,33 @@ templates.welcomeWithoutCTA = NoCTAEmailTemplate({ }, }) +templates.removeGroupMember = NoCTAEmailTemplate({ + subject(opts) { + return `Your ${settings.appName} account has been removed from ${opts.adminName}’s group` + }, + title(opts) { + return `Your ${settings.appName} account has been removed from ${opts.adminName}’s group` + }, + greeting() { + return '' + }, + message() { + const passwordResetUrl = `${settings.siteUrl}/user/password/reset` + + return [ + 'Don’t worry, your account and projects are still accessible. But there are a few changes to be aware of:', + '
<ul>' +
+        `<li>Your account will have reverted to a free ${settings.appName} plan.</li>`,
+      `<li>Any project collaborators have been set to read-only (you can invite one collaborator per project on the free plan).</li>`,
+      `<li>If you previously logged in via SSO, you’ll need to <a href="${passwordResetUrl}">set a password</a> to access your account.</li>` +
+        '</ul>
', + `If you think this has been done in error, please contact your group admin.`, + `Thanks!`, + `Team ${settings.appName}`, + ] + }, +}) + function _formatUserNameAndEmail(user, placeholder) { if (user.first_name && user.last_name) { const fullName = `${user.first_name} ${user.last_name}` diff --git a/services/web/app/src/Features/Email/EmailSender.js b/services/web/app/src/Features/Email/EmailSender.js index c11369cb93..bb9374c2bb 100644 --- a/services/web/app/src/Features/Email/EmailSender.js +++ b/services/web/app/src/Features/Email/EmailSender.js @@ -48,6 +48,7 @@ function getClient() { 'secure', 'auth', 'ignoreTLS', + 'tls', 'logger', 'name' ) diff --git a/services/web/app/src/Features/Errors/Errors.js b/services/web/app/src/Features/Errors/Errors.js index 4b1b7dd064..487b8cbd03 100644 --- a/services/web/app/src/Features/Errors/Errors.js +++ b/services/web/app/src/Features/Errors/Errors.js @@ -47,6 +47,8 @@ class DuplicateNameError extends OError {} class InvalidNameError extends BackwardCompatibleError {} +class IndeterminateInvoiceError extends OError {} + class UnsupportedFileTypeError extends BackwardCompatibleError {} class FileTooLargeError extends BackwardCompatibleError {} @@ -333,6 +335,7 @@ module.exports = { UnconfirmedEmailError, EmailExistsError, InvalidError, + IndeterminateInvoiceError, NotInV2Error, OutputFileFetchFailedError, SAMLAssertionAudienceMismatch, diff --git a/services/web/app/src/Features/Helpers/AuthorizationHelper.js b/services/web/app/src/Features/Helpers/AuthorizationHelper.js index f193398b87..8369f2d321 100644 --- a/services/web/app/src/Features/Helpers/AuthorizationHelper.js +++ b/services/web/app/src/Features/Helpers/AuthorizationHelper.js @@ -1,14 +1,7 @@ const { UserSchema } = require('../../models/User') -const SplitTestHandler = require('../SplitTests/SplitTestHandler') -const ProjectGetter = require('../Project/ProjectGetter') -const { callbackify } = require('@overleaf/promise-utils') module.exports = { hasAnyStaffAccess, - isReviewerRoleEnabled: callbackify(isReviewerRoleEnabled), - promises: { - isReviewerRoleEnabled, - }, } function hasAnyStaffAccess(user) { @@ -21,24 +14,3 @@ function hasAnyStaffAccess(user) { } return false } - -async function isReviewerRoleEnabled(projectId) { - const project = await ProjectGetter.promises.getProject(projectId, { - reviewer_refs: 1, - owner_ref: 1, - }) - - // if there are reviewers, it means the role is enabled - if (Object.keys(project.reviewer_refs || {}).length > 0) { - return true - } - - // if there are no reviewers, check split test from project owner - const reviewerRoleAssigment = - await SplitTestHandler.promises.getAssignmentForUser( - project.owner_ref, - 'reviewer-role' - ) - - return reviewerRoleAssigment.variant === 'enabled' -} diff --git a/services/web/app/src/Features/History/HistoryManager.js b/services/web/app/src/Features/History/HistoryManager.js index fe9e6e86a7..42d7e229bf 100644 --- a/services/web/app/src/Features/History/HistoryManager.js +++ b/services/web/app/src/Features/History/HistoryManager.js @@ -11,7 +11,7 @@ const OError = require('@overleaf/o-error') const UserGetter = require('../User/UserGetter') const ProjectGetter = require('../Project/ProjectGetter') const HistoryBackupDeletionHandler = require('./HistoryBackupDeletionHandler') -const { db, ObjectId } = require('../../infrastructure/mongodb') +const { db, ObjectId, waitForDb } = require('../../infrastructure/mongodb') const Metrics = require('@overleaf/metrics') const logger = require('@overleaf/logger') const { 
NotFoundError } = require('../Errors/Errors') @@ -50,6 +50,7 @@ function getBlobLocation(projectId, hash) { } async function loadGlobalBlobs() { + await waitForDb() // CHANGE FROM SOURCE: wait for db before running query. const blobs = db.projectHistoryGlobalBlobs.find() for await (const blob of blobs) { GLOBAL_BLOBS.add(blob._id) // CHANGE FROM SOURCE: only store hashes. diff --git a/services/web/app/src/Features/History/RestoreManager.js b/services/web/app/src/Features/History/RestoreManager.js index 8c73695eed..16ef2024f6 100644 --- a/services/web/app/src/Features/History/RestoreManager.js +++ b/services/web/app/src/Features/History/RestoreManager.js @@ -18,6 +18,12 @@ const OError = require('@overleaf/o-error') const ProjectGetter = require('../Project/ProjectGetter') const ProjectEntityHandler = require('../Project/ProjectEntityHandler') +async function getCommentThreadIds(projectId) { + await DocumentUpdaterHandler.promises.flushProjectToMongo(projectId) + const raw = await DocstoreManager.promises.getCommentThreadIds(projectId) + return new Map(Object.entries(raw).map(([doc, ids]) => [doc, new Set(ids)])) +} + const RestoreManager = { async restoreFileFromV2(userId, projectId, version, pathname) { const fsPath = await RestoreManager._writeFileVersionToDisk( @@ -52,6 +58,25 @@ const RestoreManager = { }, async revertFile(userId, projectId, version, pathname, options = {}) { + const threadIds = await getCommentThreadIds(projectId) + return await RestoreManager._revertSingleFile( + userId, + projectId, + version, + pathname, + threadIds, + options + ) + }, + + async _revertSingleFile( + userId, + projectId, + version, + pathname, + threadIds, + options = {} + ) { const project = await ProjectGetter.promises.getProject(projectId, { overleaf: true, }) @@ -115,6 +140,7 @@ const RestoreManager = { origin, userId ) + threadIds.delete(file.element._id.toString()) } const { metadata } = await RestoreManager._getMetadataFromHistory( @@ -154,22 +180,12 @@ const RestoreManager = { const documentCommentIds = new Set( ranges.comments?.map(({ op: { t } }) => t) ) - - await DocumentUpdaterHandler.promises.flushProjectToMongo(projectId) - - const docsWithRanges = - await DocstoreManager.promises.getAllRanges(projectId) - - const nonOrphanedThreadIds = new Set() - for (const { ranges } of docsWithRanges) { - for (const comment of ranges.comments ?? []) { - nonOrphanedThreadIds.add(comment.op.t) + const commentIdsToDuplicate = Array.from(documentCommentIds).filter(id => { + for (const ids of threadIds.values()) { + if (ids.has(id)) return true } - } - - const commentIdsToDuplicate = Array.from(documentCommentIds).filter(id => - nonOrphanedThreadIds.has(id) - ) + return false + }) const newRanges = { changes: ranges.changes, comments: [] } @@ -191,6 +207,7 @@ const RestoreManager = { continue } // We have a new id for this comment thread + comment.id = result.duplicateId comment.op.t = result.duplicateId } newRanges.comments.push(comment) @@ -231,8 +248,6 @@ const RestoreManager = { delete threadData.resolved_by_user_id delete threadData.resolved_at } - // remove the resolved property from the comment range as the chat service is synced at this point - delete commentRange.op.resolved } await ChatManager.promises.injectUserInfoIntoThreads(newCommentThreadData) @@ -259,6 +274,11 @@ const RestoreManager = { origin, userId ) + // For revertProject: The next doc that gets reverted will need to duplicate all the threads seen here. 
+ threadIds.set( + _id.toString(), + new Set(newRanges.comments.map(({ op: { t } }) => t)) + ) return { _id, @@ -321,11 +341,17 @@ const RestoreManager = { version, timestamp: new Date(updateAtVersion.meta.end_ts).toISOString(), } + const threadIds = await getCommentThreadIds(projectId) for (const pathname of pathsAtPastVersion) { - await RestoreManager.revertFile(userId, projectId, version, pathname, { - origin, - }) + await RestoreManager._revertSingleFile( + userId, + projectId, + version, + pathname, + threadIds, + { origin } + ) } const entitiesAtLiveVersion = diff --git a/services/web/app/src/Features/Notifications/NotificationsController.mjs b/services/web/app/src/Features/Notifications/NotificationsController.mjs index ae1d9208f3..35b5f0a677 100644 --- a/services/web/app/src/Features/Notifications/NotificationsController.mjs +++ b/services/web/app/src/Features/Notifications/NotificationsController.mjs @@ -33,4 +33,26 @@ export default { res.sendStatus(200) ) }, + + getNotification(req, res, next) { + const userId = SessionManager.getLoggedInUserId(req.session) + const { notificationId } = req.params + NotificationsHandler.getUserNotifications( + userId, + function (err, unreadNotifications) { + if (err) { + return next(err) + } + const notification = unreadNotifications.find( + n => n._id === notificationId + ) + + if (!notification) { + return res.status(404).end() + } + + res.json(notification) + } + ) + }, } diff --git a/services/web/app/src/Features/PasswordReset/PasswordResetController.mjs b/services/web/app/src/Features/PasswordReset/PasswordResetController.mjs index 40e3a06e44..771782c302 100644 --- a/services/web/app/src/Features/PasswordReset/PasswordResetController.mjs +++ b/services/web/app/src/Features/PasswordReset/PasswordResetController.mjs @@ -119,7 +119,11 @@ async function requestReset(req, res, next) { OError.tag(err, 'failed to generate and email password reset token', { email, }) - if (err.message === 'user does not have permission for change-password') { + + if ( + err.message === + 'user does not have one or more permissions within change-password' + ) { return res.status(403).json({ message: { key: 'no-password-allowed-due-to-sso', @@ -148,7 +152,7 @@ async function renderSetPasswordForm(req, res, next) { const { variant } = await SplitTestHandler.promises.getAssignment( req, res, - 'auth-pages-bs5' + 'bs5-auth-pages' ) if (req.query.passwordResetToken != null) { @@ -217,7 +221,7 @@ async function renderRequestResetForm(req, res) { const { variant } = await SplitTestHandler.promises.getAssignment( req, res, - 'auth-pages-bs5' + 'bs5-auth-pages' ) const template = diff --git a/services/web/app/src/Features/Project/ProjectController.js b/services/web/app/src/Features/Project/ProjectController.js index 160914db81..6744fb8d78 100644 --- a/services/web/app/src/Features/Project/ProjectController.js +++ b/services/web/app/src/Features/Project/ProjectController.js @@ -14,6 +14,7 @@ const ProjectHelper = require('./ProjectHelper') const metrics = require('@overleaf/metrics') const { User } = require('../../models/User') const SubscriptionLocator = require('../Subscription/SubscriptionLocator') +const { isPaidSubscription } = require('../Subscription/SubscriptionHelper') const LimitationsManager = require('../Subscription/LimitationsManager') const Settings = require('@overleaf/settings') const AuthorizationManager = require('../Authorization/AuthorizationManager') @@ -347,12 +348,12 @@ const _ProjectController = { 'track-pdf-download', !anonymous && 
'writefull-oauth-promotion', 'hotjar', - 'reviewer-role', 'editor-redesign', 'paywall-change-compile-timeout', 'overleaf-assist-bundle', 'word-count-client', 'editor-popup-ux-survey', + 'new-editor-error-logs-redesign', ].filter(Boolean) const getUserValues = async userId => @@ -482,12 +483,6 @@ const _ProjectController = { anonRequestToken ) - const reviewerRoleAssignment = - await SplitTestHandler.promises.getAssignmentForUser( - project.owner_ref, - 'reviewer-role' - ) - await Modules.promises.hooks.fire('enforceCollaboratorLimit', projectId) if (isTokenMember) { // Check explicitly that the user is in read write token refs, while this could be inferred @@ -661,17 +656,12 @@ const _ProjectController = { } } - const hasNonRecurlySubscription = - subscription && !subscription.recurlySubscription_id + const hasPaidSubscription = isPaidSubscription(subscription) const hasManuallyCollectedSubscription = subscription?.collectionMethod === 'manual' - const canPurchaseAddons = !( - hasNonRecurlySubscription || hasManuallyCollectedSubscription - ) const assistantDisabled = user.aiErrorAssistant?.enabled === false // the assistant has been manually disabled by the user const canUseErrorAssistant = - (user.features?.aiErrorAssistant || canPurchaseAddons) && - !assistantDisabled + !hasManuallyCollectedSubscription && !assistantDisabled let featureUsage = {} @@ -738,12 +728,11 @@ const _ProjectController = { ? 'project/ide-react-detached' : 'project/ide-react' - let chatEnabled - if (Features.hasFeature('saas')) { - chatEnabled = - Features.hasFeature('chat') && req.capabilitySet.has('chat') - } else { - chatEnabled = Features.hasFeature('chat') + const capabilities = [...req.capabilitySet] + + // make sure the capability is added to CE/SP when the feature is enabled + if (!Features.hasFeature('saas') && Features.hasFeature('chat')) { + capabilities.push('chat') } const isOverleafAssistBundleEnabled = @@ -775,6 +764,12 @@ const _ProjectController = { isOverleafAssistBundleEnabled && (await ProjectController._getAddonPrices(req, res)) + const reducedTimeoutWarning = + await SplitTestHandler.promises.getAssignmentForUser( + project.owner_ref, + '10s-timeout-warning' + ) + let planCode = subscription?.planCode if (!planCode && !userInNonIndividualSub) { planCode = 'personal' @@ -798,7 +793,7 @@ const _ProjectController = { referal_id: user.referal_id, signUpDate: user.signUpDate, allowedFreeTrial, - hasRecurlySubscription: subscription?.recurlySubscription_id != null, + hasPaidSubscription, featureSwitches: user.featureSwitches, features: fullFeatureSet, featureUsage, @@ -831,6 +826,7 @@ const _ProjectController = { lineHeight: user.ace.lineHeight || 'normal', overallTheme: user.ace.overallTheme, mathPreview: user.ace.mathPreview, + breadcrumbs: user.ace.breadcrumbs, referencesSearchMode: user.ace.referencesSearchMode, enableNewEditor: user.ace.enableNewEditor ?? 
true, }, @@ -844,7 +840,7 @@ const _ProjectController = { isTokenMember, isInvitedMember ), - chatEnabled, + capabilities, projectHistoryBlobsEnabled: Features.hasFeature( 'project-history-blobs' ), @@ -883,14 +879,15 @@ const _ProjectController = { : null, isSaas: Features.hasFeature('saas'), shouldLoadHotjar: splitTestAssignments.hotjar?.variant === 'enabled', - isReviewerRoleEnabled: - reviewerRoleAssignment?.variant === 'enabled' || - Object.keys(project.reviewer_refs || {}).length > 0, isPaywallChangeCompileTimeoutEnabled, isOverleafAssistBundleEnabled, paywallPlans, customerIoEnabled, addonPrices, + compileSettings: { + reducedTimeoutWarning: reducedTimeoutWarning?.variant, + compileTimeout: ownerFeatures?.compileTimeout, + }, }) timer.done() } catch (err) { diff --git a/services/web/app/src/Features/Project/ProjectDeleter.js b/services/web/app/src/Features/Project/ProjectDeleter.js index c5dcafd335..b81281e319 100644 --- a/services/web/app/src/Features/Project/ProjectDeleter.js +++ b/services/web/app/src/Features/Project/ProjectDeleter.js @@ -106,8 +106,24 @@ async function expireDeletedProjectsAfterDuration() { deletedProject => deletedProject.deleterData.deletedProjectId ) ) - for (const projectId of projectIds) { - await expireDeletedProject(projectId) + logger.info( + { projectCount: projectIds.length }, + 'expiring batch of deleted projects' + ) + try { + for (const projectId of projectIds) { + await expireDeletedProject(projectId) + } + logger.info( + { projectCount: projectIds.length }, + 'batch of deleted projects expired successfully' + ) + } catch (error) { + logger.warn( + { error }, + 'something went wrong expiring batch of deleted projects' + ) + throw error } } @@ -276,12 +292,15 @@ async function deleteProject(projectId, options = {}) { ) await Project.deleteOne({ _id: projectId }).exec() + + logger.info( + { projectId, userId: project.owner_ref }, + 'successfully deleted project' + ) } catch (err) { logger.warn({ err }, 'problem deleting project') throw err } - - logger.debug({ projectId }, 'successfully deleted project') } async function undeleteProject(projectId, options = {}) { @@ -324,19 +343,6 @@ async function undeleteProject(projectId, options = {}) { }) restored.deletedDocs = [] } - if (restored.deletedFiles && restored.deletedFiles.length > 0) { - filterDuplicateDeletedFilesInPlace(restored) - const deletedFiles = restored.deletedFiles.map(file => { - // break free from the model - file = file.toObject() - - // add projectId - file.projectId = projectId - return file - }) - await db.deletedFiles.insertMany(deletedFiles) - restored.deletedFiles = [] - } // we can't use Mongoose to re-insert the project, as it won't // create a new document with an _id already specified. We need to @@ -348,17 +354,22 @@ async function undeleteProject(projectId, options = {}) { async function expireDeletedProject(projectId) { try { + logger.info({ projectId }, 'expiring deleted project') const activeProject = await Project.findById(projectId).exec() if (activeProject) { // That project is active. The deleted project record might be there // because of an incomplete delete or undelete operation. Clean it up and // return. 
+ logger.info( + { projectId }, + 'deleted project record found but project is active' + ) await DeletedProject.deleteOne({ 'deleterData.deletedProjectId': projectId, }) - await ProjectAuditLogEntry.deleteMany({ projectId }) return } + const deletedProject = await DeletedProject.findOne({ 'deleterData.deletedProjectId': projectId, }).exec() @@ -374,12 +385,14 @@ async function expireDeletedProject(projectId) { ) return } - + const userId = deletedProject.deletedProjectOwnerId const historyId = deletedProject.project.overleaf && deletedProject.project.overleaf.history && deletedProject.project.overleaf.history.id + logger.info({ projectId, userId }, 'destroying expired project data') + await Promise.all([ DocstoreManager.promises.destroyProject(deletedProject.project._id), HistoryManager.promises.deleteProject( @@ -388,11 +401,14 @@ async function expireDeletedProject(projectId) { ), FilestoreHandler.promises.deleteProject(deletedProject.project._id), ChatApiHandler.promises.destroyProject(deletedProject.project._id), - hardDeleteDeletedFiles(deletedProject.project._id), ProjectAuditLogEntry.deleteMany({ projectId }), Modules.promises.hooks.fire('projectExpired', deletedProject.project._id), ]) + logger.info( + { projectId, userId }, + 'redacting PII from the deleted project record' + ) await DeletedProject.updateOne( { _id: deletedProject._id, @@ -404,36 +420,9 @@ async function expireDeletedProject(projectId) { }, } ).exec() + logger.info({ projectId, userId }, 'expired deleted project successfully') } catch (error) { logger.warn({ projectId, error }, 'error expiring deleted project') throw error } } - -function filterDuplicateDeletedFilesInPlace(project) { - const fileIds = new Set() - project.deletedFiles = project.deletedFiles.filter(file => { - const id = file._id.toString() - if (fileIds.has(id)) return false - fileIds.add(id) - return true - }) -} - -let deletedFilesProjectIdIndexExist -async function doesDeletedFilesProjectIdIndexExist() { - if (typeof deletedFilesProjectIdIndexExist !== 'boolean') { - // Resolve this about once. No need for locking or retry handling. - deletedFilesProjectIdIndexExist = - await db.deletedFiles.indexExists('projectId_1') - } - return deletedFilesProjectIdIndexExist -} - -async function hardDeleteDeletedFiles(projectId) { - if (!(await doesDeletedFilesProjectIdIndexExist())) { - // Running the deletion command w/o index would kill mongo performance - return - } - return db.deletedFiles.deleteMany({ projectId }) -} diff --git a/services/web/app/src/Features/Project/ProjectEditorHandler.js b/services/web/app/src/Features/Project/ProjectEditorHandler.js index 05e5beba09..3d3d300e66 100644 --- a/services/web/app/src/Features/Project/ProjectEditorHandler.js +++ b/services/web/app/src/Features/Project/ProjectEditorHandler.js @@ -6,8 +6,13 @@ const Features = require('../../infrastructure/Features') module.exports = ProjectEditorHandler = { trackChangesAvailable: false, - buildProjectModelView(project, members, invites) { - let owner, ownerFeatures + buildProjectModelView( + project, + ownerMember, + members, + invites, + isRestrictedUser + ) { const result = { _id: project._id, name: project.name, @@ -20,20 +25,23 @@ module.exports = ProjectEditorHandler = { description: project.description, spellCheckLanguage: project.spellCheckLanguage, deletedByExternalDataSource: project.deletedByExternalDataSource || false, - members: [], - invites: this.buildInvitesView(invites), imageName: project.imageName != null ? 
Path.basename(project.imageName) : undefined, } - ;({ owner, ownerFeatures, members } = - this.buildOwnerAndMembersViews(members)) - result.owner = owner - result.members = members + if (isRestrictedUser) { + result.owner = { _id: project.owner_ref } + result.members = [] + result.invites = [] + } else { + result.owner = this.buildUserModelView(ownerMember) + result.members = members.map(this.buildUserModelView) + result.invites = this.buildInvitesView(invites) + } - result.features = _.defaults(ownerFeatures || {}, { + result.features = _.defaults(ownerMember?.user?.features || {}, { collaborators: -1, // Infinite versioning: false, dropbox: false, @@ -62,25 +70,6 @@ module.exports = ProjectEditorHandler = { return result }, - buildOwnerAndMembersViews(members) { - let owner = null - let ownerFeatures = null - const filteredMembers = [] - for (const member of members || []) { - if (member.privilegeLevel === 'owner') { - ownerFeatures = member.user.features - owner = this.buildUserModelView(member) - } else { - filteredMembers.push(this.buildUserModelView(member)) - } - } - return { - owner, - ownerFeatures, - members: filteredMembers, - } - }, - buildUserModelView(member) { const user = member.user return { diff --git a/services/web/app/src/Features/Project/ProjectEntityMongoUpdateHandler.js b/services/web/app/src/Features/Project/ProjectEntityMongoUpdateHandler.js index 84002f1a38..895350bf37 100644 --- a/services/web/app/src/Features/Project/ProjectEntityMongoUpdateHandler.js +++ b/services/web/app/src/Features/Project/ProjectEntityMongoUpdateHandler.js @@ -15,7 +15,6 @@ const ProjectGetter = require('./ProjectGetter') const ProjectLocator = require('./ProjectLocator') const FolderStructureBuilder = require('./FolderStructureBuilder') const SafePath = require('./SafePath') -const { DeletedFile } = require('../../models/DeletedFile') const { iterablePaths } = require('./IterablePath') const LOCK_NAMESPACE = 'mongoTransaction' @@ -72,7 +71,6 @@ module.exports = { 'changes', ]), createNewFolderStructure: callbackify(wrapWithLock(createNewFolderStructure)), - _insertDeletedFileReference: callbackify(_insertDeletedFileReference), _putElement: callbackifyMultiResult(_putElement, ['result', 'project']), _confirmFolder, promises: { @@ -87,7 +85,6 @@ module.exports = { deleteEntity: wrapWithLock(deleteEntity), renameEntity: wrapWithLock(renameEntity), createNewFolderStructure: wrapWithLock(createNewFolderStructure), - _insertDeletedFileReference, _putElement, }, } @@ -162,7 +159,6 @@ async function replaceFileWithNew(projectId, fileId, newFileRef, userId) { element_id: fileId, type: 'file', }) - await _insertDeletedFileReference(projectId, fileRef) const newProject = await Project.findOneAndUpdate( { _id: project._id, [path.mongo]: { $exists: true } }, { @@ -480,17 +476,6 @@ async function renameEntity(projectId, entityId, entityType, newName, userId) { } } -async function _insertDeletedFileReference(projectId, fileRef) { - await DeletedFile.create({ - projectId, - _id: fileRef._id, - name: fileRef.name, - linkedFileData: fileRef.linkedFileData, - hash: fileRef.hash, - deletedAt: new Date(), - }) -} - async function _removeElementFromMongoArray( modelId, path, diff --git a/services/web/app/src/Features/Project/ProjectEntityUpdateHandler.js b/services/web/app/src/Features/Project/ProjectEntityUpdateHandler.js index 585c2d2698..d03cb7f95a 100644 --- a/services/web/app/src/Features/Project/ProjectEntityUpdateHandler.js +++ b/services/web/app/src/Features/Project/ProjectEntityUpdateHandler.js @@ 
-1627,8 +1627,6 @@ const ProjectEntityUpdateHandler = { entry.path, userId ) - } else if (entry.type === 'file') { - await ProjectEntityUpdateHandler._cleanUpFile(project, entry.entity) } } return subtreeListing @@ -1679,13 +1677,6 @@ const ProjectEntityUpdateHandler = { return await DocumentUpdaterHandler.promises.deleteDoc(projectId, docId) }, - - async _cleanUpFile(project, file) { - return await ProjectEntityMongoUpdateHandler.promises._insertDeletedFileReference( - project._id, - file - ) - }, } /** diff --git a/services/web/app/src/Features/Project/ProjectListController.mjs b/services/web/app/src/Features/Project/ProjectListController.mjs index c62396e153..ab2b0e3082 100644 --- a/services/web/app/src/Features/Project/ProjectListController.mjs +++ b/services/web/app/src/Features/Project/ProjectListController.mjs @@ -26,6 +26,7 @@ import GeoIpLookup from '../../infrastructure/GeoIpLookup.js' import SplitTestHandler from '../SplitTests/SplitTestHandler.js' import SplitTestSessionHandler from '../SplitTests/SplitTestSessionHandler.js' import TutorialHandler from '../Tutorial/TutorialHandler.js' +import SubscriptionHelper from '../Subscription/SubscriptionHelper.js' /** * @import { GetProjectsRequest, GetProjectsResponse, AllUsersProjects, MongoProject } from "./types" @@ -388,13 +389,13 @@ async function projectListPage(req, res, next) { } } - let hasIndividualRecurlySubscription = false + let hasIndividualPaidSubscription = false try { - hasIndividualRecurlySubscription = - usersIndividualSubscription?.groupPlan === false && - usersIndividualSubscription?.recurlyStatus?.state !== 'canceled' && - usersIndividualSubscription?.recurlySubscription_id !== '' + hasIndividualPaidSubscription = + SubscriptionHelper.isIndividualActivePaidSubscription( + usersIndividualSubscription + ) } catch (error) { logger.error({ err: error }, 'Failed to get individual subscription') } @@ -408,6 +409,15 @@ async function projectListPage(req, res, next) { 'papers-notification-banner' ) + const customerIoEnabled = + await SplitTestHandler.promises.hasUserBeenAssignedToVariant( + req, + userId, + 'customer-io-trial-conversion', + 'enabled', + true + ) + res.render('project/list-react', { title: 'your_projects', usersBestSubscription, @@ -437,8 +447,9 @@ async function projectListPage(req, res, next) { groupId: subscription._id, groupName: subscription.teamName, })), - hasIndividualRecurlySubscription, + hasIndividualPaidSubscription, userRestrictions: Array.from(req.userRestrictions || []), + customerIoEnabled, }) } diff --git a/services/web/app/src/Features/Subscription/Errors.js b/services/web/app/src/Features/Subscription/Errors.js index cbcd0014f7..9ebb08c6db 100644 --- a/services/web/app/src/Features/Subscription/Errors.js +++ b/services/web/app/src/Features/Subscription/Errors.js @@ -26,10 +26,17 @@ class SubtotalLimitExceededError extends OError {} class HasPastDueInvoiceError extends OError {} +class PaymentActionRequiredError extends OError { + constructor(info) { + super('Payment action required', info) + } +} + module.exports = { RecurlyTransactionError, DuplicateAddOnError, AddOnNotPresentError, + PaymentActionRequiredError, MissingBillingInfoError, ManuallyCollectedError, PendingChangeError, diff --git a/services/web/app/src/Features/Subscription/FeaturesUpdater.js b/services/web/app/src/Features/Subscription/FeaturesUpdater.js index a8c27f705f..16413c501c 100644 --- a/services/web/app/src/Features/Subscription/FeaturesUpdater.js +++ b/services/web/app/src/Features/Subscription/FeaturesUpdater.js 
@@ -3,6 +3,7 @@ const { callbackify } = require('util') const { callbackifyMultiResult } = require('@overleaf/promise-utils') const PlansLocator = require('./PlansLocator') const SubscriptionLocator = require('./SubscriptionLocator') +const SubscriptionHelper = require('./SubscriptionHelper') const UserFeaturesUpdater = require('./UserFeaturesUpdater') const FeaturesHelper = require('./FeaturesHelper') const Settings = require('@overleaf/settings') @@ -117,7 +118,10 @@ async function computeFeatures(userId) { async function _getIndividualFeatures(userId) { const subscription = await SubscriptionLocator.promises.getUsersSubscription(userId) - if (subscription == null || subscription?.recurlyStatus?.state === 'paused') { + if ( + subscription == null || + SubscriptionHelper.getPaidSubscriptionState(subscription) === 'paused' + ) { return {} } diff --git a/services/web/app/src/Features/Subscription/PaymentProviderEntities.js b/services/web/app/src/Features/Subscription/PaymentProviderEntities.js index f6a8af4aa5..21bd504caf 100644 --- a/services/web/app/src/Features/Subscription/PaymentProviderEntities.js +++ b/services/web/app/src/Features/Subscription/PaymentProviderEntities.js @@ -2,16 +2,19 @@ /** * @import { PaymentProvider } from '../../../../types/subscription/dashboard/subscription' + * @import { AddOn } from '../../../../types/subscription/plan' */ const OError = require('@overleaf/o-error') const { DuplicateAddOnError, AddOnNotPresentError } = require('./Errors') const PlansLocator = require('./PlansLocator') -const SubscriptionHelper = require('./SubscriptionHelper') -const AI_ADD_ON_CODE = 'assistant' +let SubscriptionHelper = null // Work around circular import (loaded at the bottom of the file) + const MEMBERS_LIMIT_ADD_ON_CODE = 'additional-license' -const STANDALONE_AI_ADD_ON_CODES = ['assistant', 'assistant-annual'] +const AI_ASSIST_STANDALONE_MONTHLY_PLAN_CODE = 'assistant' +const AI_ASSIST_STANDALONE_ANNUAL_PLAN_CODE = 'assistant-annual' +const AI_ADD_ON_CODE = 'assistant' class PaymentProviderSubscription { /** @@ -131,9 +134,11 @@ class PaymentProviderSubscription { if (newPlan == null) { throw new OError('Unable to find plan in settings', { planCode }) } + const isInTrial = SubscriptionHelper.isInTrial(this.trialPeriodEnd) const shouldChangeAtTermEnd = SubscriptionHelper.shouldPlanChangeAtTermEnd( currentPlan, - newPlan + newPlan, + isInTrial ) const changeRequest = new PaymentProviderSubscriptionChangeRequest({ @@ -247,13 +252,51 @@ class PaymentProviderSubscription { const addOnUpdates = this.addOns .filter(addOn => addOn.code !== code) .map(addOn => addOn.toAddOnUpdate()) + const isInTrial = SubscriptionHelper.isInTrial(this.trialPeriodEnd) return new PaymentProviderSubscriptionChangeRequest({ subscription: this, - timeframe: 'term_end', + timeframe: isInTrial ? 
'now' : 'term_end', addOnUpdates, }) } + /** + * Form a request to revert the plan to its last saved backup state + * + * @param {string} previousPlanCode + * @param {Array | null} previousAddOns + * @return {PaymentProviderSubscriptionChangeRequest} + * + * @throws {OError} if the restore point plan doesn't exist + */ + getRequestForPlanRevert(previousPlanCode, previousAddOns) { + const lastSuccessfulPlan = + PlansLocator.findLocalPlanInSettings(previousPlanCode) + if (lastSuccessfulPlan == null) { + throw new OError('Unable to find plan in settings', { previousPlanCode }) + } + const changeRequest = new PaymentProviderSubscriptionChangeRequest({ + subscription: this, + timeframe: 'now', + planCode: previousPlanCode, + }) + + // defaulting to an empty array is important, as that will wipe away any add-ons that were added in the failed payment + // but were not part of the last successful subscription + const addOns = [] + for (const previousAddon of previousAddOns || []) { + const addOnUpdate = new PaymentProviderSubscriptionAddOnUpdate({ + code: previousAddon.addOnCode, + quantity: previousAddon.quantity, + unitPrice: previousAddon.unitAmountInCents / 100, + }) + addOns.push(addOnUpdate) + } + changeRequest.addOnUpdates = addOns + + return changeRequest + } + /** * Upgrade group plan with the plan code provided * @@ -549,7 +592,10 @@ class PaymentProviderAccount { * @param {string} planCode */ function isStandaloneAiAddOnPlanCode(planCode) { - return STANDALONE_AI_ADD_ON_CODES.includes(planCode) + return ( + planCode === AI_ASSIST_STANDALONE_MONTHLY_PLAN_CODE || + planCode === AI_ASSIST_STANDALONE_ANNUAL_PLAN_CODE + ) } /** @@ -580,7 +626,8 @@ function subscriptionChangeIsAiAssistUpgrade(subscriptionChange) { module.exports = { AI_ADD_ON_CODE, MEMBERS_LIMIT_ADD_ON_CODE, - STANDALONE_AI_ADD_ON_CODES, + AI_ASSIST_STANDALONE_MONTHLY_PLAN_CODE, + AI_ASSIST_STANDALONE_ANNUAL_PLAN_CODE, PaymentProviderSubscription, PaymentProviderSubscriptionAddOn, PaymentProviderSubscriptionChange, @@ -598,3 +645,5 @@ module.exports = { subscriptionChangeIsAiAssistUpgrade, PaymentProviderImmediateCharge, } + +SubscriptionHelper = require('./SubscriptionHelper') diff --git a/services/web/app/src/Features/Subscription/PlansLocator.js b/services/web/app/src/Features/Subscription/PlansLocator.js index 24343e1109..67d2f31c52 100644 --- a/services/web/app/src/Features/Subscription/PlansLocator.js +++ b/services/web/app/src/Features/Subscription/PlansLocator.js @@ -1,10 +1,15 @@ -// TODO: This file may be deleted when Stripe is fully implemented to all users, so, consider deleting it +// @ts-check + const Settings = require('@overleaf/settings') const logger = require('@overleaf/logger') /** * @typedef {import('../../../../types/subscription/plan').RecurlyPlanCode} RecurlyPlanCode * @typedef {import('../../../../types/subscription/plan').StripeLookupKey} StripeLookupKey + * @typedef {import('../../../../types/subscription/plan').StripeBaseLookupKey} StripeBaseLookupKey + * @typedef {import('../../../../types/subscription/plan').Plan} Plan + * @typedef {import('../../../../types/subscription/currency').StripeCurrencyCode} StripeCurrencyCode + * @typedef {import('stripe').Stripe.Price.Recurring.Interval} BillingCycleInterval */ function ensurePlansAreSetupCorrectly() { @@ -24,61 +29,105 @@ function ensurePlansAreSetupCorrectly() { }) } -const recurlyPlanCodeToStripeLookupKey = { - 'professional-annual': 'professional_annual', - professional: 'professional_monthly', - professional_free_trial_7_days: 
'professional_monthly', - 'collaborator-annual': 'standard_annual', +/** + * @type {Record} + */ +const recurlyCodeToStripeBaseLookupKey = { collaborator: 'standard_monthly', + 'collaborator-annual': 'standard_annual', collaborator_free_trial_7_days: 'standard_monthly', - 'student-annual': 'student_annual', + + professional: 'professional_monthly', + 'professional-annual': 'professional_annual', + professional_free_trial_7_days: 'professional_monthly', + student: 'student_monthly', + 'student-annual': 'student_annual', student_free_trial_7_days: 'student_monthly', - group_professional: 'group_professional_enterprise', - group_professional_educational: 'group_professional_educational', + + // TODO: change all group plans' lookup_keys to match the UK account after they have been added group_collaborator: 'group_standard_enterprise', group_collaborator_educational: 'group_standard_educational', - assistant_annual: 'error_assist_annual', - assistant: 'error_assist_monthly', + group_professional: 'group_professional_enterprise', + group_professional_educational: 'group_professional_educational', + + assistant: 'assistant_monthly', + 'assistant-annual': 'assistant_annual', +} + +const LATEST_STRIPE_LOOKUP_KEY_VERSION = 'jun2025' + +/** + * Build the Stripe lookup key, will be in this format: + * `${productCode}_${billingInterval}_${latestVersion}_${currency}` + * (for example: 'assistant_annual_jun2025_clp') + * + * @param {RecurlyPlanCode} recurlyCode + * @param {StripeCurrencyCode} currency + * @param {BillingCycleInterval} [billingCycleInterval] -- needed for handling 'assistant' add-on + * @returns {StripeLookupKey|null} + */ +function buildStripeLookupKey(recurlyCode, currency, billingCycleInterval) { + let stripeBaseLookupKey = recurlyCodeToStripeBaseLookupKey[recurlyCode] + + // Recurly always uses 'assistant' as the code regardless of the subscription duration + if (recurlyCode === 'assistant' && billingCycleInterval) { + if (billingCycleInterval === 'month') { + stripeBaseLookupKey = 'assistant_monthly' + } + if (billingCycleInterval === 'year') { + stripeBaseLookupKey = 'assistant_annual' + } + } + + if (stripeBaseLookupKey == null) { + return null + } + + return `${stripeBaseLookupKey}_${LATEST_STRIPE_LOOKUP_KEY_VERSION}_${currency}` } /** - * - * @param {RecurlyPlanCode} recurlyPlanCode - * @returns {StripeLookupKey} + * @typedef {{ planType: 'individual' | 'group' | 'student' | null, period: 'annual' | 'monthly' }} PlanTypeAndPeriod + * @type {Record} */ -function mapRecurlyPlanCodeToStripeLookupKey(recurlyPlanCode) { - return recurlyPlanCodeToStripeLookupKey[recurlyPlanCode] -} - const recurlyPlanCodeToPlanTypeAndPeriod = { collaborator: { planType: 'individual', period: 'monthly' }, - collaborator_free_trial_7_days: { planType: 'individual', period: 'monthly' }, 'collaborator-annual': { planType: 'individual', period: 'annual' }, + collaborator_free_trial_7_days: { planType: 'individual', period: 'monthly' }, + professional: { planType: 'individual', period: 'monthly' }, + 'professional-annual': { planType: 'individual', period: 'annual' }, professional_free_trial_7_days: { planType: 'individual', period: 'monthly', }, - 'professional-annual': { planType: 'individual', period: 'annual' }, + student: { planType: 'student', period: 'monthly' }, - student_free_trial_7_days: { planType: 'student', period: 'monthly' }, 'student-annual': { planType: 'student', period: 'annual' }, - group_professional: { planType: 'group', period: 'annual' }, - group_professional_educational: { planType: 
'group', period: 'annual' }, + student_free_trial_7_days: { planType: 'student', period: 'monthly' }, + group_collaborator: { planType: 'group', period: 'annual' }, group_collaborator_educational: { planType: 'group', period: 'annual' }, + group_professional: { planType: 'group', period: 'annual' }, + group_professional_educational: { planType: 'group', period: 'annual' }, + + assistant: { planType: null, period: 'monthly' }, + 'assistant-annual': { planType: null, period: 'annual' }, } /** - * * @param {RecurlyPlanCode} recurlyPlanCode - * @returns {{ planType: 'individual' | 'group' | 'student', period: 'annual' | 'monthly'}} + * @returns {PlanTypeAndPeriod} */ function getPlanTypeAndPeriodFromRecurlyPlanCode(recurlyPlanCode) { return recurlyPlanCodeToPlanTypeAndPeriod[recurlyPlanCode] } +/** + * @param {string|null} [planCode] + * @returns {Plan|null} + */ function findLocalPlanInSettings(planCode) { for (const plan of Settings.plans) { if (plan.planCode === planCode) { @@ -91,6 +140,6 @@ function findLocalPlanInSettings(planCode) { module.exports = { ensurePlansAreSetupCorrectly, findLocalPlanInSettings, - mapRecurlyPlanCodeToStripeLookupKey, + buildStripeLookupKey, getPlanTypeAndPeriodFromRecurlyPlanCode, } diff --git a/services/web/app/src/Features/Subscription/RecurlyClient.js b/services/web/app/src/Features/Subscription/RecurlyClient.js index fdb3b023e6..25332a9c34 100644 --- a/services/web/app/src/Features/Subscription/RecurlyClient.js +++ b/services/web/app/src/Features/Subscription/RecurlyClient.js @@ -22,6 +22,7 @@ const { MissingBillingInfoError, SubtotalLimitExceededError, } = require('./Errors') +const RecurlyMetrics = require('./RecurlyMetrics') /** * @import { PaymentProviderSubscriptionChangeRequest } from './PaymentProviderEntities' @@ -29,10 +30,28 @@ const { * @import { PaymentMethod } from './types' */ +class RecurlyClientWithErrorHandling extends recurly.Client { + /** + * @param {import('recurly/lib/recurly/Http').Response} response + * @return {Error | null} + * @private + */ + _errorFromResponse(response) { + RecurlyMetrics.recordMetrics( + response.status, + response.rateLimit, + response.rateLimitRemaining, + response.rateLimitReset.getTime() + ) + // @ts-ignore + return super._errorFromResponse(response) + } +} + const recurlySettings = Settings.apis.recurly const recurlyApiKey = recurlySettings ? recurlySettings.apiKey : undefined -const client = new recurly.Client(recurlyApiKey) +const client = new RecurlyClientWithErrorHandling(recurlyApiKey) /** * Get account for a given user @@ -685,6 +704,53 @@ function subscriptionUpdateRequestToApi(updateRequest) { return requestBody } +/** + * Retrieves a list of failed invoices for a given Recurly subscription ID. + * + * @async + * @function + * @param {string} subscriptionId - The ID of the Recurly subscription to fetch failed invoices for. + * @returns {Promise>} A promise that resolves to an array of failed invoice objects. + */ +async function getPastDueInvoices(subscriptionId) { + const failed = [] + const invoices = client.listSubscriptionInvoices(`uuid-${subscriptionId}`, { + params: { state: 'past_due' }, + }) + + for await (const invoice of invoices.each()) { + failed.push(invoice) + } + return failed +} + +/** + * Marks an invoice as failed using the Recurly client. + * + * @async + * @function failInvoice + * @param {string} invoiceId - The ID of the invoice to be marked as failed. + * @returns {Promise} Resolves when the invoice has been successfully marked as failed. 
+ */ +async function failInvoice(invoiceId) { + await client.markInvoiceFailed(invoiceId) +} + +async function terminateSubscriptionByUuid(subscriptionUuid) { + const subscription = await client.terminateSubscription( + 'uuid-' + subscriptionUuid, + { + body: { + refund: 'none', + }, + } + ) + + logger.debug({ subscriptionUuid }, 'subscription terminated') + + return subscription +} + module.exports = { errors: recurly.errors, @@ -706,6 +772,9 @@ module.exports = { subscriptionIsCanceledOrExpired, pauseSubscriptionByUuid: callbackify(pauseSubscriptionByUuid), resumeSubscriptionByUuid: callbackify(resumeSubscriptionByUuid), + getPastDueInvoices: callbackify(getPastDueInvoices), + failInvoice: callbackify(failInvoice), + terminateSubscriptionByUuid: callbackify(terminateSubscriptionByUuid), promises: { getSubscription, @@ -726,5 +795,8 @@ module.exports = { getPaymentMethod, getAddOn, getPlan, + getPastDueInvoices, + failInvoice, + terminateSubscriptionByUuid, }, } diff --git a/services/web/app/src/Features/Subscription/RecurlyMetrics.js b/services/web/app/src/Features/Subscription/RecurlyMetrics.js new file mode 100644 index 0000000000..1b709d7dc4 --- /dev/null +++ b/services/web/app/src/Features/Subscription/RecurlyMetrics.js @@ -0,0 +1,38 @@ +const Metrics = require('@overleaf/metrics') + +/** + * @param {number} status + * @param {number} rateLimit + * @param {number} rateLimitRemaining + * @param {number} rateLimitReset + */ +function recordMetrics(status, rateLimit, rateLimitRemaining, rateLimitReset) { + Metrics.inc('recurly_request', 1, { status }) + const metrics = { rateLimit, rateLimitRemaining, rateLimitReset } + for (const [method, v] of Object.entries(metrics)) { + if (Number.isNaN(v)) continue + Metrics.gauge('recurly_request_rate_limiting', v, 1, { method }) + } +} + +/** + * @param {Response} response + */ +function recordMetricsFromResponse(response) { + const rateLimit = parseInt( + response.headers.get('X-RateLimit-Limit') || '', + 10 + ) + const rateLimitRemaining = parseInt( + response.headers.get('X-RateLimit-Remaining') || '', + 10 + ) + const rateLimitReset = + parseInt(response.headers.get('X-RateLimit-Reset') || '', 10) * 1000 + recordMetrics(response.status, rateLimit, rateLimitRemaining, rateLimitReset) +} + +module.exports = { + recordMetrics, + recordMetricsFromResponse, +} diff --git a/services/web/app/src/Features/Subscription/RecurlyWrapper.js b/services/web/app/src/Features/Subscription/RecurlyWrapper.js index 2227597737..243da6edce 100644 --- a/services/web/app/src/Features/Subscription/RecurlyWrapper.js +++ b/services/web/app/src/Features/Subscription/RecurlyWrapper.js @@ -9,24 +9,30 @@ const logger = require('@overleaf/logger') const Errors = require('../Errors/Errors') const SubscriptionErrors = require('./Errors') const { callbackify } = require('@overleaf/promise-utils') +const RecurlyMetrics = require('./RecurlyMetrics') /** - * @param accountId - * @param newEmail + * Updates the email address of a Recurly account + * + * @param userId + * @param newAccountEmail - the new email address to set for the Recurly account */ -async function updateAccountEmailAddress(accountId, newEmail) { +async function updateAccountEmailAddress(userId, newAccountEmail) { const data = { - email: newEmail, + email: newAccountEmail, } let requestBody try { requestBody = RecurlyWrapper._buildXml('account', data) } catch (error) { - throw OError.tag(error, 'error building xml', { accountId, newEmail }) + throw OError.tag(error, 'error building xml', { + accountId: userId, + 
newEmail: newAccountEmail, + }) } const { body } = await RecurlyWrapper.promises.apiRequest({ - url: `accounts/${accountId}`, + url: `accounts/${userId}`, method: 'PUT', body: requestBody, }) @@ -412,9 +418,15 @@ const promises = { } try { - return await fetchStringWithResponse(fetchUrl, fetchOptions) + const { body, response } = await fetchStringWithResponse( + fetchUrl, + fetchOptions + ) + RecurlyMetrics.recordMetricsFromResponse(response) + return { body, response } } catch (error) { if (error instanceof RequestFailedError) { + RecurlyMetrics.recordMetricsFromResponse(error.response) if (error.response.status === 404 && expect404) { return { response: error.response, body: null } } else if (error.response.status === 422 && expect422) { @@ -681,12 +693,15 @@ const promises = { } }, - async extendTrial(subscriptionId, daysUntilExpire) { + async extendTrial(subscriptionId, trialEndsAt, daysUntilExpire) { if (daysUntilExpire == null) { daysUntilExpire = 7 } + if (trialEndsAt == null) { + trialEndsAt = new Date() + } const nextRenewalDate = new Date() - nextRenewalDate.setDate(nextRenewalDate.getDate() + daysUntilExpire) + nextRenewalDate.setDate(trialEndsAt.getDate() + daysUntilExpire) logger.debug( { subscriptionId, daysUntilExpire }, 'Exending Free trial for user' diff --git a/services/web/app/src/Features/Subscription/SubscriptionController.js b/services/web/app/src/Features/Subscription/SubscriptionController.js index db278b23c0..5856682166 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionController.js +++ b/services/web/app/src/Features/Subscription/SubscriptionController.js @@ -2,6 +2,7 @@ const SessionManager = require('../Authentication/SessionManager') const SubscriptionHandler = require('./SubscriptionHandler') +const SubscriptionHelper = require('./SubscriptionHelper') const SubscriptionViewModelBuilder = require('./SubscriptionViewModelBuilder') const LimitationsManager = require('./LimitationsManager') const RecurlyWrapper = require('./RecurlyWrapper') @@ -15,7 +16,11 @@ const AnalyticsManager = require('../Analytics/AnalyticsManager') const RecurlyEventHandler = require('./RecurlyEventHandler') const { expressify } = require('@overleaf/promise-utils') const OError = require('@overleaf/o-error') -const { DuplicateAddOnError, AddOnNotPresentError } = require('./Errors') +const { + DuplicateAddOnError, + AddOnNotPresentError, + PaymentActionRequiredError, +} = require('./Errors') const SplitTestHandler = require('../SplitTests/SplitTestHandler') const AuthorizationManager = require('../Authorization/AuthorizationManager') const Modules = require('../../infrastructure/Modules') @@ -27,6 +32,11 @@ const PlansLocator = require('./PlansLocator') const PaymentProviderEntities = require('./PaymentProviderEntities') const { User } = require('../../models/User') const UserGetter = require('../User/UserGetter') +const PermissionsManager = require('../Authorization/PermissionsManager') +const { + sanitizeSessionUserForFrontEnd, +} = require('../../infrastructure/FrontEndUser') +const { IndeterminateInvoiceError } = require('../Errors/Errors') /** * @import { SubscriptionChangeDescription } from '../../../../types/subscription/subscription-change-preview' @@ -78,9 +88,13 @@ async function userSubscriptionPage(req, res) { await Modules.promises.hooks.fire('userCanExtendTrial', user) )?.[0] const fromPlansPage = req.query.hasSubscription + const isInTrial = SubscriptionHelper.isInTrial( + personalSubscription?.payment?.trialEndsAt + ) const plansData = 
SubscriptionViewModelBuilder.buildPlansListForSubscriptionDash( - personalSubscription?.plan + personalSubscription?.plan, + isInTrial ) AnalyticsManager.recordEventForSession(req.session, 'subscription-page-view') @@ -258,7 +272,8 @@ async function pauseSubscription(req, res, next) { { pause_length: pauseCycles, plan_code: subscription?.planCode, - subscriptionId: subscription?.recurlySubscription_id, + subscriptionId: + SubscriptionHelper.getPaymentProviderSubscriptionId(subscription), } ) @@ -311,7 +326,9 @@ function cancelSubscription(req, res, next) { async function canceledSubscription(req, res, next) { return res.render('subscriptions/canceled-subscription-react', { title: 'subscription_canceled', - user: SessionManager.getSessionUser(req.session), + user: sanitizeSessionUserForFrontEnd( + SessionManager.getSessionUser(req.session) + ), }) } @@ -330,7 +347,8 @@ function cancelV1Subscription(req, res, next) { } async function previewAddonPurchase(req, res) { - const userId = SessionManager.getLoggedInUserId(req.session) + const user = SessionManager.getSessionUser(req.session) + const userId = user._id const addOnCode = req.params.addOnCode const purchaseReferrer = req.query.purchaseReferrer @@ -338,6 +356,16 @@ async function previewAddonPurchase(req, res) { return HttpErrorHandler.notFound(req, res, `Unknown add-on: ${addOnCode}`) } + const canUseAi = await PermissionsManager.promises.checkUserPermissions( + user, + ['use-ai'] + ) + if (!canUseAi) { + return res.redirect( + '/user/subscription?redirect-reason=ai-assist-unavailable' + ) + } + /** @type {PaymentMethod[]} */ const paymentMethod = await Modules.promises.hooks.fire( 'getPaymentMethod', @@ -423,6 +451,11 @@ async function purchaseAddon(req, res, next) { 'Your subscription already includes this add-on', { addon: addOnCode } ) + } else if (err instanceof PaymentActionRequiredError) { + return res.status(402).json({ + message: 'Payment action required', + clientSecret: err.info.clientSecret, + }) } else { if (err instanceof Error) { OError.tag(err, 'something went wrong purchasing add-ons', { @@ -524,18 +557,18 @@ function cancelPendingSubscriptionChange(req, res, next) { }) } -function updateAccountEmailAddress(req, res, next) { +async function updateAccountEmailAddress(req, res, next) { const user = SessionManager.getSessionUser(req.session) - RecurlyWrapper.updateAccountEmailAddress( - user._id, - user.email, - function (error) { - if (error) { - return next(error) - } - res.sendStatus(200) - } - ) + try { + await Modules.promises.hooks.fire( + 'updateAccountEmailAddress', + user._id, + user.email + ) + return res.sendStatus(200) + } catch (error) { + return next(error) + } } function reactivateSubscription(req, res, next) { @@ -574,7 +607,42 @@ function recurlyCallback(req, res, next) { ) ) - if ( + // this is a recurly only case which is required since Recurly does not have a reliable way to check credit info pre-upgrade purchase + if (event === 'failed_payment_notification') { + if (!Settings.planReverts?.enabled) { + return res.sendStatus(200) + } + + SubscriptionHandler.getSubscriptionRestorePoint( + eventData.transaction.subscription_id, + function (err, lastSubscription) { + if (err) { + return next(err) + } + // if theres no restore point it could be a failed renewal, or no restore set. 
Either way it will be handled through dunning automatically + if (!lastSubscription || !lastSubscription?.planCode) { + return res.sendStatus(200) + } + SubscriptionHandler.revertPlanChange( + eventData.transaction.subscription_id, + lastSubscription, + function (err) { + if (err instanceof IndeterminateInvoiceError) { + logger.warn( + { recurlySubscriptionId: err.info.recurlySubscriptionId }, + 'could not determine invoice to fail for subscription' + ) + return res.sendStatus(200) + } + if (err) { + return next(err) + } + return res.sendStatus(200) + } + ) + } + ) + } else if ( [ 'new_subscription_notification', 'updated_subscription_notification', @@ -665,7 +733,7 @@ async function getRecommendedCurrency(req, res) { ip = req.query.ip } const currencyLookup = await GeoIpLookup.promises.getCurrencyCode(ip) - let countryCode = currencyLookup.countryCode + const countryCode = currencyLookup.countryCode const recommendedCurrency = currencyLookup.currencyCode let currency = null @@ -676,13 +744,6 @@ async function getRecommendedCurrency(req, res) { currency = recommendedCurrency } - const queryCountryCode = req.query.countryCode?.toUpperCase() - - // only enable countryCode testing flag on staging or dev environments - if (queryCountryCode && process.env.NODE_ENV !== 'production') { - countryCode = queryCountryCode - } - return { currency, recommendedCurrency, @@ -782,7 +843,7 @@ function makeChangePreview( paymentMethod: paymentMethod?.toString(), netTerms: subscription.netTerms, nextPlan: { - annual: nextPlan.annual ?? false, + annual: nextPlan?.annual ?? false, }, nextInvoice: { date: subscription.periodEnd.toISOString(), @@ -820,7 +881,7 @@ module.exports = { cancelV1Subscription, previewSubscription: expressify(previewSubscription), cancelPendingSubscriptionChange, - updateAccountEmailAddress, + updateAccountEmailAddress: expressify(updateAccountEmailAddress), reactivateSubscription, recurlyCallback, extendTrial: expressify(extendTrial), diff --git a/services/web/app/src/Features/Subscription/SubscriptionGroupController.mjs b/services/web/app/src/Features/Subscription/SubscriptionGroupController.mjs index ce1207cded..90ecd51091 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionGroupController.mjs +++ b/services/web/app/src/Features/Subscription/SubscriptionGroupController.mjs @@ -108,10 +108,16 @@ async function _removeUserFromGroup( }) } + const groupAuditLog = { + initiatorId: loggedInUserId, + ipAddress: req.ip, + } + try { await SubscriptionGroupHandler.promises.removeUserFromGroup( subscriptionId, - userToRemoveId + userToRemoveId, + groupAuditLog ) } catch (error) { logger.err( diff --git a/services/web/app/src/Features/Subscription/SubscriptionGroupHandler.js b/services/web/app/src/Features/Subscription/SubscriptionGroupHandler.js index 5772946b8a..ba862baa67 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionGroupHandler.js +++ b/services/web/app/src/Features/Subscription/SubscriptionGroupHandler.js @@ -4,6 +4,7 @@ const OError = require('@overleaf/o-error') const SubscriptionUpdater = require('./SubscriptionUpdater') const SubscriptionLocator = require('./SubscriptionLocator') const SubscriptionController = require('./SubscriptionController') +const SubscriptionHelper = require('./SubscriptionHelper') const { Subscription } = require('../../models/Subscription') const { User } = require('../../models/User') const RecurlyClient = require('./RecurlyClient') @@ -22,10 +23,11 @@ const { const EmailHelper = require('../Helpers/EmailHelper') const { 
InvalidEmailError } = require('../Errors/Errors') -async function removeUserFromGroup(subscriptionId, userIdToRemove) { +async function removeUserFromGroup(subscriptionId, userIdToRemove, auditLog) { await SubscriptionUpdater.promises.removeUserFromGroup( subscriptionId, - userIdToRemove + userIdToRemove, + auditLog ) } @@ -76,7 +78,7 @@ async function ensureFlexibleLicensingEnabled(plan) { } async function ensureSubscriptionIsActive(subscription) { - if (subscription?.recurlyStatus?.state !== 'active') { + if (SubscriptionHelper.getPaidSubscriptionState(subscription) !== 'active') { throw new InactiveError('The subscription is not active', { subscriptionId: subscription._id.toString(), }) @@ -463,7 +465,9 @@ async function updateGroupMembersBulk( ) } for (const user of membersToRemove) { - await removeUserFromGroup(subscription._id, user._id) + await removeUserFromGroup(subscription._id, user._id, { + initiatorId: inviterId, + }) } } diff --git a/services/web/app/src/Features/Subscription/SubscriptionHandler.js b/services/web/app/src/Features/Subscription/SubscriptionHandler.js index 39a44f305f..104acd8783 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionHandler.js +++ b/services/web/app/src/Features/Subscription/SubscriptionHandler.js @@ -1,21 +1,21 @@ // @ts-check -const recurly = require('recurly') const RecurlyWrapper = require('./RecurlyWrapper') const RecurlyClient = require('./RecurlyClient') const { User } = require('../../models/User') const logger = require('@overleaf/logger') +const SubscriptionHelper = require('./SubscriptionHelper') const SubscriptionUpdater = require('./SubscriptionUpdater') const SubscriptionLocator = require('./SubscriptionLocator') const LimitationsManager = require('./LimitationsManager') const EmailHandler = require('../Email/EmailHandler') const { callbackify } = require('@overleaf/promise-utils') const UserUpdater = require('../User/UserUpdater') -const { NotFoundError } = require('../Errors/Errors') +const { IndeterminateInvoiceError } = require('../Errors/Errors') const Modules = require('../../infrastructure/Modules') /** - * @import { PaymentProviderSubscription, PaymentProviderSubscriptionChange } from './PaymentProviderEntities' + * @import { PaymentProviderSubscriptionChange } from './PaymentProviderEntities' */ async function validateNoSubscriptionInRecurly(userId) { @@ -102,8 +102,7 @@ async function updateSubscription(user, planCode) { if ( !hasSubscription || subscription == null || - (subscription.recurlySubscription_id == null && - subscription.paymentProvider?.subscriptionId == null) + SubscriptionHelper.getPaymentProviderSubscriptionId(subscription) == null ) { return } @@ -247,11 +246,8 @@ async function attemptPaypalInvoiceCollection(recurlyAccountCode) { ) } -async function extendTrial(subscription, daysToExend) { - await RecurlyWrapper.promises.extendTrial( - subscription.recurlySubscription_id, - daysToExend - ) +async function extendTrial(subscription, daysToExtend) { + await Modules.promises.hooks.fire('extendTrial', subscription, daysToExtend) } /** @@ -278,24 +274,12 @@ async function previewAddonPurchase(userId, addOnCode) { * @param {number} quantity */ async function purchaseAddon(userId, addOnCode, quantity) { - const subscription = await getSubscriptionForUser(userId) - try { - await RecurlyClient.promises.getAddOn(subscription.planCode, addOnCode) - } catch (err) { - if (err instanceof recurly.errors.NotFoundError) { - throw new NotFoundError({ - message: 'Add-on not found', - info: { addOnCode }, - }) 
- } - throw err - } - const changeRequest = subscription.getRequestForAddOnPurchase( + await Modules.promises.hooks.fire( + 'purchaseAddOn', + userId, addOnCode, quantity ) - await RecurlyClient.promises.applySubscriptionChangeRequest(changeRequest) - await syncSubscription({ uuid: subscription.id }, userId) } /** @@ -305,51 +289,17 @@ async function purchaseAddon(userId, addOnCode, quantity) { * @param {string} addOnCode */ async function removeAddon(userId, addOnCode) { - const subscription = await getSubscriptionForUser(userId) - const changeRequest = subscription.getRequestForAddOnRemoval(addOnCode) - await RecurlyClient.promises.applySubscriptionChangeRequest(changeRequest) - await syncSubscription({ uuid: subscription.id }, userId) -} - -/** - * Returns the Recurly UUID for the given user - * - * Throws a NotFoundError if the subscription can't be found - * - * @param {string} userId - * @return {Promise} - */ -async function getSubscriptionForUser(userId) { - const subscription = - await SubscriptionLocator.promises.getUsersSubscription(userId) - const recurlyId = subscription?.recurlySubscription_id - if (recurlyId == null) { - throw new NotFoundError({ - message: 'Recurly subscription not found', - info: { userId }, - }) - } - - try { - const subscription = await RecurlyClient.promises.getSubscription(recurlyId) - return subscription - } catch (err) { - if (err instanceof recurly.errors.NotFoundError) { - throw new NotFoundError({ - message: 'Subscription not found', - info: { userId, recurlyId }, - }) - } else { - throw err - } - } + await Modules.promises.hooks.fire('removeAddOn', userId, addOnCode) } async function pauseSubscription(user, pauseCycles) { // only allow pausing on monthly plans not in a trial const { subscription } = await LimitationsManager.promises.userHasSubscription(user) - if (!subscription || !subscription.recurlyStatus) { + if ( + !subscription || + !SubscriptionHelper.getPaidSubscriptionState(subscription) + ) { throw new Error('No active subscription to pause') } @@ -360,10 +310,9 @@ async function pauseSubscription(user, pauseCycles) { ) { throw new Error('Can only pause monthly individual plans') } - if ( - subscription.recurlyStatus.trialEndsAt && - subscription.recurlyStatus.trialEndsAt > new Date() - ) { + const trialEndsAt = + SubscriptionHelper.getSubscriptionTrialEndsAt(subscription) + if (trialEndsAt && trialEndsAt > new Date()) { throw new Error('Cannot pause a subscription in a trial') } if (subscription.addOns?.length) { @@ -379,7 +328,10 @@ async function pauseSubscription(user, pauseCycles) { async function resumeSubscription(user) { const { subscription } = await LimitationsManager.promises.userHasSubscription(user) - if (!subscription || !subscription.recurlyStatus) { + if ( + !subscription || + !SubscriptionHelper.getPaidSubscriptionState(subscription) + ) { throw new Error('No active subscription to resume') } await RecurlyClient.promises.resumeSubscriptionByUuid( @@ -387,6 +339,80 @@ async function resumeSubscription(user) { ) } +/** + * @param recurlySubscriptionId + */ +async function getSubscriptionRestorePoint(recurlySubscriptionId) { + const lastSubscription = + await SubscriptionLocator.promises.getLastSuccessfulSubscription( + recurlySubscriptionId + ) + return lastSubscription +} + +/** + * @param recurlySubscriptionId + * @param subscriptionRestorePoint + */ +async function revertPlanChange( + recurlySubscriptionId, + subscriptionRestorePoint +) { + const subscription = await RecurlyClient.promises.getSubscription( + 
recurlySubscriptionId + ) + + const changeRequest = subscription.getRequestForPlanRevert( + subscriptionRestorePoint.planCode, + subscriptionRestorePoint.addOns + ) + + const pastDue = await RecurlyClient.promises.getPastDueInvoices( + recurlySubscriptionId + ) + + // only process revert requests within the past 24 hours, as we don't want to restore plans at the end of their dunning cycle + const yesterday = new Date() + yesterday.setDate(yesterday.getDate() - 1) + if ( + pastDue.length !== 1 || + !pastDue[0].id || + !pastDue[0].dueAt || + pastDue[0].dueAt < yesterday || + pastDue[0].collectionMethod !== 'automatic' + ) { + throw new IndeterminateInvoiceError( + 'cant determine invoice to fail for plan revert', + { + recurlySubscriptionId, + } + ) + } + + await RecurlyClient.promises.failInvoice(pastDue[0].id) + await SubscriptionUpdater.promises.setSubscriptionWasReverted( + subscriptionRestorePoint._id + ) + await RecurlyClient.promises.applySubscriptionChangeRequest(changeRequest) + await syncSubscription({ uuid: recurlySubscriptionId }, {}) +} + +async function setSubscriptionRestorePoint(userId) { + const subscription = + await SubscriptionLocator.promises.getUsersSubscription(userId) + // if the subscription is not a Recurly one, we can return early as we don't allow for failed payments on other payment providers + // we need to deal with it for Recurly, because we can't verify payment in advance + if (!subscription?.recurlySubscription_id || !subscription.planCode) { + return + } + await SubscriptionUpdater.promises.setRestorePoint( + subscription.id, + subscription.planCode, + subscription.addOns, + false + ) +} + module.exports = { validateNoSubscriptionInRecurly: callbackify(validateNoSubscriptionInRecurly), createSubscription: callbackify(createSubscription), @@ -403,6 +429,9 @@ module.exports = { removeAddon: callbackify(removeAddon), pauseSubscription: callbackify(pauseSubscription), resumeSubscription: callbackify(resumeSubscription), + revertPlanChange: callbackify(revertPlanChange), + setSubscriptionRestorePoint: callbackify(setSubscriptionRestorePoint), + getSubscriptionRestorePoint: callbackify(getSubscriptionRestorePoint), promises: { validateNoSubscriptionInRecurly, createSubscription, @@ -419,5 +448,8 @@ module.exports = { removeAddon, pauseSubscription, resumeSubscription, + revertPlanChange, + setSubscriptionRestorePoint, + getSubscriptionRestorePoint, }, } diff --git a/services/web/app/src/Features/Subscription/SubscriptionHelper.js b/services/web/app/src/Features/Subscription/SubscriptionHelper.js index efb8895280..429432349d 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionHelper.js +++ b/services/web/app/src/Features/Subscription/SubscriptionHelper.js @@ -1,11 +1,25 @@ const { formatCurrency } = require('../../util/currency') const GroupPlansData = require('./GroupPlansData') +const { isStandaloneAiAddOnPlanCode } = require('./PaymentProviderEntities') /** * If the user changes to a less expensive plan, we shouldn't apply the change immediately. * This is to avoid unintended/artifical credits on users Recurly accounts. 
*/ -function shouldPlanChangeAtTermEnd(oldPlan, newPlan) { +function shouldPlanChangeAtTermEnd(oldPlan, newPlan, isInTrial) { + if (isInTrial) { + // we should always upgrade or downgrade immediately if actively in trial + return false + } + + if ( + oldPlan.annual === newPlan.annual && + isStandaloneAiAddOnPlanCode(oldPlan.planCode) && + !isStandaloneAiAddOnPlanCode(newPlan.planCode) + ) { + // changing from an standalone AI add-on plan to a non-AI plan should not be considered a downgrade + return false + } return oldPlan.price_in_cents > newPlan.price_in_cents } @@ -86,7 +100,75 @@ function generateInitialLocalizedGroupPrice(recommendedCurrency, locale) { } } +function isPaidSubscription(subscription) { + const hasRecurlySubscription = + subscription?.recurlySubscription_id && + subscription?.recurlySubscription_id !== '' + const hasStripeSubscription = + subscription?.paymentProvider?.subscriptionId && + subscription?.paymentProvider?.subscriptionId !== '' + return !!(subscription && (hasRecurlySubscription || hasStripeSubscription)) +} + +function isIndividualActivePaidSubscription(subscription) { + return ( + isPaidSubscription(subscription) && + subscription?.groupPlan === false && + subscription?.recurlyStatus?.state !== 'canceled' && + subscription?.paymentProvider?.state !== 'canceled' + ) +} + +function getPaymentProviderSubscriptionId(subscription) { + if (subscription?.recurlySubscription_id) { + return subscription.recurlySubscription_id + } + if (subscription?.paymentProvider?.subscriptionId) { + return subscription.paymentProvider.subscriptionId + } + return null +} + +function getPaidSubscriptionState(subscription) { + if (subscription?.recurlyStatus?.state) { + return subscription.recurlyStatus.state + } + if (subscription?.paymentProvider?.state) { + return subscription.paymentProvider.state + } + return null +} + +function getSubscriptionTrialStartedAt(subscription) { + if (subscription?.recurlyStatus?.trialStartedAt) { + return subscription.recurlyStatus?.trialStartedAt + } + return subscription?.paymentProvider?.trialStartedAt +} + +function getSubscriptionTrialEndsAt(subscription) { + if (subscription?.recurlyStatus?.trialEndsAt) { + return subscription.recurlyStatus?.trialEndsAt + } + return subscription?.paymentProvider?.trialEndsAt +} + +function isInTrial(trialEndsAt) { + if (!trialEndsAt) { + return false + } + + return trialEndsAt.getTime() > Date.now() +} + module.exports = { shouldPlanChangeAtTermEnd, generateInitialLocalizedGroupPrice, + isPaidSubscription, + isIndividualActivePaidSubscription, + getPaymentProviderSubscriptionId, + getPaidSubscriptionState, + getSubscriptionTrialStartedAt, + getSubscriptionTrialEndsAt, + isInTrial, } diff --git a/services/web/app/src/Features/Subscription/SubscriptionLocator.js b/services/web/app/src/Features/Subscription/SubscriptionLocator.js index 8526ad0fb2..c0c107eecf 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionLocator.js +++ b/services/web/app/src/Features/Subscription/SubscriptionLocator.js @@ -1,3 +1,7 @@ +/** + * @import { AddOn } from '../../../../types/subscription/plan' + */ + const { callbackifyAll } = require('@overleaf/promise-utils') const { Subscription } = require('../../models/Subscription') const { DeletedSubscription } = require('../../models/DeletedSubscription') @@ -124,7 +128,8 @@ const SubscriptionLocator = { // todo: as opposed to recurlyEntities which use addon.code, subscription model uses addon.addOnCode // which we hope to align via 
https://github.com/overleaf/internal/issues/25494 return Boolean( - isStandaloneAiAddOnPlanCode(subscription?.planCode) || + (subscription?.planCode && + isStandaloneAiAddOnPlanCode(subscription?.planCode)) || subscription?.addOns?.some(addOn => addOn.addOnCode === AI_ADD_ON_CODE) ) }, @@ -136,6 +141,66 @@ const SubscriptionLocator = { return userOrId } }, + + /** + * Retrieves the last successful subscription for a given user. + * + * @async + * @function + * @param {string} recurlyId - The ID of the recurly subscription tied to the mongo subscription to check for a previous successful state. + * @returns {Promise<{_id: ObjectId, planCode: string, addOns: [AddOn]}|null>} A promise that resolves to the last successful planCode and addon state, + * or null if we havent stored a previous + */ + async getLastSuccessfulSubscription(recurlyId) { + const subscription = await Subscription.findOne({ + recurlySubscription_id: recurlyId, + }).exec() + return subscription && subscription.lastSuccesfulSubscription + ? { + ...subscription.lastSuccesfulSubscription, + _id: subscription._id, + } + : null + }, + + async getUserSubscriptionStatus(userId) { + let usersSubscription = { personal: false, group: false } + + if (!userId) { + return usersSubscription + } + + const memberSubscriptions = + await SubscriptionLocator.getMemberSubscriptions(userId) + + const hasActiveGroupSubscription = memberSubscriptions.some( + subscription => + subscription.recurlyStatus?.state === 'active' && subscription.groupPlan + ) + if (hasActiveGroupSubscription) { + // Member of a group plan + usersSubscription = { ...usersSubscription, group: true } + } + + const personalSubscription = + await SubscriptionLocator.getUsersSubscription(userId) + + if (personalSubscription) { + const hasActivePersonalSubscription = + personalSubscription.recurlyStatus?.state === 'active' + if (hasActivePersonalSubscription) { + if (personalSubscription.groupPlan) { + // Owner of a group plan + usersSubscription = { ...usersSubscription, group: true } + } else { + // Owner of an individual plan + usersSubscription = { ...usersSubscription, personal: true } + } + } + } + + return usersSubscription + }, } module.exports = { diff --git a/services/web/app/src/Features/Subscription/SubscriptionUpdater.js b/services/web/app/src/Features/Subscription/SubscriptionUpdater.js index 482d81ff41..9de194f262 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionUpdater.js +++ b/services/web/app/src/Features/Subscription/SubscriptionUpdater.js @@ -10,14 +10,41 @@ const { DeletedSubscription } = require('../../models/DeletedSubscription') const logger = require('@overleaf/logger') const Features = require('../../infrastructure/Features') const UserAuditLogHandler = require('../User/UserAuditLogHandler') +const UserUpdater = require('../User/UserUpdater') const AccountMappingHelper = require('../Analytics/AccountMappingHelper') const { SSOConfig } = require('../../models/SSOConfig') +const mongoose = require('../../infrastructure/Mongoose') +const Modules = require('../../infrastructure/Modules') /** * @typedef {import('../../../../types/subscription/dashboard/subscription').Subscription} Subscription * @typedef {import('../../../../types/subscription/dashboard/subscription').PaymentProvider} PaymentProvider + * @typedef {import('../../../../types/group-management/group-audit-log').GroupAuditLog} GroupAuditLog + * @import { AddOn } from '../../../../types/subscription/plan' */ +/** + * + * @param {GroupAuditLog} auditLog + */ +async function 
subscriptionUpdateWithAuditLog(dbFilter, dbUpdate, auditLog) { + const session = await mongoose.startSession() + + try { + await session.withTransaction(async () => { + await Subscription.updateOne(dbFilter, dbUpdate, { session }).exec() + + await Modules.promises.hooks.fire( + 'addGroupAuditLogEntry', + auditLog, + session + ) + }) + } finally { + await session.endSession() + } +} + /** * Change the admin of the given subscription. * @@ -65,7 +92,7 @@ async function syncSubscription( ) } -async function addUserToGroup(subscriptionId, userId) { +async function addUserToGroup(subscriptionId, userId, auditLog) { await UserAuditLogHandler.promises.addEntry( userId, 'join-group-subscription', @@ -73,10 +100,18 @@ async function addUserToGroup(subscriptionId, userId) { undefined, { subscriptionId } ) - await Subscription.updateOne( + + await subscriptionUpdateWithAuditLog( { _id: subscriptionId }, - { $addToSet: { member_ids: userId } } - ).exec() + { $addToSet: { member_ids: userId } }, + { + initiatorId: auditLog?.initiatorId, + ipAddress: auditLog?.ipAddress, + groupId: subscriptionId, + operation: 'join-group', + } + ) + await FeaturesUpdater.promises.refreshFeatures(userId, 'add-to-group') await _sendUserGroupPlanCodeUserProperty(userId) await _sendSubscriptionEvent( @@ -86,7 +121,7 @@ async function addUserToGroup(subscriptionId, userId) { ) } -async function removeUserFromGroup(subscriptionId, userId) { +async function removeUserFromGroup(subscriptionId, userId, auditLog) { await UserAuditLogHandler.promises.addEntry( userId, 'leave-group-subscription', @@ -94,10 +129,37 @@ async function removeUserFromGroup(subscriptionId, userId) { undefined, { subscriptionId } ) + + await subscriptionUpdateWithAuditLog( + { _id: subscriptionId }, + { $pull: { member_ids: userId } }, + { + initiatorId: auditLog?.initiatorId, + ipAddress: auditLog?.ipAddress, + groupId: subscriptionId, + operation: 'leave-group', + info: { userIdRemoved: userId }, + } + ) + await Subscription.updateOne( { _id: subscriptionId }, { $pull: { member_ids: userId } } ).exec() + + const subscription = await Subscription.findById(subscriptionId) + if (subscription.managedUsersEnabled) { + await UserUpdater.promises.updateUser( + { _id: userId }, + { + $unset: { + 'enrollment.managedBy': 1, + 'enrollment.enrolledAt': 1, + }, + } + ) + } + await FeaturesUpdater.promises.refreshFeatures( userId, 'remove-user-from-group' @@ -271,38 +333,7 @@ async function updateSubscriptionFromRecurly( requesterData ) { if (recurlySubscription.state === 'expired') { - const hasManagedUsersFeature = - Features.hasFeature('saas') && subscription?.managedUsersEnabled - - // If a payment lapses and if the group is managed or has group SSO, as a temporary measure we need to - // make sure that the group continues as-is and no destructive actions are taken. 
- if (hasManagedUsersFeature) { - logger.warn( - { subscriptionId: subscription._id }, - 'expired subscription has managedUsers feature enabled, skipping deletion' - ) - } else { - let hasGroupSSOEnabled = false - if (subscription?.ssoConfig) { - const ssoConfig = await SSOConfig.findOne({ - _id: subscription.ssoConfig._id || subscription.ssoConfig, - }) - .lean() - .exec() - if (ssoConfig.enabled) { - hasGroupSSOEnabled = true - } - } - - if (hasGroupSSOEnabled) { - logger.warn( - { subscriptionId: subscription._id }, - 'expired subscription has groupSSO feature enabled, skipping deletion' - ) - } else { - await deleteSubscription(subscription, requesterData) - } - } + await handleExpiredSubscription(subscription, requesterData) return } const updatedPlanCode = recurlySubscription.plan.plan_code @@ -403,6 +434,41 @@ async function _sendUserGroupPlanCodeUserProperty(userId) { } } +async function handleExpiredSubscription(subscription, requesterData) { + const hasManagedUsersFeature = + Features.hasFeature('saas') && subscription?.managedUsersEnabled + + // If a payment lapses and if the group is managed or has group SSO, as a temporary measure we need to + // make sure that the group continues as-is and no destructive actions are taken. + if (hasManagedUsersFeature) { + logger.warn( + { subscriptionId: subscription._id }, + 'expired subscription has managedUsers feature enabled, skipping deletion' + ) + } else { + let hasGroupSSOEnabled = false + if (subscription?.ssoConfig) { + const ssoConfig = await SSOConfig.findOne({ + _id: subscription.ssoConfig._id || subscription.ssoConfig, + }) + .lean() + .exec() + if (ssoConfig.enabled) { + hasGroupSSOEnabled = true + } + } + + if (hasGroupSSOEnabled) { + logger.warn( + { subscriptionId: subscription._id }, + 'expired subscription has groupSSO feature enabled, skipping deletion' + ) + } else { + await deleteSubscription(subscription, requesterData) + } + } +} + async function _sendSubscriptionEvent(userId, subscriptionId, event) { const subscription = await Subscription.findOne( { _id: subscriptionId }, @@ -440,6 +506,53 @@ async function _sendSubscriptionEventForAllMembers(subscriptionId, event) { } } +/** + * Sets the plan code and addon state to revert the plan to in case of failed upgrades, or clears the last restore point if it was used/voided + * @param {ObjectId} subscriptionId the mongo ID of the subscription to set the restore point for + * @param {string} planCode the plan code to revert to + * @param {Array} addOns the addOns to revert to + * @param {Boolean} consumed whether the restore point was used to revert a subscription + */ +async function setRestorePoint(subscriptionId, planCode, addOns, consumed) { + const update = { + $set: { + 'lastSuccesfulSubscription.planCode': planCode, + 'lastSuccesfulSubscription.addOns': addOns, + }, + } + + if (consumed) { + update.$inc = { timesRevertedDueToFailedPayment: 1 } + } + + await Subscription.updateOne({ _id: subscriptionId }, update).exec() +} + +/** + * Clears the restore point for a given subscription, and signals that the subscription was successfully reverted. + * + * @async + * @function setSubscriptionWasReverted + * @param {ObjectId} subscriptionId the mongo ID of the subscription to set the restore point for + * @returns {Promise} Resolves when the restore point has been cleared. 
+ */ +async function setSubscriptionWasReverted(subscriptionId) { + // consume the backup and flag that the subscription was reverted due to failed payment + await setRestorePoint(subscriptionId, null, null, true) +} + +/** + * Clears the restore point for a given subscription, and signals that the subscription was not reverted. + * + * @async + * @function voidRestorePoint + * @param {string} subscriptionId - The unique identifier of the subscription. + * @returns {Promise} Resolves when the restore point has been cleared. + */ +async function voidRestorePoint(subscriptionId) { + await setRestorePoint(subscriptionId, null, null, false) +} + module.exports = { updateAdmin: callbackify(updateAdmin), syncSubscription: callbackify(syncSubscription), @@ -454,6 +567,9 @@ module.exports = { restoreSubscription: callbackify(restoreSubscription), updateSubscriptionFromRecurly: callbackify(updateSubscriptionFromRecurly), scheduleRefreshFeatures: callbackify(scheduleRefreshFeatures), + setSubscriptionRestorePoint: callbackify(setRestorePoint), + setSubscriptionWasReverted: callbackify(setSubscriptionWasReverted), + voidRestorePoint: callbackify(voidRestorePoint), promises: { updateAdmin, syncSubscription, @@ -468,5 +584,9 @@ module.exports = { restoreSubscription, updateSubscriptionFromRecurly, scheduleRefreshFeatures, + setRestorePoint, + setSubscriptionWasReverted, + voidRestorePoint, + handleExpiredSubscription, }, } diff --git a/services/web/app/src/Features/Subscription/SubscriptionViewModelBuilder.js b/services/web/app/src/Features/Subscription/SubscriptionViewModelBuilder.js index 441d9c2c9b..3681975a38 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionViewModelBuilder.js +++ b/services/web/app/src/Features/Subscription/SubscriptionViewModelBuilder.js @@ -1,6 +1,5 @@ // ts-check const Settings = require('@overleaf/settings') -const RecurlyWrapper = require('./RecurlyWrapper') const PlansLocator = require('./PlansLocator') const { isStandaloneAiAddOnPlanCode, @@ -8,7 +7,6 @@ const { } = require('./PaymentProviderEntities') const SubscriptionFormatters = require('./SubscriptionFormatters') const SubscriptionLocator = require('./SubscriptionLocator') -const SubscriptionUpdater = require('./SubscriptionUpdater') const InstitutionsGetter = require('../Institutions/InstitutionsGetter') const InstitutionsManager = require('../Institutions/InstitutionsManager') const PublishersGetter = require('../Publishers/PublishersGetter') @@ -227,6 +225,7 @@ async function buildUsersSubscriptionViewModel(user, locale = 'en') { // don't return subscription payment information delete personalSubscription.paymentProvider delete personalSubscription.recurly + delete personalSubscription.recurlySubscription_id const tax = paymentRecord.subscription.taxAmount || 0 // Some plans allow adding more seats than the base plan provides. 
@@ -374,15 +373,6 @@ async function buildUsersSubscriptionViewModel(user, locale = 'en') { } } -/** - * @param {{_id: string}} user - * @returns {Promise} - */ -async function getBestSubscription(user) { - const { bestSubscription } = await getUsersSubscriptionDetails(user) - return bestSubscription -} - /** * @param {{_id: string}} user * @returns {Promise<{bestSubscription:Subscription,individualSubscription:DBSubscription|null,memberGroupSubscriptions:DBSubscription[]}>} @@ -400,15 +390,18 @@ async function getUsersSubscriptionDetails(user) { if ( individualSubscription && !individualSubscription.customAccount && - individualSubscription.recurlySubscription_id && - !individualSubscription.recurlyStatus?.state + SubscriptionHelper.getPaymentProviderSubscriptionId( + individualSubscription + ) && + !SubscriptionHelper.getPaidSubscriptionState(individualSubscription) ) { - const recurlySubscription = await RecurlyWrapper.promises.getSubscription( - individualSubscription.recurlySubscription_id, - { includeAccount: true } + const paymentResults = await Modules.promises.hooks.fire( + 'getPaymentFromRecordPromise', + individualSubscription ) - await SubscriptionUpdater.promises.updateSubscriptionFromRecurly( - recurlySubscription, + await Modules.promises.hooks.fire( + 'syncSubscription', + paymentResults[0]?.subscription, individualSubscription ) individualSubscription = @@ -477,7 +470,7 @@ async function getUsersSubscriptionDetails(user) { return { bestSubscription, individualSubscription, memberGroupSubscriptions } } -function buildPlansList(currentPlan) { +function buildPlansList(currentPlan, isInTrial) { const { plans } = Settings const allPlans = {} @@ -491,7 +484,11 @@ function buildPlansList(currentPlan) { result.planCodesChangingAtTermEnd = _.map( _.filter(plans, plan => { if (!plan.hideFromUsers) { - return SubscriptionHelper.shouldPlanChangeAtTermEnd(currentPlan, plan) + return SubscriptionHelper.shouldPlanChangeAtTermEnd( + currentPlan, + plan, + isInTrial + ) } }), 'planCode' @@ -540,7 +537,8 @@ function _isPlanEqualOrBetter(planA, planB) { function _getRemainingTrialDays(subscription) { const now = new Date() - const trialEndDate = subscription.recurlyStatus?.trialEndsAt + const trialEndDate = + SubscriptionHelper.getSubscriptionTrialEndsAt(subscription) return trialEndDate && trialEndDate > now ? 
Math.ceil( (trialEndDate.getTime() - now.getTime()) / (24 * 60 * 60 * 1000) @@ -575,8 +573,8 @@ function buildGroupSubscriptionForView(groupSubscription) { } } -function buildPlansListForSubscriptionDash(currentPlan) { - const allPlansData = buildPlansList(currentPlan) +function buildPlansListForSubscriptionDash(currentPlan, isInTrial) { + const allPlansData = buildPlansList(currentPlan, isInTrial) const plans = [] // only list individual and visible plans for "change plans" UI if (allPlansData.studentAccounts) { @@ -605,10 +603,8 @@ module.exports = { buildUsersSubscriptionViewModel: callbackify(buildUsersSubscriptionViewModel), buildPlansList, buildPlansListForSubscriptionDash, - getBestSubscription: callbackify(getBestSubscription), promises: { buildUsersSubscriptionViewModel, - getBestSubscription, getUsersSubscriptionDetails, }, } diff --git a/services/web/app/src/Features/Subscription/TeamInvitesController.mjs b/services/web/app/src/Features/Subscription/TeamInvitesController.mjs index ca508755e6..1eb9ac2907 100644 --- a/services/web/app/src/Features/Subscription/TeamInvitesController.mjs +++ b/services/web/app/src/Features/Subscription/TeamInvitesController.mjs @@ -4,6 +4,7 @@ import OError from '@overleaf/o-error' import TeamInvitesHandler from './TeamInvitesHandler.js' import SessionManager from '../Authentication/SessionManager.js' import SubscriptionLocator from './SubscriptionLocator.js' +import SubscriptionHelper from './SubscriptionHelper.js' import ErrorController from '../Errors/ErrorController.js' import EmailHelper from '../Helpers/EmailHelper.js' import UserGetter from '../User/UserGetter.js' @@ -14,6 +15,7 @@ import EmailHandler from '../Email/EmailHandler.js' import { RateLimiter } from '../../infrastructure/RateLimiter.js' import Modules from '../../infrastructure/Modules.js' import UserAuditLogHandler from '../User/UserAuditLogHandler.js' +import { sanitizeSessionUserForFrontEnd } from '../../infrastructure/FrontEndUser.js' const rateLimiters = { resendGroupInvite: new RateLimiter('resend-group-invite', { @@ -36,10 +38,15 @@ async function createInvite(req, res, next) { } try { + const auditLog = { + initiatorId: teamManagerId, + ipAddress: req.ip, + } const invitedUserData = await TeamInvitesHandler.promises.createInvite( teamManagerId, subscription, - email + email, + auditLog ) return res.json({ user: invitedUserData }) } catch (err) { @@ -82,12 +89,10 @@ async function viewInvite(req, res, next) { const personalSubscription = await SubscriptionLocator.promises.getUsersSubscription(userId) - const hasIndividualRecurlySubscription = - personalSubscription && - personalSubscription.groupPlan === false && - personalSubscription.recurlyStatus?.state !== 'canceled' && - personalSubscription.recurlySubscription_id && - personalSubscription.recurlySubscription_id !== '' + const hasIndividualPaidSubscription = + SubscriptionHelper.isIndividualActivePaidSubscription( + personalSubscription + ) if (subscription?.managedUsersEnabled) { if (!subscription.populated('groupPolicy')) { @@ -128,6 +133,9 @@ async function viewInvite(req, res, next) { logger.error({ err }, 'error getting subscription admin email') } + const usersSubscription = + await SubscriptionLocator.promises.getUserSubscriptionStatus(userId) + return res.render('subscriptions/team/invite-managed', { inviterName: invite.inviterName, inviteToken: invite.token, @@ -136,7 +144,8 @@ async function viewInvite(req, res, next) { currentManagedUserAdminEmail, groupSSOActive, subscriptionId: subscription._id.toString(), 
- user: sessionUser, + user: sanitizeSessionUserForFrontEnd(sessionUser), + usersSubscription, }) } else { let currentManagedUserAdminEmail @@ -150,13 +159,13 @@ async function viewInvite(req, res, next) { return res.render('subscriptions/team/invite', { inviterName: invite.inviterName, inviteToken: invite.token, - hasIndividualRecurlySubscription, + hasIndividualPaidSubscription, expired: req.query.expired, userRestrictions: Array.from(req.userRestrictions || []), currentManagedUserAdminEmail, groupSSOActive, subscriptionId: subscription._id.toString(), - user: sessionUser, + user: sanitizeSessionUserForFrontEnd(sessionUser), }) } } else { @@ -197,7 +206,8 @@ async function acceptInvite(req, res, next) { const subscription = await TeamInvitesHandler.promises.acceptInvite( token, - userId + userId, + req.ip ) const groupSSOActive = ( await Modules.promises.hooks.fire('hasGroupSSOEnabled', subscription) diff --git a/services/web/app/src/Features/Subscription/TeamInvitesHandler.js b/services/web/app/src/Features/Subscription/TeamInvitesHandler.js index 45a0495353..f7a4908355 100644 --- a/services/web/app/src/Features/Subscription/TeamInvitesHandler.js +++ b/services/web/app/src/Features/Subscription/TeamInvitesHandler.js @@ -22,6 +22,7 @@ const { callbackifyMultiResult, } = require('@overleaf/promise-utils') const NotificationsBuilder = require('../Notifications/NotificationsBuilder') +const RecurlyClient = require('./RecurlyClient') async function getInvite(token) { const subscription = await Subscription.findOne({ @@ -64,19 +65,64 @@ async function importInvite(subscription, inviterName, email, token, sentAt) { return subscription.save() } -async function acceptInvite(token, userId) { +async function _deleteUserSubscription(userId, ipAddress) { + // Delete released user subscription to make it on a free plan + const subscription = + await SubscriptionLocator.promises.getUsersSubscription(userId) + + if (subscription) { + logger.debug( + { + subscriptionId: subscription._id, + }, + 'deleting user subscription' + ) + + const deleterData = { + id: userId, + ip: ipAddress, + } + await SubscriptionUpdater.promises.deleteSubscription( + subscription, + deleterData + ) + + // Terminate the subscription in Recurly + if (subscription.recurlySubscription_id) { + try { + await RecurlyClient.promises.terminateSubscriptionByUuid( + subscription.recurlySubscription_id + ) + } catch (err) { + logger.error( + { err, subscriptionId: subscription._id }, + 'terminating subscription failed' + ) + } + } + } +} + +async function acceptInvite(token, userId, ipAddress) { const { invite, subscription } = await getInvite(token) if (!invite) { throw new Errors.NotFoundError('invite not found') } + const auditLog = { initiatorId: userId, ipAddress } - await SubscriptionUpdater.promises.addUserToGroup(subscription._id, userId) + await SubscriptionUpdater.promises.addUserToGroup( + subscription._id, + userId, + auditLog + ) if (subscription.managedUsersEnabled) { + await _deleteUserSubscription(userId, ipAddress) await Modules.promises.hooks.fire( 'enrollInManagedSubscription', userId, - subscription + subscription, + auditLog ) } if (subscription.ssoConfig) { @@ -146,9 +192,11 @@ async function _createInvite(subscription, email, inviter) { emailData => emailData.email === email ) if (isInvitingSelf) { + const auditLog = { initiatorId: inviter._id } await SubscriptionUpdater.promises.addUserToGroup( subscription._id, - inviter._id + inviter._id, + auditLog ) // legacy: remove any invite that might have been created in 
the past diff --git a/services/web/app/src/Features/Templates/TemplatesController.js b/services/web/app/src/Features/Templates/TemplatesController.js index a8730a61be..39c4d50ae0 100644 --- a/services/web/app/src/Features/Templates/TemplatesController.js +++ b/services/web/app/src/Features/Templates/TemplatesController.js @@ -4,13 +4,9 @@ const TemplatesManager = require('./TemplatesManager') const ProjectHelper = require('../Project/ProjectHelper') const logger = require('@overleaf/logger') const { expressify } = require('@overleaf/promise-utils') -const SplitTestHandler = require('../SplitTests/SplitTestHandler') const TemplatesController = { async getV1Template(req, res) { - // Read split test assignment so that it's available for Pug to read - await SplitTestHandler.promises.getAssignment(req, res, 'core-pug-bs5') - const templateVersionId = req.params.Template_version_id const templateId = req.query.id if (!/^[0-9]+$/.test(templateVersionId) || !/^[0-9]+$/.test(templateId)) { diff --git a/services/web/app/src/Features/Tutorial/TutorialController.mjs b/services/web/app/src/Features/Tutorial/TutorialController.mjs index e5fc940b34..b4ab3f6727 100644 --- a/services/web/app/src/Features/Tutorial/TutorialController.mjs +++ b/services/web/app/src/Features/Tutorial/TutorialController.mjs @@ -15,6 +15,7 @@ const VALID_KEYS = [ 'editor-popup-ux-survey', 'wf-features-moved', 'review-mode', + 'new-error-logs-promo', ] async function completeTutorial(req, res, next) { diff --git a/services/web/app/src/Features/User/SAMLIdentityManager.js b/services/web/app/src/Features/User/SAMLIdentityManager.js index dc790c59ca..0d3c382775 100644 --- a/services/web/app/src/Features/User/SAMLIdentityManager.js +++ b/services/web/app/src/Features/User/SAMLIdentityManager.js @@ -210,9 +210,13 @@ async function getUser(providerId, externalUserId, userIdAttribute) { ) } const user = await User.findOne({ - 'samlIdentifiers.externalUserId': externalUserId.toString(), - 'samlIdentifiers.providerId': providerId.toString(), - 'samlIdentifiers.userIdAttribute': userIdAttribute.toString(), + samlIdentifiers: { + $elemMatch: { + externalUserId: externalUserId.toString(), + providerId: providerId.toString(), + userIdAttribute: userIdAttribute.toString(), + }, + }, }).exec() return user diff --git a/services/web/app/src/Features/User/UserAuditLogHandler.js b/services/web/app/src/Features/User/UserAuditLogHandler.js index b1d404303e..87cd810161 100644 --- a/services/web/app/src/Features/User/UserAuditLogHandler.js +++ b/services/web/app/src/Features/User/UserAuditLogHandler.js @@ -8,6 +8,7 @@ function _canHaveNoIpAddressId(operation, info) { if (operation === 'must-reset-password-set') return true if (operation === 'remove-email' && info.script) return true if (operation === 'release-managed-user' && info.script) return true + if (operation === 'unlink-dropbox' && info.batch) return true return false } diff --git a/services/web/app/src/Features/User/UserController.js b/services/web/app/src/Features/User/UserController.js index e4186d39a8..24a2ba9119 100644 --- a/services/web/app/src/Features/User/UserController.js +++ b/services/web/app/src/Features/User/UserController.js @@ -387,6 +387,9 @@ async function updateUserSettings(req, res, next) { if (req.body.mathPreview != null) { user.ace.mathPreview = req.body.mathPreview } + if (req.body.breadcrumbs != null) { + user.ace.breadcrumbs = Boolean(req.body.breadcrumbs) + } if (req.body.referencesSearchMode != null) { const mode = req.body.referencesSearchMode === 'simple' ? 
'simple' : 'advanced' @@ -503,6 +506,12 @@ async function expireDeletedUsersAfterDuration(req, res, next) { res.sendStatus(204) } +async function listAllUsers(req, res, next) { + const users = await UserGetter.promises.getAllUsers() + + res.json(users) +} + module.exports = { clearSessions: expressify(clearSessions), changePassword: expressify(changePassword), @@ -515,4 +524,5 @@ module.exports = { expireDeletedUsersAfterDuration: expressify(expireDeletedUsersAfterDuration), ensureAffiliationMiddleware: expressify(ensureAffiliationMiddleware), ensureAffiliation, + listAllUsers: expressify(listAllUsers), } diff --git a/services/web/app/src/Features/User/UserDeleter.js b/services/web/app/src/Features/User/UserDeleter.js index 721943b163..c8d9891bf9 100644 --- a/services/web/app/src/Features/User/UserDeleter.js +++ b/services/web/app/src/Features/User/UserDeleter.js @@ -60,8 +60,12 @@ async function deleteUser(userId, options) { await _createDeletedUser(user, options) logger.info({ userId }, 'deleting user projects') await ProjectDeleter.promises.deleteUsersProjects(user._id) - logger.info({ userId }, 'sending deletion email to user') - await _sendDeleteEmail(user, options.force) + if (options.skipEmail) { + logger.info({ userId }, 'skipping sending deletion email to user') + } else { + logger.info({ userId }, 'sending deletion email to user') + await _sendDeleteEmail(user, options.force) + } logger.info({ userId }, 'deleting user record') await deleteMongoUser(user._id) logger.info({ userId }, 'user deletion complete') @@ -83,17 +87,29 @@ async function deleteMongoUser(userId) { } async function expireDeletedUser(userId) { - await Modules.promises.hooks.fire('expireDeletedUser', userId) - const deletedUser = await DeletedUser.findOne({ - 'deleterData.deletedUserId': userId, - }).exec() - - await Feedback.deleteMany({ userId }).exec() - await OnboardingDataCollectionManager.deleteOnboardingDataCollection(userId) - - deletedUser.user = undefined - deletedUser.deleterData.deleterIpAddress = undefined - await deletedUser.save() + logger.info({ userId }, 'expiring deleted user') + try { + logger.info({ userId }, 'firing expireDeletedUser hook') + await Modules.promises.hooks.fire('expireDeletedUser', userId) + logger.info({ userId }, 'removing deleted user feedback records') + await Feedback.deleteMany({ userId }).exec() + logger.info({ userId }, 'removing deleted user onboarding data') + await OnboardingDataCollectionManager.deleteOnboardingDataCollection(userId) + logger.info({ userId }, 'redacting PII from the deleted user record') + const deletedUser = await DeletedUser.findOne({ + 'deleterData.deletedUserId': userId, + }).exec() + deletedUser.user = undefined + deletedUser.deleterData.deleterIpAddress = undefined + await deletedUser.save() + logger.info({ userId }, 'deleted user expiry complete') + } catch (error) { + logger.warn( + { error, userId }, + 'something went wrong expiring the deleted user' + ) + throw error + } } async function expireDeletedUsersAfterDuration() { @@ -108,11 +124,27 @@ async function expireDeletedUsersAfterDuration() { if (deletedUsers.length === 0) { return } - - for (let i = 0; i < deletedUsers.length; i++) { - const deletedUserId = deletedUsers[i].deleterData.deletedUserId - await expireDeletedUser(deletedUserId) - await UserAuditLogEntry.deleteMany({ userId: deletedUserId }).exec() + logger.info( + { deletedUsers: deletedUsers.length, retentionPeriodInDays: DURATION }, + 'expiring batch of deleted users older than retention period' + ) + try { + for (let i = 
0; i < deletedUsers.length; i++) { + const deletedUserId = deletedUsers[i].deleterData.deletedUserId + await expireDeletedUser(deletedUserId) + logger.info({ deletedUserId }, 'removing deleted user audit log entries') + await UserAuditLogEntry.deleteMany({ userId: deletedUserId }).exec() + } + logger.info( + { deletedUsers: deletedUsers.length }, + 'batch of deleted users expired successfully' + ) + } catch (error) { + logger.warn( + { error }, + 'something went wrong expiring batch of deleted users' + ) + throw error } } diff --git a/services/web/app/src/Features/User/UserEmailsController.js b/services/web/app/src/Features/User/UserEmailsController.js index 54ace10cb0..8a7c2bbeb4 100644 --- a/services/web/app/src/Features/User/UserEmailsController.js +++ b/services/web/app/src/Features/User/UserEmailsController.js @@ -530,7 +530,7 @@ async function primaryEmailCheckPage(req, res) { const { variant } = await SplitTestHandler.promises.getAssignment( req, res, - 'auth-pages-bs5' + 'bs5-auth-pages' ) const template = diff --git a/services/web/app/src/Features/User/UserGetter.js b/services/web/app/src/Features/User/UserGetter.js index bce4568880..21f8f13e8f 100644 --- a/services/web/app/src/Features/User/UserGetter.js +++ b/services/web/app/src/Features/User/UserGetter.js @@ -150,6 +150,44 @@ async function getWritefullData(userId) { } } +async function getTotalProjectStorageForUser(userId) { + const ProjectEntityHandler = require('../Project/ProjectEntityHandler') + const { Project } = require('../../models/Project') + const fs = require('fs') + const path = require('path') + + let totalsize = 0 + // only owned projects, not shared + const ownedProjects = await Project.find( + { owner_ref: userId }, + '_id' + ).exec() + + for (let i = 0; i < ownedProjects.length; i++) { + const project = ownedProjects[i] + const files = await ProjectEntityHandler.promises.getAllFiles(project._id) + + for (const [filePath, file] of Object.entries(files)) { + const f = path.join(settings.filestore.stores.user_files, project._id.toString() + '_' + file._id.toString()) + + const fstat = await fs.promises.stat(f) + const fsize = fstat.size + totalsize += fsize + } + } // foreach Project + return { count: ownedProjects.length, total: totalsize } // bytes +} + +function formatBytes(bytes) { + const units = ['B', 'KB', 'MB', 'GB', 'TB'] + let i = 0 + while (bytes >= 1024 && i < units.length - 1) { + bytes /= 1024 + i++ + } + return `${bytes.toFixed(2)} ${units[i]}` +} + const UserGetter = { getSsoUsersAtInstitution: callbackify(getSsoUsersAtInstitution), @@ -269,6 +307,7 @@ const UserGetter = { getUsers(query, projection, callback) { try { query = normalizeMultiQuery(query) + if (query?._id?.$in?.length === 0) return callback(null, []) // shortcut for getUsers([]) db.users.find(query, { projection }).toArray(callback) } catch (err) { callback(err) @@ -285,6 +324,43 @@ const UserGetter = { }) }, getWritefullData: callbackify(getWritefullData), + + getAllUsers(callback) { + const projection = { + _id: 1, + email: 1, + first_name: 1, + last_name: 1, + lastLoggedIn: 1, + signUpDate: 1, + loginCount: 1, + isAdmin: 1, + suspended: 1, + institution: 1, + } + + const query = { 'emails.email': { $exists: true } } + + db.users.find(query, { projection }).toArray(async (err, users) => { + if (err) { + console.error('Error fetching users:', err) + return callback(err) + } + for (let i = 0; i < users.length; i++) { + const user = users[i] + user.signUpDateformatted =
moment(user.signUpDate).format('DD/MM/YYYY') + user.lastLoggedInformatted = moment(user.lastLoggedIn).format('DD/MM/YYYY') + const ProjectsInfo = await getTotalProjectStorageForUser(user._id) + + user.projectsSize = ProjectsInfo.total + user.projectsSizeFormatted = formatBytes(ProjectsInfo.total) + user.projectsCount = ProjectsInfo.count + } + + callback(null, users) + }) + + } } const decorateFullEmails = ( diff --git a/services/web/app/src/Features/User/UserPagesController.mjs b/services/web/app/src/Features/User/UserPagesController.mjs index 6f7bb7802d..d353ca88e3 100644 --- a/services/web/app/src/Features/User/UserPagesController.mjs +++ b/services/web/app/src/Features/User/UserPagesController.mjs @@ -176,6 +176,7 @@ async function settingsPage(req, res) { gitBridgeEnabled: Settings.enableGitBridge, isSaas: Features.hasFeature('saas'), memberOfSSOEnabledGroups, + capabilities: [...req.capabilitySet], }) } @@ -195,7 +196,7 @@ async function reconfirmAccountPage(req, res) { const { variant } = await SplitTestHandler.promises.getAssignment( req, res, - 'auth-pages-bs5' + 'bs5-auth-pages' ) const template = diff --git a/services/web/app/src/Features/User/UserUpdater.js b/services/web/app/src/Features/User/UserUpdater.js index 627e73875d..f21ee9a1ed 100644 --- a/services/web/app/src/Features/User/UserUpdater.js +++ b/services/web/app/src/Features/User/UserUpdater.js @@ -11,7 +11,6 @@ const EmailHandler = require('../Email/EmailHandler') const EmailHelper = require('../Helpers/EmailHelper') const Errors = require('../Errors/Errors') const NewsletterManager = require('../Newsletter/NewsletterManager') -const RecurlyWrapper = require('../Subscription/RecurlyWrapper') const UserAuditLogHandler = require('./UserAuditLogHandler') const AnalyticsManager = require('../Analytics/AnalyticsManager') const SubscriptionLocator = require('../Subscription/SubscriptionLocator') @@ -252,7 +251,11 @@ async function setDefaultEmailAddress( } try { - await RecurlyWrapper.promises.updateAccountEmailAddress(user._id, email) + await Modules.promises.hooks.fire( + 'updateAccountEmailAddress', + user._id, + email + ) } catch (error) { // errors are ignored } diff --git a/services/web/app/src/Features/UserMembership/UserMembershipController.mjs b/services/web/app/src/Features/UserMembership/UserMembershipController.mjs index aaa8fa5812..4be1221255 100644 --- a/services/web/app/src/Features/UserMembership/UserMembershipController.mjs +++ b/services/web/app/src/Features/UserMembership/UserMembershipController.mjs @@ -31,8 +31,11 @@ async function manageGroupMembers(req, res, next) { ) const ssoConfig = await SSOConfig.findById(subscription.ssoConfig).exec() const plan = PlansLocator.findLocalPlanInSettings(subscription.planCode) - const userId = SessionManager.getLoggedInUserId(req.session) + const userId = SessionManager.getLoggedInUserId(req.session)?.toString() const isAdmin = subscription.admin_id.toString() === userId + const isUserGroupManager = + Boolean(subscription.manager_ids?.some(id => id.toString() === userId)) && + !isAdmin const recurlySubscription = subscription.recurlySubscription_id ? 
await RecurlyClient.promises.getSubscription( subscription.recurlySubscription_id @@ -51,6 +54,7 @@ async function manageGroupMembers(req, res, next) { users, groupSize: subscription.membersLimit, managedUsersActive: subscription.managedUsersEnabled, + isUserGroupManager, groupSSOActive: ssoConfig?.enabled, canUseFlexibleLicensing: plan?.canUseFlexibleLicensing, canUseAddSeatsFeature, diff --git a/services/web/app/src/infrastructure/ExpressLocals.js b/services/web/app/src/infrastructure/ExpressLocals.js index eae1b48219..34eda0ba2d 100644 --- a/services/web/app/src/infrastructure/ExpressLocals.js +++ b/services/web/app/src/infrastructure/ExpressLocals.js @@ -19,6 +19,7 @@ const { const { addOptionalCleanupHandlerAfterDrainingConnections, } = require('./GracefulShutdown') +const { sanitizeSessionUserForFrontEnd } = require('./FrontEndUser') const IEEE_BRAND_ID = Settings.ieeeBrandId @@ -300,11 +301,7 @@ module.exports = function (webRouter, privateApiRouter, publicApiRouter) { webRouter.use(function (req, res, next) { const currentUser = SessionManager.getSessionUser(req.session) if (currentUser != null) { - res.locals.user = { - email: currentUser.email, - first_name: currentUser.first_name, - last_name: currentUser.last_name, - } + res.locals.user = sanitizeSessionUserForFrontEnd(currentUser) } next() }) diff --git a/services/web/app/src/infrastructure/FrontEndUser.js b/services/web/app/src/infrastructure/FrontEndUser.js new file mode 100644 index 0000000000..5a4af9868c --- /dev/null +++ b/services/web/app/src/infrastructure/FrontEndUser.js @@ -0,0 +1,15 @@ +function sanitizeSessionUserForFrontEnd(sessionUser) { + if (sessionUser != null) { + return { + email: sessionUser.email, + first_name: sessionUser.first_name, + last_name: sessionUser.last_name, + } + } + + return null +} + +module.exports = { + sanitizeSessionUserForFrontEnd, +} diff --git a/services/web/app/src/infrastructure/mongodb.js b/services/web/app/src/infrastructure/mongodb.js index 7fc1039140..24103b2d82 100644 --- a/services/web/app/src/infrastructure/mongodb.js +++ b/services/web/app/src/infrastructure/mongodb.js @@ -33,7 +33,6 @@ addConnectionDrainer('mongodb', async () => { const internalDb = mongoClient.db() const db = { contacts: internalDb.collection('contacts'), - deletedFiles: internalDb.collection('deletedFiles'), deletedProjects: internalDb.collection('deletedProjects'), deletedSubscriptions: internalDb.collection('deletedSubscriptions'), deletedUsers: internalDb.collection('deletedUsers'), @@ -62,7 +61,6 @@ const db = { projectHistoryFailures: internalDb.collection('projectHistoryFailures'), projectHistoryGlobalBlobs: internalDb.collection('projectHistoryGlobalBlobs'), projectHistoryLabels: internalDb.collection('projectHistoryLabels'), - projectHistoryMetaData: internalDb.collection('projectHistoryMetaData'), projectHistorySyncState: internalDb.collection('projectHistorySyncState'), projectInvites: internalDb.collection('projectInvites'), projects: internalDb.collection('projects'), @@ -131,10 +129,15 @@ async function getCollectionInternal(name) { return internalDb.collection(name) } +async function waitForDb() { + await connectionPromise +} + module.exports = { db, ObjectId, connectionPromise, + waitForDb, getCollectionNames, getCollectionInternal, cleanupTestDatabase, diff --git a/services/web/app/src/models/DeletedFile.js b/services/web/app/src/models/DeletedFile.js deleted file mode 100644 index 45d30d8099..0000000000 --- a/services/web/app/src/models/DeletedFile.js +++ /dev/null @@ -1,21 +0,0 @@ -const 
mongoose = require('../infrastructure/Mongoose') -const { Schema } = mongoose - -const DeletedFileSchema = new Schema( - { - name: String, - projectId: Schema.ObjectId, - created: { - type: Date, - }, - linkedFileData: { type: Schema.Types.Mixed }, - hash: { - type: String, - }, - deletedAt: { type: Date }, - }, - { collection: 'deletedFiles', minimize: false } -) - -exports.DeletedFile = mongoose.model('DeletedFile', DeletedFileSchema) -exports.DeletedFileSchema = DeletedFileSchema diff --git a/services/web/app/src/models/GroupPolicy.js b/services/web/app/src/models/GroupPolicy.js index e975834008..55728a2415 100644 --- a/services/web/app/src/models/GroupPolicy.js +++ b/services/web/app/src/models/GroupPolicy.js @@ -27,6 +27,9 @@ const GroupPolicySchema = new Schema( // User can't use the chat feature userCannotUseChat: Boolean, + + // User can't use the Dropbox feature + userCannotUseDropbox: Boolean, }, { minimize: false } ) diff --git a/services/web/app/src/models/Project.js b/services/web/app/src/models/Project.js index 145c8f9023..69db145038 100644 --- a/services/web/app/src/models/Project.js +++ b/services/web/app/src/models/Project.js @@ -12,18 +12,6 @@ const DeletedDocSchema = new Schema({ deletedAt: { type: Date }, }) -const DeletedFileSchema = new Schema({ - name: String, - created: { - type: Date, - }, - linkedFileData: { type: Schema.Types.Mixed }, - hash: { - type: String, - }, - deletedAt: { type: Date }, -}) - const ProjectSchema = new Schema( { name: { type: String, default: 'new project' }, @@ -54,7 +42,6 @@ const ProjectSchema = new Schema( archived: { type: Schema.Types.Mixed }, trashed: [{ type: ObjectId, ref: 'User' }], deletedDocs: [DeletedDocSchema], - deletedFiles: [DeletedFileSchema], imageName: { type: String }, brandVariationId: { type: String }, track_changes: { type: Object }, diff --git a/services/web/app/src/models/SSOConfig.js b/services/web/app/src/models/SSOConfig.js index 5d50d51d02..6734b29f57 100644 --- a/services/web/app/src/models/SSOConfig.js +++ b/services/web/app/src/models/SSOConfig.js @@ -10,6 +10,7 @@ const SSOConfigSchema = new Schema( userLastNameAttribute: { type: String }, validated: { type: Boolean, default: false }, enabled: { type: Boolean, default: false }, + useSettingsUKAMF: { type: Boolean, default: false }, }, { diff --git a/services/web/app/src/models/Subscription.js b/services/web/app/src/models/Subscription.js index 92a7739515..4a5fed6f1f 100644 --- a/services/web/app/src/models/Subscription.js +++ b/services/web/app/src/models/Subscription.js @@ -25,6 +25,13 @@ const SubscriptionSchema = new Schema( invited_emails: [String], teamInvites: [TeamInviteSchema], recurlySubscription_id: String, + lastSuccesfulSubscription: { + planCode: { + type: String, + }, + addOns: Schema.Types.Mixed, + }, + timesRevertedDueToFailedPayment: { type: Number, default: 0 }, teamName: { type: String }, teamNotice: { type: String }, planCode: { type: String }, diff --git a/services/web/app/src/models/User.js b/services/web/app/src/models/User.js index d228c46b82..c1701023c4 100644 --- a/services/web/app/src/models/User.js +++ b/services/web/app/src/models/User.js @@ -97,6 +97,7 @@ const UserSchema = new Schema( fontFamily: { type: String }, lineHeight: { type: String }, mathPreview: { type: Boolean, default: true }, + breadcrumbs: { type: Boolean, default: true }, referencesSearchMode: { type: String, default: 'advanced' }, // 'advanced' or 'simple' enableNewEditor: { type: Boolean }, }, diff --git a/services/web/app/src/router.mjs 
b/services/web/app/src/router.mjs index f87297c35c..7851a4a66f 100644 --- a/services/web/app/src/router.mjs +++ b/services/web/app/src/router.mjs @@ -182,7 +182,7 @@ const rateLimiters = { duration: 60, }), sendConfirmation: new RateLimiter('send-confirmation', { - points: 1, + points: 2, duration: 60, }), sendChatMessage: new RateLimiter('send-chat-message', { @@ -915,6 +915,12 @@ async function initialize(webRouter, privateApiRouter, publicApiRouter) { NotificationsController.markNotificationAsRead ) + webRouter.get( + '/user/notification/:notificationId', + AuthenticationController.requireLogin(), + NotificationsController.getNotification + ) + // Deprecated in favour of /internal/project/:project_id but still used by versioning privateApiRouter.get( '/project/:project_id/details', diff --git a/services/web/app/views/_cookie_banner.pug b/services/web/app/views/_cookie_banner.pug index a164e48e83..2d5631f9c8 100644 --- a/services/web/app/views/_cookie_banner.pug +++ b/services/web/app/views/_cookie_banner.pug @@ -1,5 +1,5 @@ -.cookie-banner.hidden-print.hidden +section.cookie-banner.hidden-print.hidden(aria-label="Cookie banner") .cookie-banner-content We only use cookies for essential purposes and to improve your experience on our site. You can find out more in our cookie policy. .cookie-banner-actions button(type="button" class="btn btn-link btn-sm" data-ol-cookie-banner-set-consent="essential") Essential cookies only - button(type="button" class="btn btn-primary btn-sm" data-ol-cookie-banner-set-consent="all") Accept all cookies \ No newline at end of file + button(type="button" class="btn btn-primary btn-sm" data-ol-cookie-banner-set-consent="all") Accept all cookies diff --git a/services/web/app/views/_customer_io.pug b/services/web/app/views/_customer_io.pug index 81d75f7d7f..781dfaab13 100644 --- a/services/web/app/views/_customer_io.pug +++ b/services/web/app/views/_customer_io.pug @@ -1,10 +1,12 @@ if(customerIoEnabled && ExposedSettings.cioWriteKey && ExposedSettings.cioSiteId) - script(type="text/javascript", id="cio-loader", nonce=scriptNonce, data-cio-write-key=ExposedSettings.cioWriteKey, data-cio-site-id=ExposedSettings.cioSiteId, data-session-analytics-id=getSessionAnalyticsId(), data-user-id=getLoggedInUserId()). + script(type="text/javascript", id="cio-loader", nonce=scriptNonce, data-best-subscription=(usersBestSubscription && usersBestSubscription.type), data-cio-write-key=ExposedSettings.cioWriteKey, data-cio-site-id=ExposedSettings.cioSiteId, data-session-analytics-id=getSessionAnalyticsId(), data-user-id=getLoggedInUserId()). 
var cioSettings = document.querySelector('#cio-loader').dataset; var analyticsId = cioSettings.sessionAnalyticsId; var siteId = cioSettings.cioSiteId; var writeKey = cioSettings.cioWriteKey; var userId = cioSettings.userId; + var usersBestSubscription = cioSettings.bestSubscription + !function(){var i="cioanalytics", analytics=(window[i]=window[i]||[]);if(!analytics.initialize)if(analytics.invoked)window.console&&console.error&&console.error("Snippet included twice.");else{analytics.invoked=!0;analytics.methods=["trackSubmit","trackClick","trackLink","trackForm","pageview","identify","reset","group","track","ready","alias","debug","page","once","off","on","addSourceMiddleware","addIntegrationMiddleware","setAnonymousId","addDestinationMiddleware"];analytics.factory=function(e){return function(){var t=Array.prototype.slice.call(arguments);t.unshift(e);analytics.push(t);return analytics}};for(var e=0;e 0) - p.thanks The Overleaf Bonus Program has been discontinued, but you'll continue to have access to the features you already earned. - else - p.thanks The Overleaf Bonus Program has been discontinued. - p.thanks Please contact us if you have any questions. - - if (refered_user_count > 0) - .row.ab-bonus - .col-md-10.col-md-offset-1.bonus-banner(style="position: relative; height: 30px; margin-top: 20px;") - - for (var i = 0; i <= 10; i++) { - if (refered_user_count == i) - .number(style="left: "+i+"0%").active #{i} - else - .number(style="left: "+i+"0%") #{i} - - } - - .row.ab-bonus - .col-md-10.col-md-offset-1.bonus-banner - .progress - .progress-bar.progress-bar-info(style="width: "+refered_user_count+"0%") - - .row.ab-bonus - .col-md-10.col-md-offset-1.bonus-banner(style="position: relative; height: 110px;") - .perk(style="left: 10%;", class = refered_user_count >= 1 ? "active" : "") #{translate("one_free_collab")} - .perk(style="left: 30%;", class = refered_user_count >= 3 ? "active" : "") #{translate("three_free_collab")} - .perk(style="left: 60%;", class = refered_user_count >= 6 ? "active" : "") #{translate("free_dropbox_and_history")} + #{translate("three_free_collab")} - .perk(style="left: 90%;", class = refered_user_count >= 9 ? "active" : "") #{translate("free_dropbox_and_history")} + #{translate("unlimited_collabs")} - .row   - - .row.ab-bonus - .col-md-10.col-md-offset-1.bonus-banner.bonus-status - if (refered_user_count == 1) - p.thanks You’ve introduced 1 person to #{settings.appName}. + .card-body + .container-fluid + .row + .col-lg-10.offset-lg-1 + if (refered_user_count > 0) + p.thanks The Overleaf Bonus Program has been discontinued, but you'll continue to have access to the features you already earned. else - p.thanks You’ve introduced #{refered_user_count} people to #{settings.appName}. + p.thanks The Overleaf Bonus Program has been discontinued. + p.thanks Please contact us if you have any questions. + + if (refered_user_count > 0) + .row.ab-bonus + .col-lg-10.offset-lg-1(style="position: relative; height: 30px; margin-top: 20px;") + - for (var i = 0; i <= 10; i++) { + if (refered_user_count == i) + .number(style="left: "+i+"0%").active #{i} + else + .number(style="left: "+i+"0%") #{i} + - } + + .row.ab-bonus + .col-lg-10.offset-lg-1 + .progress + .progress-bar.progress-bar-info(style="width: "+refered_user_count+"0%") + + .row.ab-bonus + .col-lg-10.offset-lg-1(style="position: relative; height: 110px;") + .perk(style="left: 10%;", class = refered_user_count >= 1 ? 
"active" : "") #{translate("one_free_collab")} + .perk(style="left: 30%;", class = refered_user_count >= 3 ? "active" : "") #{translate("three_free_collab")} + .perk(style="left: 60%;", class = refered_user_count >= 6 ? "active" : "") #{translate("free_dropbox_and_history")} + #{translate("three_free_collab")} + .perk(style="left: 90%;", class = refered_user_count >= 9 ? "active" : "") #{translate("free_dropbox_and_history")} + #{translate("unlimited_collabs")} + .row   + + .row.ab-bonus + .col-lg-10.offset-lg-1.bonus-status + if (refered_user_count == 1) + p.thanks You’ve introduced 1 person to #{settings.appName}. + else + p.thanks You’ve introduced #{refered_user_count} people to #{settings.appName}. diff --git a/services/web/app/views/subscriptions/add-seats.pug b/services/web/app/views/subscriptions/add-seats.pug index 697a554c97..bcbf5be666 100644 --- a/services/web/app/views/subscriptions/add-seats.pug +++ b/services/web/app/views/subscriptions/add-seats.pug @@ -1,9 +1,10 @@ -extends ../layout-marketing +extends ../layout-react block entrypointVar - entrypoint = 'pages/user/subscription/group-management/add-seats' block append meta + meta(name="ol-user" data-type="json" content=user) meta(name="ol-groupName", data-type="string", content=groupName) meta(name="ol-subscriptionId", data-type="string", content=subscriptionId) meta(name="ol-totalLicenses", data-type="number", content=totalLicenses) diff --git a/services/web/app/views/subscriptions/dashboard-react.pug b/services/web/app/views/subscriptions/dashboard-react.pug index d6a1bff49c..2b6251f2a3 100644 --- a/services/web/app/views/subscriptions/dashboard-react.pug +++ b/services/web/app/views/subscriptions/dashboard-react.pug @@ -27,6 +27,7 @@ block append meta meta(name="ol-user" data-type="json" content=user) if (personalSubscription && personalSubscription.payment) meta(name="ol-recurlyApiKey" content=settings.apis.recurly.publicKey) + meta(name="ol-stripeUKApiKey" content=settings.apis.stripeUK.publishableKey) meta(name="ol-recommendedCurrency" content=personalSubscription.payment.currency) meta(name="ol-groupPlans" data-type="json" content=groupPlans) diff --git a/services/web/app/views/subscriptions/manually-collected-subscription.pug b/services/web/app/views/subscriptions/manually-collected-subscription.pug index 1555ac2ea1..ba6bf73473 100644 --- a/services/web/app/views/subscriptions/manually-collected-subscription.pug +++ b/services/web/app/views/subscriptions/manually-collected-subscription.pug @@ -1,9 +1,10 @@ -extends ../layout-marketing +extends ../layout-react block entrypointVar - entrypoint = 'pages/user/subscription/group-management/manually-collected-subscription' block append meta + meta(name="ol-user" data-type="json" content=user) meta(name="ol-groupName", data-type="string", content=groupName) block content diff --git a/services/web/app/views/subscriptions/missing-billing-information.pug b/services/web/app/views/subscriptions/missing-billing-information.pug index 67d13f8e89..416bac65f5 100644 --- a/services/web/app/views/subscriptions/missing-billing-information.pug +++ b/services/web/app/views/subscriptions/missing-billing-information.pug @@ -1,9 +1,10 @@ -extends ../layout-marketing +extends ../layout-react block entrypointVar - entrypoint = 'pages/user/subscription/group-management/missing-billing-information' block append meta + meta(name="ol-user" data-type="json" content=user) meta(name="ol-groupName", data-type="string", content=groupName) block content diff --git 
a/services/web/app/views/subscriptions/plans/_faq_new.pug b/services/web/app/views/subscriptions/plans/_faq_new.pug index baefb6ed3f..3c926fb22d 100644 --- a/services/web/app/views/subscriptions/plans/_faq_new.pug +++ b/services/web/app/views/subscriptions/plans/_faq_new.pug @@ -1,5 +1,6 @@ include ./_plans_faq_tabs include ../../_mixins/eyebrow +include ../../_mixins/material_symbol - var managingYourSubscription = 'managingYourSubscription' - var overleafIndividualPlans = 'overleafIndividualPlans' @@ -81,6 +82,10 @@ include ../../_mixins/eyebrow .row .col-xs-12.plans-faq-support span #{translate('still_have_questions')} - button(data-ol-open-contact-form-modal="general") + button( + data-ol-open-contact-form-modal="general" + data-bs-toggle=bootstrapVersion === 5 ? "modal" : undefined + data-bs-target=bootstrapVersion === 5 ? "#contactUsModal" : undefined + ) span(style="margin-right: 4px") #{translate('contact_support')} - i.icon-md.material-symbols.material-symbols-rounded.material-symbols-arrow-right(aria-hidden="true") arrow_right_alt + +material-symbol-rounded("arrow_right_alt", "icon-md") diff --git a/services/web/app/views/subscriptions/plans/_plans_faq_tabs.pug b/services/web/app/views/subscriptions/plans/_plans_faq_tabs.pug index f312ebeb46..a598f4774c 100644 --- a/services/web/app/views/subscriptions/plans/_plans_faq_tabs.pug +++ b/services/web/app/views/subscriptions/plans/_plans_faq_tabs.pug @@ -1,5 +1,6 @@ //- If the `plans-page-bs5` split test has been completed, remove the `data-toggle` and `data-target` because it is not needed anymore (bs5 uses `data-bs-toggle` and `data-bs-target`) - +include ../../_mixins/material_symbol + mixin managingYourSubscription() .ol-accordions-container .custom-accordion-item @@ -14,7 +15,7 @@ mixin managingYourSubscription() ) | Can I change plans or cancel later? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="managingYourSubscriptionQ1") .custom-accordion-body span Yes, you can do this at any time by going to @@ -32,7 +33,7 @@ mixin managingYourSubscription() ) | If I change or cancel my Overleaf plan, will I lose my projects? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="managingYourSubscriptionQ2") .custom-accordion-body | No. Changing or canceling your plan won’t affect your projects, the only change will be to the features available to you. You can see which features are available only on paid plans in the comparison table. @@ -48,7 +49,7 @@ mixin managingYourSubscription() ) | Can I pay by invoice or purchase order? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="managingYourSubscriptionQ3") .custom-accordion-body | This is possible when you’re purchasing a group subscription for five or more people, or a site license. For individual subscriptions, we can only accept payment online via credit card, debit card, or PayPal. @@ -64,7 +65,7 @@ mixin managingYourSubscription() ) | How do I view/update the credit card being charged for my subscription? 
span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="managingYourSubscriptionQ4") .custom-accordion-body | You can view and update the card on file by going to Account > @@ -96,7 +97,7 @@ mixin overleafIndividualPlans() ) | How does the free trial work? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafIndividualPlansQ1") .custom-accordion-body span You get full access to your chosen plan during your 7-day free trial, and there’s no obligation to continue beyond the trial. Your card will be charged at the end of your trial unless you cancel before then. To cancel, go to @@ -124,7 +125,7 @@ mixin overleafIndividualPlans() ) | What’s a collaborator on an Overleaf individual subscription? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafIndividualPlansQ2") .custom-accordion-body | A collaborator is someone you invite to work with you on a project. So, for example, on our Standard plan you can have up to 10 people collaborating with you on any given project. @@ -141,7 +142,7 @@ mixin overleafIndividualPlans() ) | The individual Standard plan has 10 project collaborators, does it mean that 10 people will be upgraded? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafIndividualPlansQ3") .custom-accordion-body span No. Only the subscriber’s account will be upgraded. An individual Standard subscription allows you to invite 10 people per project to edit the project with you. Your collaborators can access features such as the full document history and extended compile time, but @@ -159,7 +160,7 @@ mixin overleafIndividualPlans() ) | Do collaborators also have access to the editing and collaboration features I’ve paid for? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafIndividualPlansQ4") .custom-accordion-body span If you have an Overleaf subscription, then your project collaborators will have access to features like real-time track changes and document history, but @@ -177,7 +178,7 @@ mixin overleafIndividualPlans() ) | Can I purchase an individual plan on behalf of someone else? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafIndividualPlansQ5") .custom-accordion-body | Individual subscriptions must be purchased by the account that will be the end user. If you want to purchase a plan for someone else, you’ll need to provide them with relevant payment details to enable them to make the purchase. @@ -193,7 +194,7 @@ mixin overleafIndividualPlans() ) | Who is eligible for the Student plan? 
span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafIndividualPlansQ6") .custom-accordion-body | As the name suggests, the Student plan is only for students at educational institutions. This includes graduate students. @@ -209,7 +210,7 @@ mixin overleafIndividualPlans() ) | Can I transfer an individual subscription to someone else? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafIndividualPlansQ7") .custom-accordion-body | No. Individual plans can’t be transferred. @@ -232,7 +233,7 @@ mixin overleafGroupPlans() ) | What’s the difference between users and collaborators on an Overleaf group subscription? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafGroupPlansQ1") .custom-accordion-body div On any of our group plans, the number of users refers to the number of people you can invite to join your group. All of these people will have access to the plan’s paid-for features across all their projects, such as real-time track changes and document history. @@ -249,7 +250,7 @@ mixin overleafGroupPlans() ) | What is the benefit of purchasing an Overleaf Group plan? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafGroupPlansQ2") .custom-accordion-body | Our Group subscriptions allow you to purchase access to our premium features for multiple people. They’re easy to manage, help save on paperwork, and allow groups of 5 or more to purchase via purchase order (PO). We also offer discounts on purchases of Group subscriptions for more than 20 users; just get in touch with our @@ -275,7 +276,7 @@ mixin overleafGroupPlans() ) | Who is eligible for the educational discount? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafGroupPlansQ3") .custom-accordion-body | The educational discount for group subscriptions is for students or faculty who are using Overleaf primarily for teaching. @@ -291,7 +292,7 @@ mixin overleafGroupPlans() ) | How do I add more licenses to my group subscription, and what will it cost? span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafGroupPlansQ4") .custom-accordion-body div @@ -340,7 +341,7 @@ mixin overleafGroupPlans() ) | How do I upgrade my plan from Group Standard to Group Professional? 
span.custom-accordion-icon - i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + +material-symbol-outlined("keyboard_arrow_down") .collapse(id="overleafGroupPlansQ5") .custom-accordion-body | You can upgrade your plan from Group Standard to Group Professional on the diff --git a/services/web/app/views/subscriptions/preview-change.pug b/services/web/app/views/subscriptions/preview-change.pug index 663bbe30d2..5330eb8684 100644 --- a/services/web/app/views/subscriptions/preview-change.pug +++ b/services/web/app/views/subscriptions/preview-change.pug @@ -1,9 +1,10 @@ -extends ../layout-marketing +extends ../layout-react block entrypointVar - entrypoint = 'pages/user/subscription/preview-change' block append meta + meta(name="ol-user" data-type="json" content=user) meta(name="ol-subscriptionChangePreview" data-type="json" content=changePreview) meta(name="ol-purchaseReferrer" data-type="string" content=purchaseReferrer) diff --git a/services/web/app/views/subscriptions/subtotal-limit-exceeded.pug b/services/web/app/views/subscriptions/subtotal-limit-exceeded.pug index 15f79488fa..4457383e93 100644 --- a/services/web/app/views/subscriptions/subtotal-limit-exceeded.pug +++ b/services/web/app/views/subscriptions/subtotal-limit-exceeded.pug @@ -1,9 +1,10 @@ -extends ../layout-marketing +extends ../layout-react block entrypointVar - entrypoint = 'pages/user/subscription/group-management/subtotal-limit-exceeded' block append meta + meta(name="ol-user" data-type="json" content=user) meta(name="ol-groupName", data-type="string", content=groupName) block content diff --git a/services/web/app/views/subscriptions/team/invite-managed.pug b/services/web/app/views/subscriptions/team/invite-managed.pug index f59b8b4937..d31f12656b 100644 --- a/services/web/app/views/subscriptions/team/invite-managed.pug +++ b/services/web/app/views/subscriptions/team/invite-managed.pug @@ -13,6 +13,7 @@ block append meta meta(name="ol-groupSSOActive" data-type="boolean" content=groupSSOActive) meta(name="ol-subscriptionId" data-type="string" content=subscriptionId) meta(name="ol-user" data-type="json" content=user) + meta(name="ol-usersSubscription" data-type="json" content=usersSubscription) block content main.content.content-alt.team-invite#invite-managed-root diff --git a/services/web/app/views/subscriptions/team/invite.pug b/services/web/app/views/subscriptions/team/invite.pug index dc1b509cbf..1b2ecb4646 100644 --- a/services/web/app/views/subscriptions/team/invite.pug +++ b/services/web/app/views/subscriptions/team/invite.pug @@ -4,7 +4,7 @@ block entrypointVar - entrypoint = 'pages/user/subscription/invite' block append meta - meta(name="ol-hasIndividualRecurlySubscription" data-type="boolean" content=hasIndividualRecurlySubscription) + meta(name="ol-hasIndividualPaidSubscription" data-type="boolean" content=hasIndividualPaidSubscription) meta(name="ol-inviterName" data-type="string" content=inviterName) meta(name="ol-inviteToken" data-type="string" content=inviteToken) meta(name="ol-currentManagedUserAdminEmail" data-type="string" content=currentManagedUserAdminEmail) diff --git a/services/web/app/views/subscriptions/team/invite_logged_out.pug b/services/web/app/views/subscriptions/team/invite_logged_out.pug index d07fa5368c..e5930aba4f 100644 --- a/services/web/app/views/subscriptions/team/invite_logged_out.pug +++ b/services/web/app/views/subscriptions/team/invite_logged_out.pug @@ -1,4 +1,4 @@ -extends ../../layout-react +extends ../../layout-marketing block append meta 
meta(name="ol-user" data-type="json" content=user) @@ -14,7 +14,7 @@ block content .card-body .page-header // TODO: Remove `team-invite-name` once we fully migrated to Bootstrap 5 - h1.text-centered !{translate("invited_to_group", {inviterName: inviterName, appName: appName }, [{name: 'span', attrs: {class: 'team-invite-name'}}])} + h1.text-center !{translate("invited_to_group", {inviterName: inviterName, appName: appName }, [{name: 'span', attrs: {class: 'team-invite-name'}}])} if (accountExists) div diff --git a/services/web/app/views/subscriptions/upgrade-group-subscription-react.pug b/services/web/app/views/subscriptions/upgrade-group-subscription-react.pug index c482629463..4347a2a633 100644 --- a/services/web/app/views/subscriptions/upgrade-group-subscription-react.pug +++ b/services/web/app/views/subscriptions/upgrade-group-subscription-react.pug @@ -1,9 +1,10 @@ -extends ../layout-marketing +extends ../layout-react block entrypointVar - entrypoint = 'pages/user/subscription/group-management/upgrade-group-subscription' block append meta + meta(name="ol-user" data-type="json" content=user) meta(name="ol-subscriptionChangePreview" data-type="json" content=changePreview) meta(name="ol-totalLicenses", data-type="number", content=totalLicenses) meta(name="ol-groupName", data-type="string", content=groupName) diff --git a/services/web/app/views/user/accountSuspended.pug b/services/web/app/views/user/accountSuspended.pug index da57f4d9ff..7231713416 100644 --- a/services/web/app/views/user/accountSuspended.pug +++ b/services/web/app/views/user/accountSuspended.pug @@ -4,12 +4,12 @@ block vars - var suppressNavbar = true - var suppressFooter = true - metadata.robotsNoindexNofollow = true - - bootstrap5PageStatus = 'disabled' block content main.content.content-alt#main-content .container-custom-sm.mx-auto .card - h3 #{translate('your_account_is_suspended')} - p #{translate('sorry_this_account_has_been_suspended')} - p !{translate('please_contact_us_if_you_think_this_is_in_error', {}, [{name: 'a', attrs: {href: `mailto:${settings.adminEmail}`}}])} + .card-body + h3 #{translate('your_account_is_suspended')} + p #{translate('sorry_this_account_has_been_suspended')} + p !{translate('please_contact_us_if_you_think_this_is_in_error', {}, [{name: 'a', attrs: {href: `mailto:${settings.adminEmail}`}}])} diff --git a/services/web/app/views/user/compromised_password.pug b/services/web/app/views/user/compromised_password.pug index e56ffd9841..c66a07415a 100644 --- a/services/web/app/views/user/compromised_password.pug +++ b/services/web/app/views/user/compromised_password.pug @@ -1,4 +1,4 @@ -extends ../layout-marketing +extends ../layout-react block vars - var suppressNavbar = true diff --git a/services/web/app/views/user/confirmSecondaryEmail.pug b/services/web/app/views/user/confirmSecondaryEmail.pug index 4d0c59e9db..181e58e4ce 100644 --- a/services/web/app/views/user/confirmSecondaryEmail.pug +++ b/services/web/app/views/user/confirmSecondaryEmail.pug @@ -1,4 +1,4 @@ -extends ../layout-marketing +extends ../layout-react block vars - var suppressNavbar = true diff --git a/services/web/app/views/user/confirm_email.pug b/services/web/app/views/user/confirm_email.pug index 37c04880b1..13e911f386 100644 --- a/services/web/app/views/user/confirm_email.pug +++ b/services/web/app/views/user/confirm_email.pug @@ -1,60 +1,57 @@ extends ../layout-marketing - -block vars - - bootstrap5PageStatus = 'disabled' +include ../_mixins/notification block content main.content.content-alt#main-content .container .row - 
.col-md-8.col-md-offset-2.col-lg-6.col-lg-offset-3 + .col-lg-8.offset-lg-2.col-xl-6.offset-xl-3 .card - .page-header(data-ol-hide-on-error-message="confirm-email-wrong-user") - h1 #{translate("confirm_email")} - form( - method="POST" - action="/logout" - id="logoutForm" - ) - input(type="hidden", name="_csrf", value=csrfToken) - input(type="hidden", name="redirect", value=currentUrlWithQueryParams) - form( - data-ol-async-form, - data-ol-auto-submit, - name="confirmEmailForm" - action="/user/emails/confirm", - method="POST", - id="confirmEmailForm", - ) - input(type="hidden", name="_csrf", value=csrfToken) - input(type="hidden", name="token", value=token) + .card-body + .page-header(data-ol-hide-on-error-message="confirm-email-wrong-user") + h1 #{translate("confirm_email")} + form( + method="POST" + action="/logout" + id="logoutForm" + ) + input(type="hidden", name="_csrf", value=csrfToken) + input(type="hidden", name="redirect", value=currentUrlWithQueryParams) + form( + data-ol-async-form, + data-ol-auto-submit, + name="confirmEmailForm" + action="/user/emails/confirm", + method="POST", + id="confirmEmailForm", + ) + input(type="hidden", name="_csrf", value=csrfToken) + input(type="hidden", name="token", value=token) + + div(data-ol-not-sent) + +formMessages() + div(data-ol-custom-form-message="confirm-email-wrong-user" hidden) + h1.h3 #{translate("we_cant_confirm_this_email")} + p !{translate("to_confirm_email_address_you_must_be_logged_in_with_the_requesting_account")} + p !{translate("you_are_currently_logged_in_as", {email: getUserEmail()})} + .actions + button.btn-primary.btn.w-100( + form="logoutForm" + ) #{translate('log_in_with_a_different_account')} - div(data-ol-not-sent) - +formMessages() - div(data-ol-custom-form-message="confirm-email-wrong-user" hidden) - h1.h3 #{translate("we_cant_confirm_this_email")} - p !{translate("to_confirm_email_address_you_must_be_logged_in_with_the_requesting_account")} - p !{translate("you_are_currently_logged_in_as", {email: getUserEmail()})} .actions - button.btn-primary.btn.btn-block( - form="logoutForm" - ) #{translate('log_in_with_a_different_account')} + button.btn-primary.btn.w-100( + type='submit', + data-ol-disabled-inflight + data-ol-hide-on-error-message="confirm-email-wrong-user" + ) + span(data-ol-inflight="idle") + | #{translate('confirm')} + span(hidden data-ol-inflight="pending") + span(role='status').spinner-border.spinner-border-sm.mx-2 - .actions - button.btn-primary.btn.btn-block( - type='submit', - data-ol-disabled-inflight - data-ol-hide-on-error-message="confirm-email-wrong-user" - ) - span(data-ol-inflight="idle") - | #{translate('confirm')} - span(hidden data-ol-inflight="pending") - i.fa.fa-fw.fa-spin.fa-spinner(aria-hidden="true") - |  #{translate('confirming')}… - - div(hidden data-ol-sent) - .alert.alert-success - | #{translate('thank_you_email_confirmed')} - div.text-center - a.btn.btn-primary(href="/user/settings") - | #{translate('go_to_account_settings')} + div(hidden data-ol-sent) + +notification({ariaLive: 'polite', type: 'success', className: 'mb-3', content: translate("thank_you_email_confirmed")}) + div.text-center + a.btn.btn-primary(href="/user/settings") + | #{translate('go_to_account_settings')} diff --git a/services/web/app/views/user/email-preferences.pug b/services/web/app/views/user/email-preferences.pug index 465ffede37..86ebc5f841 100644 --- a/services/web/app/views/user/email-preferences.pug +++ b/services/web/app/views/user/email-preferences.pug @@ -1,49 +1,47 @@ extends ../layout-marketing 
include ../_mixins/back_to_btns -block vars - - bootstrap5PageStatus = 'disabled' - block content main.content.content-alt#main-content .container .row - .col-md-10.col-md-offset-1.col-lg-8.col-lg-offset-2 + .col-lg-10.offset-lg-1.col-xl-8.offset-xl-2 .card - .page-header - h1 #{translate("newsletter_info_title")} - - p #{translate("newsletter_info_summary")} - - - var submitAction - if subscribed - - submitAction = '/user/newsletter/unsubscribe' - p !{translate("newsletter_info_subscribed", {}, ['strong'])} - else - - submitAction = '/user/newsletter/subscribe' - p !{translate("newsletter_info_unsubscribed", {}, ['strong'])} - - form( - data-ol-async-form - data-ol-reload-on-success - name="newsletterForm" - action=submitAction - method="POST" - ) - input(name='_csrf', type='hidden', value=csrfToken) - +formMessages() - p.actions.text-center - if subscribed - button.btn-danger.btn(type='submit', data-ol-disabled-inflight) - span(data-ol-inflight="idle") #{translate("unsubscribe")} - span(hidden data-ol-inflight="pending") #{translate("saving")}… - else - button.btn-primary.btn(type='submit', data-ol-disabled-inflight) - span(data-ol-inflight="idle") #{translate("subscribe")} - span(hidden data-ol-inflight="pending") #{translate("saving")}… - - if subscribed - p #{translate("newsletter_info_note")} - - .page-separator - +back-to-btns() + .card-body + .page-header + h1 #{translate("newsletter_info_title")} + + p #{translate("newsletter_info_summary")} + + - var submitAction + if subscribed + - submitAction = '/user/newsletter/unsubscribe' + p !{translate("newsletter_info_subscribed", {}, ['strong'])} + else + - submitAction = '/user/newsletter/subscribe' + p !{translate("newsletter_info_unsubscribed", {}, ['strong'])} + + form( + data-ol-async-form + data-ol-reload-on-success + name="newsletterForm" + action=submitAction + method="POST" + ) + input(name='_csrf', type='hidden', value=csrfToken) + +formMessages() + p.actions.text-center + if subscribed + button.btn-danger.btn(type='submit', data-ol-disabled-inflight) + span(data-ol-inflight="idle") #{translate("unsubscribe")} + span(hidden data-ol-inflight="pending") #{translate("saving")}… + else + button.btn-primary.btn(type='submit', data-ol-disabled-inflight) + span(data-ol-inflight="idle") #{translate("subscribe")} + span(hidden data-ol-inflight="pending") #{translate("saving")}… + + if subscribed + p #{translate("newsletter_info_note")} + + .page-separator + +back-to-btns() diff --git a/services/web/app/views/user/login.pug b/services/web/app/views/user/login.pug index 9185b0b14b..1ad77cb8b4 100644 --- a/services/web/app/views/user/login.pug +++ b/services/web/app/views/user/login.pug @@ -1,52 +1,50 @@ extends ../layout-marketing -block vars - - bootstrap5PageStatus = 'disabled' - block content main.content.content-alt#main-content .container .row - .col-md-6.col-md-offset-3.col-lg-4.col-lg-offset-4 + .col-lg-6.offset-lg-3.col-xl-4.offset-xl-4 .card - .page-header - if login_support_title - h1 !{login_support_title} - else - h1 #{translate("log_in")} - form(data-ol-async-form, name="loginForm", action='/login', method="POST") - input(name='_csrf', type='hidden', value=csrfToken) - +formMessages() - +customFormMessage('invalid-password-retry-or-reset', 'danger') - | !{translate('email_or_password_wrong_try_again_or_reset', {}, [{ name: 'a', attrs: { href: '/user/password/reset', 'aria-describedby': 'resetPasswordDescription' } }])} - span.sr-only(id='resetPasswordDescription') - | #{translate('reset_password_link')} - 
+customValidationMessage('password-compromised') - | !{translate('password_compromised_try_again_or_use_known_device_or_reset', {}, [{name: 'a', attrs: {href: 'https://haveibeenpwned.com/passwords', rel: 'noopener noreferrer', target: '_blank'}}, {name: 'a', attrs: {href: '/user/password/reset', target: '_blank'}}])}. - .form-group - input.form-control( - type='email', - name='email', - required, - placeholder='email@example.com', - autofocus="true" - ) - .form-group - input.form-control( - type='password', - name='password', - required, - placeholder='********', - ) - .actions - button.btn-primary.btn( - type='submit', - data-ol-disabled-inflight - ) - span(data-ol-inflight="idle") #{translate("login")} - span(hidden data-ol-inflight="pending") #{translate("logging_in")}… - a.pull-right(href='/user/password/reset') #{translate("forgot_your_password")}? - if login_support_text - hr - p.text-center !{login_support_text} - + .card-body + .page-header + if login_support_title + h1 !{login_support_title} + else + h1 #{translate("log_in")} + form(data-ol-async-form, name="loginForm", action='/login', method="POST") + input(name='_csrf', type='hidden', value=csrfToken) + +formMessagesNewStyle() + +customFormMessageNewStyle('invalid-password-retry-or-reset', 'danger') + | !{translate('email_or_password_wrong_try_again_or_reset', {}, [{ name: 'a', attrs: { href: '/user/password/reset', 'aria-describedby': 'resetPasswordDescription' } }])} + span.visually-hidden(id='resetPasswordDescription') + | #{translate('reset_password_link')} + +customFormMessageNewStyle('password-compromised') + | !{translate('password_compromised_try_again_or_use_known_device_or_reset', {}, [{name: 'a', attrs: {href: 'https://haveibeenpwned.com/passwords', rel: 'noopener noreferrer', target: '_blank'}}, {name: 'a', attrs: {href: '/user/password/reset', target: '_blank'}}])}. + .form-group + input.form-control( + type='email', + name='email', + required, + placeholder='email@example.com', + autofocus="true" + ) + .form-group + input.form-control( + type='password', + name='password', + required, + placeholder='********', + ) + .actions + button.btn-primary.btn( + type='submit', + data-ol-disabled-inflight + ) + span(data-ol-inflight="idle") #{translate("login")} + span(hidden data-ol-inflight="pending") #{translate("logging_in")}… + a.float-end(href='/user/password/reset') #{translate("forgot_your_password")}? + if login_support_text + hr + p.text-center !{login_support_text} + diff --git a/services/web/app/views/user/one_time_login.pug b/services/web/app/views/user/one_time_login.pug index 89e1491913..648f6d93c1 100644 --- a/services/web/app/views/user/one_time_login.pug +++ b/services/web/app/views/user/one_time_login.pug @@ -1,20 +1,18 @@ extends ../layout-marketing -block vars - - bootstrap5PageStatus = 'disabled' - block content main.content.content-alt#main-content .container .row - .col-md-6.col-md-offset-3.col-lg-4.col-lg-offset-4 + .col-lg-6.offset-lg-3.col-xl-4.offset-xl-4 .card - .page-header - h1 We're back! - p Overleaf is now running normally. - p - | Please - | - a(href="/login") log in - | - | to continue working on your projects. + .card-body + .page-header + h1 We're back! + p Overleaf is now running normally. + p + | Please + | + a(href="/login") log in + | + | to continue working on your projects. 
diff --git a/services/web/app/views/user/passwordReset-bs5.pug b/services/web/app/views/user/passwordReset-bs5.pug index 7637a91062..08e0a71b9d 100644 --- a/services/web/app/views/user/passwordReset-bs5.pug +++ b/services/web/app/views/user/passwordReset-bs5.pug @@ -1,10 +1,11 @@ -extends ../layout-website-redesign-bootstrap-5 +extends ../layout-website-redesign include ../_mixins/recaptcha include ../_mixins/notification block vars - var suppressNavbar = true - var suppressFooter = true + - isWebsiteRedesign = true block content - var showCaptcha = settings.recaptcha && settings.recaptcha.siteKey && !(settings.recaptcha.disabled && settings.recaptcha.disabled.passwordReset) diff --git a/services/web/app/views/user/passwordReset.pug b/services/web/app/views/user/passwordReset.pug index 410e79fbb2..ed806c32cd 100644 --- a/services/web/app/views/user/passwordReset.pug +++ b/services/web/app/views/user/passwordReset.pug @@ -1,5 +1,6 @@ extends ../layout-marketing include ../_mixins/recaptcha +include ../_mixins/material_symbol block vars - bootstrap5PageStatus = 'disabled' @@ -48,7 +49,7 @@ block content div(data-ol-custom-form-message="no-password-allowed-due-to-sso" hidden) .notification.notification-type-error(aria-live="polite" style="margin-bottom: 10px;") .notification-icon - span.material-symbols.material-symbols-rounded(aria-hidden="true") error + +material-symbol-rounded("error") .notification-content-and-cta .notification-content p diff --git a/services/web/app/views/user/primaryEmailCheck-bs5.pug b/services/web/app/views/user/primaryEmailCheck-bs5.pug index 0828c06e4b..b25136927a 100644 --- a/services/web/app/views/user/primaryEmailCheck-bs5.pug +++ b/services/web/app/views/user/primaryEmailCheck-bs5.pug @@ -1,4 +1,8 @@ -extends ../layout-website-redesign-bootstrap-5 +extends ../layout-website-redesign + +block vars + - bootstrap5PageStatus = 'enabled' + - isWebsiteRedesign = true block content main#main-content diff --git a/services/web/app/views/user/reconfirm-bs5.pug b/services/web/app/views/user/reconfirm-bs5.pug index 8d9d13955f..fce9a44295 100644 --- a/services/web/app/views/user/reconfirm-bs5.pug +++ b/services/web/app/views/user/reconfirm-bs5.pug @@ -1,69 +1,72 @@ -extends ../layout-website-redesign-bootstrap-5 +extends ../layout-website-redesign include ../_mixins/recaptcha +block vars + - isWebsiteRedesign = true + block content - - var email = reconfirm_email ? reconfirm_email : "" - - var showCaptcha = settings.recaptcha && settings.recaptcha.siteKey && !(settings.recaptcha.disabled && settings.recaptcha.disabled.passwordReset) + - var email = reconfirm_email ? 
reconfirm_email : "" + - var showCaptcha = settings.recaptcha && settings.recaptcha.siteKey && !(settings.recaptcha.disabled && settings.recaptcha.disabled.passwordReset) - if showCaptcha - script(type="text/javascript", nonce=scriptNonce, src="https://www.recaptcha.net/recaptcha/api.js?render=explicit") - div( - id="recaptcha" - class="g-recaptcha" - data-sitekey=settings.recaptcha.siteKey - data-size="invisible" - data-badge="inline" - ) + if showCaptcha + script(type="text/javascript", nonce=scriptNonce, src="https://www.recaptcha.net/recaptcha/api.js?render=explicit") + div( + id="recaptcha" + class="g-recaptcha" + data-sitekey=settings.recaptcha.siteKey + data-size="invisible" + data-badge="inline" + ) - main#main-content(data-ol-captcha-retry-trigger-area="") - .container.auth-aux-container(style="max-width: 420px;") - form( - data-ol-async-form - name="reconfirmAccountForm" - action="/user/reconfirm" - method="POST" - aria-label=translate('request_reconfirmation_email') - captcha=(showCaptcha ? '' : false) - captcha-action-name=(showCaptcha ? "passwordReset" : false) - ) - h1.h5.mb-3 #{translate("reconfirm_account")} - p #{translate('reconfirm_explained')} - | - a(href=`mailto:${settings.adminEmail}`) #{settings.adminEmail} - | . - - div(data-ol-not-sent) - +formMessagesNewStyle() + main#main-content(data-ol-captcha-retry-trigger-area="") + .container.auth-aux-container(style="max-width: 420px;") + form( + data-ol-async-form + name="reconfirmAccountForm" + action="/user/reconfirm" + method="POST" + aria-label=translate('request_reconfirmation_email') + captcha=(showCaptcha ? '' : false) + captcha-action-name=(showCaptcha ? "passwordReset" : false) + ) + h1.h5.mb-3 #{translate("reconfirm_account")} + p #{translate('reconfirm_explained')} + | + a(href=`mailto:${settings.adminEmail}`) #{settings.adminEmail} + | . 
+ + div(data-ol-not-sent) + +formMessagesNewStyle() - input(type="hidden" name="_csrf" value=csrfToken) - .form-group.mb-3 - label.form-label(for='email') #{translate("please_enter_email")} - input.form-control( - aria-label="email" - type='email' - name='email' - placeholder='email@example.com' - required - autofocus - value=email - ) - .actions - button.btn.btn-primary.w-100( - style="white-space: normal;" - type='submit' - data-ol-disabled-inflight - aria-label=translate('request_password_reset_to_reconfirm') - ) - span(data-ol-inflight="idle") - | #{translate('request_password_reset_to_reconfirm')} - span(hidden data-ol-inflight="pending") - | #{translate('request_password_reset_to_reconfirm')}… - div(hidden data-ol-sent) - div.alert.alert-success( - role="alert" - aria-live="polite" - ) - span #{translate('password_reset_email_sent')} + input(type="hidden" name="_csrf" value=csrfToken) + .form-group.mb-3 + label.form-label(for='email') #{translate("please_enter_email")} + input.form-control( + aria-label="email" + type='email' + name='email' + placeholder='email@example.com' + required + autofocus + value=email + ) + .actions + button.btn.btn-primary.w-100( + style="white-space: normal;" + type='submit' + data-ol-disabled-inflight + aria-label=translate('request_password_reset_to_reconfirm') + ) + span(data-ol-inflight="idle") + | #{translate('request_password_reset_to_reconfirm')} + span(hidden data-ol-inflight="pending") + | #{translate('request_password_reset_to_reconfirm')}… + div(hidden data-ol-sent) + div.alert.alert-success( + role="alert" + aria-live="polite" + ) + span #{translate('password_reset_email_sent')} - if showCaptcha - +recaptchaConditions + if showCaptcha + +recaptchaConditions diff --git a/services/web/app/views/user/reconfirm.pug b/services/web/app/views/user/reconfirm.pug index 7c17423d5a..23b77d278d 100644 --- a/services/web/app/views/user/reconfirm.pug +++ b/services/web/app/views/user/reconfirm.pug @@ -23,7 +23,7 @@ block content .row .col-sm-12.col-md-6.col-md-offset-3 .card - h1.card-header #{translate("reconfirm")} #{translate("Account")} + h1.card-header #{translate("reconfirm_account")} p #{translate('reconfirm_explained')}  a(href=`mailto:${settings.adminEmail}`) #{settings.adminEmail} | . 
diff --git a/services/web/app/views/user/restricted.pug b/services/web/app/views/user/restricted.pug index eba1d2ab05..0140064a99 100644 --- a/services/web/app/views/user/restricted.pug +++ b/services/web/app/views/user/restricted.pug @@ -1,4 +1,5 @@ extends ../layout-marketing +include ../_mixins/material_symbol block content main.content#main-content @@ -6,8 +7,8 @@ block content .row .col-md-8.offset-md-2.text-center .page-header - h2 #{translate("restricted_no_permission")} + h1 #{translate("restricted_no_permission")} p - span.inline-material-symbols - a(href="/").material-symbols(aria-hidden="true") arrow_left_alt - a(href="/") #{translate("take_me_home")} + a.inline-material-symbols(href="/") + +material-symbol("arrow_left_alt") + | #{translate("take_me_home")} diff --git a/services/web/app/views/user/sessions.pug b/services/web/app/views/user/sessions.pug index 187c1dae75..ffd65a3548 100644 --- a/services/web/app/views/user/sessions.pug +++ b/services/web/app/views/user/sessions.pug @@ -1,72 +1,70 @@ extends ../layout-marketing -block vars - - bootstrap5PageStatus = 'disabled' - block content main.content.content-alt#main-content .container .row - .col-md-10.col-md-offset-1.col-lg-8.col-lg-offset-2 + .col-lg-10.offset-lg-1.col-xl-8.offset-xl-2 .card.clear-user-sessions - .page-header - h1 #{translate("your_sessions")} - - if currentSession.ip_address && currentSession.session_created - h3 #{translate("current_session")} - div - table.table.table-striped - thead - tr - th #{translate("ip_address")} - th #{translate("session_created_at")} - tr - td #{currentSession.ip_address} - td #{moment(currentSession.session_created).utc().format('Do MMM YYYY, h:mm a')} UTC - - h3 #{translate("other_sessions")} - div - p.small - | !{translate("clear_sessions_description")} - - form( - data-ol-async-form - action='/user/sessions/clear' - method='POST' - ) - input(name='_csrf' type='hidden' value=csrfToken) - div(data-ol-not-sent) - if sessions.length == 0 - p.text-center - | #{translate("no_other_sessions")} - - if sessions.length > 0 + .card-body + .page-header + h1 #{translate("your_sessions")} + + if currentSession.ip_address && currentSession.session_created + h3 #{translate("current_session")} + div table.table.table-striped thead tr th #{translate("ip_address")} th #{translate("session_created_at")} - for session in sessions tr - td #{session.ip_address} - td #{moment(session.session_created).utc().format('Do MMM YYYY, h:mm a')} UTC - - p.actions - .text-center - button.btn.btn-lg.btn-primary( - type="submit" - data-ol-disable-inflight - ) - span(data-ol-inflight="idle") #{translate('clear_sessions')} - span(hidden data-ol-inflight="pending") #{translate("processing")}… - - div(hidden data-ol-sent) - p.text-center - | #{translate("no_other_sessions")} - - p.text-success.text-center - | #{translate('clear_sessions_success')} - .page-separator - a.btn.btn-secondary(href='/user/settings') #{translate('back_to_account_settings')} - | - a.btn.btn-secondary(href='/project') #{translate('back_to_your_projects')} + td #{currentSession.ip_address} + td #{moment(currentSession.session_created).utc().format('Do MMM YYYY, h:mm a')} UTC + + h3 #{translate("other_sessions")} + div + p.small + | !{translate("clear_sessions_description")} + + form( + data-ol-async-form + action='/user/sessions/clear' + method='POST' + ) + input(name='_csrf' type='hidden' value=csrfToken) + div(data-ol-not-sent) + if sessions.length == 0 + p.text-center + | #{translate("no_other_sessions")} + + if sessions.length > 0 + 
table.table.table-striped + thead + tr + th #{translate("ip_address")} + th #{translate("session_created_at")} + for session in sessions + tr + td #{session.ip_address} + td #{moment(session.session_created).utc().format('Do MMM YYYY, h:mm a')} UTC + + p.actions + .text-center + button.btn.btn-lg.btn-primary( + type="submit" + data-ol-disable-inflight + ) + span(data-ol-inflight="idle") #{translate('clear_sessions')} + span(hidden data-ol-inflight="pending") #{translate("processing")}… + + div(hidden data-ol-sent) + p.text-center + | #{translate("no_other_sessions")} + + p.text-success.text-center + | #{translate('clear_sessions_success')} + .page-separator + .d-flex.gap-3 + a.btn.btn-secondary(href='/user/settings') #{translate('back_to_account_settings')} + a.btn.btn-secondary(href='/project') #{translate('back_to_your_projects')} diff --git a/services/web/app/views/user/setPassword-bs5.pug b/services/web/app/views/user/setPassword-bs5.pug index 007ae5e87c..83c3a531bb 100644 --- a/services/web/app/views/user/setPassword-bs5.pug +++ b/services/web/app/views/user/setPassword-bs5.pug @@ -1,90 +1,91 @@ -extends ../layout-website-redesign-bootstrap-5 +extends ../layout-website-redesign block vars - - var suppressNavbar = true - - var suppressFooter = true + - var suppressNavbar = true + - var suppressFooter = true + - isWebsiteRedesign = true block content - main#main-content - a.auth-aux-logo(href="/") - img(src=buildImgPath("ol-brand/overleaf-o-dark.svg") alt=settings.appName) - .auth-aux-container - form( - data-ol-async-form - name="passwordResetForm" - action="/user/password/set" - method="POST" - data-ol-hide-on-error="token-expired" - ) - div( - hidden - data-ol-sent - ) - h1.h3.mb-3.mt-0 #{translate("password_updated")} - p.mb-4 #{translate("your_password_has_been_successfully_changed")}. - a.btn.btn-primary.w-100(href='/login') #{translate("log_in_now")} + main#main-content + a.auth-aux-logo(href="/") + img(src=buildImgPath("ol-brand/overleaf-o-dark.svg") alt=settings.appName) + .auth-aux-container + form( + data-ol-async-form + name="passwordResetForm" + action="/user/password/set" + method="POST" + data-ol-hide-on-error="token-expired" + ) + div( + hidden + data-ol-sent + ) + h1.h3.mb-3.mt-0 #{translate("password_updated")} + p.mb-4 #{translate("your_password_has_been_successfully_changed")}. + a.btn.btn-primary.w-100(href='/login') #{translate("log_in_now")} - div(data-ol-not-sent) - h1.h3.mb-3.mt-0 #{translate("reset_your_password")} - p(data-ol-hide-on-error-message="token-expired") #{translate("create_a_new_password_for_your_account")}. - +formMessagesNewStyle() + div(data-ol-not-sent) + h1.h3.mb-3.mt-0 #{translate("reset_your_password")} + p(data-ol-hide-on-error-message="token-expired") #{translate("create_a_new_password_for_your_account")}. + +formMessagesNewStyle() - +customFormMessageNewStyle('password-contains-email', 'danger') - | #{translate('invalid_password_contains_email')}. - | #{translate('use_a_different_password')}. + +customFormMessageNewStyle('password-contains-email', 'danger') + | #{translate('invalid_password_contains_email')}. + | #{translate('use_a_different_password')}. - +customFormMessageNewStyle('password-too-similar', 'danger') - | #{translate('invalid_password_too_similar')}. - | #{translate('use_a_different_password')}. + +customFormMessageNewStyle('password-too-similar', 'danger') + | #{translate('invalid_password_too_similar')}. + | #{translate('use_a_different_password')}. 
- +customFormMessageNewStyle('token-expired', 'danger') - | #{translate('password_reset_token_expired')} - br - a(href="/user/password/reset") - | #{translate('request_new_password_reset_email')} + +customFormMessageNewStyle('token-expired', 'danger') + | #{translate('password_reset_token_expired')} + br + a(href="/user/password/reset") + | #{translate('request_new_password_reset_email')} - input(type="hidden" name="_csrf" value=csrfToken) - input(type="text" hidden name="email" autocomplete="username" value=email) + input(type="hidden" name="_csrf" value=csrfToken) + input(type="text" hidden name="email" autocomplete="username" value=email) - .form-group.mb-3 - label.form-label(for='passwordField', data-ol-hide-on-error-message="token-expired") #{translate("new_password")} - input.form-control.auth-aux-new-password#passwordField( - type='password' - name='password' - autocomplete="new-password" - autofocus - required - minlength=settings.passwordStrengthOptions.length.min - ) + .form-group.mb-3 + label.form-label(for='passwordField', data-ol-hide-on-error-message="token-expired") #{translate("new_password")} + input.form-control.auth-aux-new-password#passwordField( + type='password' + name='password' + autocomplete="new-password" + autofocus + required + minlength=settings.passwordStrengthOptions.length.min + ) - +customValidationMessageNewStyle('invalid-password') - | #{translate('invalid_password')}. + +customValidationMessageNewStyle('invalid-password') + | #{translate('invalid_password')}. - +customValidationMessageNewStyle('password-must-be-different') - | #{translate('password_cant_be_the_same_as_current_one')}. + +customValidationMessageNewStyle('password-must-be-different') + | #{translate('password_cant_be_the_same_as_current_one')}. - +customValidationMessageNewStyle('password-must-be-strong') - | !{translate('password_was_detected_on_a_public_list_of_known_compromised_passwords', {}, [{name: 'a', attrs: {href: 'https://haveibeenpwned.com/passwords', rel: 'noopener noreferrer', target: '_blank'}}])}. - | #{translate('use_a_different_password')}. + +customValidationMessageNewStyle('password-must-be-strong') + | !{translate('password_was_detected_on_a_public_list_of_known_compromised_passwords', {}, [{name: 'a', attrs: {href: 'https://haveibeenpwned.com/passwords', rel: 'noopener noreferrer', target: '_blank'}}])}. + | #{translate('use_a_different_password')}. 
- input( - type="hidden" - name="passwordResetToken" - value=passwordResetToken - ) - div(data-ol-hide-on-error-message="token-expired") - div #{translate('in_order_to_have_a_secure_account_make_sure_your_password')} - ul.mb-3.ps-4 - li #{translate('is_longer_than_n_characters', {n: settings.passwordStrengthOptions.length.min})} - li #{translate('does_not_contain_or_significantly_match_your_email')} - li #{translate('is_not_used_on_any_other_website')} - .actions - button.btn.btn-primary.w-100( - type='submit' - data-ol-disabled-inflight - aria-label=translate('set_new_password') - ) - span(data-ol-inflight="idle") - | #{translate('set_new_password')} - span(hidden data-ol-inflight="pending") - | #{translate('set_new_password')}… + input( + type="hidden" + name="passwordResetToken" + value=passwordResetToken + ) + div(data-ol-hide-on-error-message="token-expired") + div #{translate('in_order_to_have_a_secure_account_make_sure_your_password')} + ul.mb-3.ps-4 + li #{translate('is_longer_than_n_characters', {n: settings.passwordStrengthOptions.length.min})} + li #{translate('does_not_contain_or_significantly_match_your_email')} + li #{translate('is_not_used_on_any_other_website')} + .actions + button.btn.btn-primary.w-100( + type='submit' + data-ol-disabled-inflight + aria-label=translate('set_new_password') + ) + span(data-ol-inflight="idle") + | #{translate('set_new_password')} + span(hidden data-ol-inflight="pending") + | #{translate('set_new_password')}… diff --git a/services/web/app/views/user/settings.pug b/services/web/app/views/user/settings.pug index 4f939a41ca..4ac35bef71 100644 --- a/services/web/app/views/user/settings.pug +++ b/services/web/app/views/user/settings.pug @@ -32,6 +32,7 @@ block append meta meta(name="ol-gitBridgeEnabled" data-type="boolean" content=gitBridgeEnabled) meta(name="ol-isSaas" data-type="boolean" content=isSaas) meta(name="ol-memberOfSSOEnabledGroups" data-type="json" content=memberOfSSOEnabledGroups) + meta(name="ol-capabilities" data-type="json" content=capabilities) block content main.content.content-alt#main-content diff --git a/services/web/app/views/user_membership/group-managers-react.pug b/services/web/app/views/user_membership/group-managers-react.pug index f4d8c0e973..d227a7a511 100644 --- a/services/web/app/views/user_membership/group-managers-react.pug +++ b/services/web/app/views/user_membership/group-managers-react.pug @@ -1,9 +1,10 @@ -extends ../layout-marketing +extends ../layout-react block entrypointVar - entrypoint = 'pages/user/subscription/group-management/group-managers' block append meta + meta(name="ol-user", data-type="json", content=user) meta(name="ol-users", data-type="json", content=users) meta(name="ol-groupId", data-type="string", content=groupId) meta(name="ol-groupName", data-type="string", content=name) diff --git a/services/web/app/views/user_membership/group-members-react.pug b/services/web/app/views/user_membership/group-members-react.pug index 314a332489..05327c4b6d 100644 --- a/services/web/app/views/user_membership/group-members-react.pug +++ b/services/web/app/views/user_membership/group-members-react.pug @@ -1,14 +1,16 @@ -extends ../layout-marketing +extends ../layout-react block entrypointVar - entrypoint = 'pages/user/subscription/group-management/group-members' block append meta + meta(name="ol-user", data-type="json", content=user) meta(name="ol-users", data-type="json", content=users) meta(name="ol-groupId", data-type="string", content=groupId) meta(name="ol-groupName", data-type="string", content=name) 
meta(name="ol-groupSize", data-type="json", content=groupSize) meta(name="ol-managedUsersActive", data-type="boolean", content=managedUsersActive) + meta(name="ol-isUserGroupManager", data-type="boolean", content=isUserGroupManager) meta(name="ol-groupSSOActive", data-type="boolean", content=groupSSOActive) meta(name="ol-canUseFlexibleLicensing", data-type="boolean", content=canUseFlexibleLicensing) meta(name="ol-canUseAddSeatsFeature", data-type="boolean", content=canUseAddSeatsFeature) diff --git a/services/web/app/views/user_membership/institution-managers-react.pug b/services/web/app/views/user_membership/institution-managers-react.pug index 690e8409f2..ee62fcd430 100644 --- a/services/web/app/views/user_membership/institution-managers-react.pug +++ b/services/web/app/views/user_membership/institution-managers-react.pug @@ -1,9 +1,10 @@ -extends ../layout-marketing +extends ../layout-react block entrypointVar - entrypoint = 'pages/user/subscription/group-management/institution-managers' block append meta + meta(name="ol-user" data-type="json" content=user) meta(name="ol-users", data-type="json", content=users) meta(name="ol-groupId", data-type="string", content=groupId) meta(name="ol-groupName", data-type="string", content=name) diff --git a/services/web/app/views/user_membership/publisher-managers-react.pug b/services/web/app/views/user_membership/publisher-managers-react.pug index 793bdf9602..a956e30c35 100644 --- a/services/web/app/views/user_membership/publisher-managers-react.pug +++ b/services/web/app/views/user_membership/publisher-managers-react.pug @@ -1,9 +1,10 @@ -extends ../layout-marketing +extends ../layout-react block entrypointVar - entrypoint = 'pages/user/subscription/group-management/publisher-managers' block append meta + meta(name="ol-user" data-type="json" content=user) meta(name="ol-users", data-type="json", content=users) meta(name="ol-groupId", data-type="string", content=groupId) meta(name="ol-groupName", data-type="string", content=name) diff --git a/services/web/bin/test_unit_run_dir b/services/web/bin/test_unit_run_dir new file mode 100755 index 0000000000..4d5d5ecb9a --- /dev/null +++ b/services/web/bin/test_unit_run_dir @@ -0,0 +1,60 @@ +#!/bin/bash + +declare -a vitest_args=("$@") + +has_mocha_test=0 +has_vitest_test=0 + +for dir_path in "$@"; do + if [ -n "$(find "$dir_path" -name "*.js" -type f -print -quit 2>/dev/null)" ]; then + has_mocha_test=1 + fi + + if [ -n "$(find "$dir_path" -name "*.test.mjs" -type f -print -quit 2>/dev/null)" ]; then + has_vitest_test=1 + fi +done + +if [[ -n "$MOCHA_GREP" ]]; then + vitest_args+=("--testNamePattern" "$MOCHA_GREP") +fi + +if [[ -n "$VITEST_NO_CACHE" ]]; then + echo "Disabling cache for vitest." + vitest_args+=("--no-cache") +fi + +echo "Running unit tests in directory: $*" + +# Remove this if/else when we have converted all module tests to vitest. +if (( has_vitest_test == 1 )); then + npm run test:unit:esm -- "${vitest_args[@]}" + vitest_status=$? +else + echo "No vitest tests found in $*, skipping vitest step." + vitest_status=0 +fi + +if (( has_mocha_test == 1 )); then + mocha --recursive --timeout 25000 --exit --grep="$MOCHA_GREP" --require test/unit/bootstrap.js --extension=js "$@" + mocha_status=$? +else + echo "No mocha tests found in $TARGET_DIR, skipping mocha step." 
+ mocha_status=0 +fi + +if [ "$mocha_status" -eq 0 ] && [ "$vitest_status" -eq 0 ]; then + exit 0 +fi + +# Report status briefly at the end for failures + +if [ "$mocha_status" -ne 0 ]; then + echo "Mocha tests failed with status: $mocha_status" +fi + +if [ "$vitest_status" -ne 0 ]; then + echo "Vitest tests failed with status: $vitest_status" +fi + +exit 1 diff --git a/services/web/config/settings.defaults.js b/services/web/config/settings.defaults.js index a7ff970ef0..07558a0420 100644 --- a/services/web/config/settings.defaults.js +++ b/services/web/config/settings.defaults.js @@ -893,6 +893,7 @@ module.exports = { 'figcaption', 'span', 'source', + 'track', 'video', 'del', ], @@ -918,7 +919,7 @@ module.exports = { col: ['width'], figure: ['class', 'id', 'style'], figcaption: ['class', 'id', 'style'], - i: ['aria-hidden', 'aria-label', 'class', 'id'], + i: ['aria-hidden', 'aria-label', 'class', 'id', 'translate'], iframe: [ 'allowfullscreen', 'frameborder', @@ -943,6 +944,7 @@ module.exports = { 'style', ], tr: ['class'], + track: ['src', 'kind', 'srcLang', 'label'], video: ['alt', 'class', 'controls', 'height', 'width'], }, }, @@ -966,6 +968,7 @@ module.exports = { editorToolbarButtons: [], sourceEditorExtensions: [], sourceEditorComponents: [], + pdfLogEntryHeaderActionComponents: [], pdfLogEntryComponents: [], pdfLogEntriesComponents: [], pdfPreviewPromotions: [], @@ -996,8 +999,10 @@ module.exports = { toastGenerators: [], editorSidebarComponents: [], fileTreeToolbarComponents: [], + fullProjectSearchPanel: [], integrationPanelComponents: [], referenceSearchSetting: [], + errorLogsComponents: [], }, moduleImportSequence: [ diff --git a/services/web/docker-compose.ci.yml b/services/web/docker-compose.ci.yml index 164cc22c5a..33b5a3ca2e 100644 --- a/services/web/docker-compose.ci.yml +++ b/services/web/docker-compose.ci.yml @@ -13,6 +13,9 @@ services: logging: driver: local user: node + volumes: + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:unit:app working_dir: /overleaf/services/web env_file: docker-compose.common.env @@ -21,6 +24,7 @@ services: OVERLEAF_CONFIG: NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" + VITEST_NO_CACHE: true depends_on: - mongo @@ -38,6 +42,9 @@ services: OVERLEAF_CONFIG: extra_hosts: - 'www.overleaf.test:127.0.0.1' + volumes: + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run test:acceptance:app user: root depends_on: @@ -85,7 +92,7 @@ services: user: root redis: - image: redis + image: redis:7.4.3 mongo: image: mongo:7.0.20 diff --git a/services/web/docker-compose.yml b/services/web/docker-compose.yml index 5314e94ed3..10e0a7842c 100644 --- a/services/web/docker-compose.yml +++ b/services/web/docker-compose.yml @@ -11,6 +11,7 @@ services: - .:/overleaf/services/web - ../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it working_dir: /overleaf/services/web env_file: docker-compose.common.env environment: @@ -20,6 +21,7 @@ services: LOG_LEVEL: ${LOG_LEVEL:-} NODE_ENV: test NODE_OPTIONS: "--unhandled-rejections=strict" + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:unit:app user: node depends_on: @@ -31,6 +33,7 @@ services: - .:/overleaf/services/web - 
../../node_modules:/overleaf/node_modules - ../../libraries:/overleaf/libraries + - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it user: node working_dir: /overleaf/services/web env_file: docker-compose.common.env @@ -50,6 +53,7 @@ services: - mongo - saml - ldap + entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 -- command: npm run --silent test:acceptance:app test_frontend: @@ -84,7 +88,7 @@ services: - "cypress:run-ct" redis: - image: redis + image: redis:7.4.3 mongo: image: mongo:7.0.20 diff --git a/services/web/frontend/extracted-translations.json b/services/web/frontend/extracted-translations.json index c64817b94c..156ec9df3f 100644 --- a/services/web/frontend/extracted-translations.json +++ b/services/web/frontend/extracted-translations.json @@ -1,7 +1,9 @@ { - "12x_more_compile_time": "", + "0_free_suggestions": "", "1_2_width": "", "1_4_width": "", + "1_free_suggestion": "", + "24x_more_compile_time": "", "3_4_width": "", "About": "", "Account": "", @@ -29,6 +31,7 @@ "about_to_enable_managed_users": "", "about_to_leave_project": "", "about_to_leave_projects": "", + "about_to_remove_user_preamble": "", "about_to_trash_projects": "", "abstract": "", "accept_and_continue": "", @@ -106,6 +109,7 @@ "agree_with_the_terms": "", "ai_assist_in_overleaf_is_included_via_writefull_groups": "", "ai_assist_in_overleaf_is_included_via_writefull_individual": "", + "ai_assist_unavailable_due_to_subscription_type": "", "ai_assistance_to_help_you": "", "ai_based_language_tools": "", "ai_can_make_mistakes": "", @@ -130,6 +134,7 @@ "an_email_has_already_been_sent_to": "", "an_error_occured_while_restoring_project": "", "an_error_occurred_when_verifying_the_coupon_code": "", + "and_upgrade_for_compile_time": "", "annual_discount": "", "anonymous": "", "anyone_with_link_can_edit": "", @@ -186,6 +191,7 @@ "blog": "", "bold": "", "booktabs": "", + "breadcrumbs": "", "browser": "", "bullet_list": "", "buy_licenses": "", @@ -197,6 +203,8 @@ "can_view_content": "", "cancel": "", "cancel_add_on": "", + "cancel_any_existing_subscriptions": "", + "cancel_any_existing_subscriptions_and_leave_any_group_subscriptions": "", "cancel_anytime": "", "cancel_my_account": "", "cancel_my_subscription": "", @@ -286,6 +294,8 @@ "compile_error_entry_description": "", "compile_error_handling": "", "compile_larger_projects": "", + "compile_limit_reached": "", + "compile_limit_upgrade_prompt": "", "compile_mode": "", "compile_terminated_by_user": "", "compiler": "", @@ -304,6 +314,7 @@ "confirm_reject_selected_changes": "", "confirm_reject_selected_changes_plural": "", "confirm_remove_sso_config_enter_email": "", + "confirm_remove_user_type_email_address": "", "confirm_secondary_email": "", "confirm_your_email": "", "confirming": "", @@ -410,7 +421,6 @@ "discount": "", "discount_of": "", "discover_the_fastest_way_to_search_and_cite": "", - "dismiss_error_popup": "", "display": "", "display_deleted_user": "", "display_math": "", @@ -519,7 +529,6 @@ "enabling": "", "end_of_document": "", "ensure_recover_account": "", - "enter_6_digit_code": "", "enter_any_size_including_units_or_valid_latex_command": "", "enter_image_url": "", "enter_the_code": "", @@ -530,6 +539,7 @@ "error": "", "error_assist": "", "error_log": "", + "error_logs_have_had_an_update": "", "error_opening_document": "", "error_opening_document_detail": "", "error_performing_request": "", @@ -623,6 +633,7 @@ "generic_if_problem_continues_contact_us": "", "generic_linked_file_compile_error": "", "generic_something_went_wrong": "", + 
"get_ai_assist": "", "get_collaborative_benefits": "", "get_discounted_plan": "", "get_error_assist": "", @@ -679,6 +690,8 @@ "go_next_page": "", "go_page": "", "go_prev_page": "", + "go_to_account_settings": "", + "go_to_code_location": "", "go_to_code_location_in_pdf": "", "go_to_overleaf": "", "go_to_pdf_location_in_code": "", @@ -836,6 +849,7 @@ "integrations": "", "integrations_like_github": "", "interested_in_cheaper_personal_plan": "", + "introducing_shorter_compile_timeout": "", "invalid_confirmation_code": "", "invalid_email": "", "invalid_file_name": "", @@ -968,6 +982,7 @@ "login_count": "", "login_to_accept_invitation": "", "login_with_service": "", + "logs": "", "logs_and_output_files": "", "looking_multiple_licenses": "", "looks_like_youre_at": "", @@ -1038,6 +1053,7 @@ "more_compile_time": "", "more_editor_toolbar_item": "", "more_info": "", + "more_logs_and_files": "", "more_options": "", "my_library": "", "n_items": "", @@ -1061,6 +1077,7 @@ "neither_agree_nor_disagree": "", "new_compile_domain_notice": "", "new_create_tables_and_equations": "", + "new_error_logs_panel": "", "new_file": "", "new_folder": "", "new_font_open_dyslexic": "", @@ -1129,10 +1146,11 @@ "on_free_plan_upgrade_to_access_features": "", "one_step_away_from_professional_features": "", "only_group_admin_or_managers_can_delete_your_account_1": "", - "only_group_admin_or_managers_can_delete_your_account_2": "", "only_group_admin_or_managers_can_delete_your_account_3": "", - "only_group_admin_or_managers_can_delete_your_account_4": "", - "only_group_admin_or_managers_can_delete_your_account_5": "", + "only_group_admin_or_managers_can_delete_your_account_6": "", + "only_group_admin_or_managers_can_delete_your_account_7": "", + "only_group_admin_or_managers_can_delete_your_account_8": "", + "only_group_admin_or_managers_can_delete_your_account_9": "", "only_importer_can_refresh": "", "open_action_menu": "", "open_advanced_reference_search": "", @@ -1147,7 +1165,6 @@ "organization_name": "", "organize_tags": "", "other": "", - "other_causes_of_compile_timeouts": "", "other_logs_and_files": "", "other_output_files": "", "our_team_will_get_back_to_you_shortly": "", @@ -1224,8 +1241,8 @@ "please_check_your_inbox_to_confirm": "", "please_compile_pdf_before_download": "", "please_compile_pdf_before_word_count": "", - "please_confirm_primary_email": "", - "please_confirm_secondary_email": "", + "please_confirm_primary_email_or_edit": "", + "please_confirm_secondary_email_or_edit": "", "please_confirm_your_email_before_making_it_default": "", "please_contact_support_to_makes_change_to_your_plan": "", "please_enter_confirmation_code": "", @@ -1292,6 +1309,7 @@ "project_ownership_transfer_confirmation_2": "", "project_renamed_or_deleted": "", "project_renamed_or_deleted_detail": "", + "project_search": "", "project_search_file_count": "", "project_search_file_count_plural": "", "project_search_result_count": "", @@ -1327,6 +1345,8 @@ "reactivate_subscription": "", "read_lines_from_path": "", "read_more": "", + "read_more_about_compile_timeout_changes": "", + "read_more_about_fix_prevent_timeout": "", "read_more_about_free_compile_timeouts_servers": "", "read_only_dropbox_sync_message": "", "read_only_token": "", @@ -1375,7 +1395,6 @@ "remote_service_error": "", "remove": "", "remove_access": "", - "remove_email_address": "", "remove_from_group": "", "remove_link": "", "remove_manager": "", @@ -1383,6 +1402,7 @@ "remove_secondary_email_addresses": "", "remove_sso_login_option": "", "remove_tag": "", + "remove_user": "", 
"removed_from_project": "", "removing": "", "rename": "", @@ -1403,12 +1423,10 @@ "resend": "", "resend_confirmation_code": "", "resend_confirmation_email": "", - "resend_email": "", "resend_group_invite": "", "resend_link_sso": "", "resend_managed_user_invite": "", "resending_confirmation_code": "", - "resending_confirmation_email": "", "resize": "", "resolve_comment": "", "resolve_comment_error_message": "", @@ -1484,6 +1502,7 @@ "search_whole_word": "", "search_within_selection": "", "searched_path_for_lines_containing": "", + "searching_all_project_files_is_now_available": "", "security": "", "see_suggestions_from_collaborators": "", "select_a_column_or_a_merged_cell_to_align": "", @@ -1520,11 +1539,11 @@ "select_user": "", "selected": "", "selection_deleted": "", + "send_confirmation_code": "", "send_first_message": "", "send_message": "", "send_request": "", "sending": "", - "sent": "", "server_error": "", "server_pro_license_entitlement_line_1": "", "server_pro_license_entitlement_line_2": "", @@ -1545,6 +1564,8 @@ "sharelatex_beta_program": "", "shortcut_to_open_advanced_reference_search": "", "show_all_projects": "", + "show_breadcrumbs": "", + "show_breadcrumbs_in_toolbar": "", "show_document_preamble": "", "show_equation_preview": "", "show_file_tree": "", @@ -1640,6 +1661,7 @@ "start_a_free_trial": "", "start_by_adding_your_email": "", "start_by_fixing_the_first_error_in_your_doc": "", + "start_by_fixing_the_first_error_in_your_document": "", "start_free_trial": "", "start_free_trial_without_exclamation": "", "start_the_conversation_by_saying_hello_or_sharing_an_update": "", @@ -1674,6 +1696,7 @@ "suggest_a_different_fix": "", "suggest_fix": "", "suggested": "", + "suggested_code": "", "suggested_fix_for_error_in_path": "", "suggestion_applied": "", "suggests_code_completions_while_typing": "", @@ -1755,6 +1778,12 @@ "there_is_an_unrecoverable_latex_error": "", "there_was_a_problem_restoring_the_project_please_try_again_in_a_few_moments_or_contact_us": "", "they_lose_access_to_account": "", + "they_will_be_removed_from_the_group": "", + "they_will_continue_to_have_access_to_any_projects_shared_with_them": "", + "they_will_no_longer_be_a_managed_user": "", + "they_will_retain_ownership_of_projects_currently_owned_by_them_and_collaborators_will_become_read_only": "", + "they_will_retain_their_existing_account_on_the_free_plan": "", + "they_wont_be_able_to_log_in_with_sso_they_will_need_to_set_password": "", "this_action_cannot_be_reversed": "", "this_action_cannot_be_undone": "", "this_address_will_be_shown_on_the_invoice": "", @@ -1766,6 +1795,7 @@ "this_is_a_new_feature": "", "this_is_the_file_that_references_pulled_from_your_reference_manager_will_be_added_to": "", "this_project_already_has_maximum_collaborators": "", + "this_project_compiled_but_soon_might_not": "", "this_project_contains_a_file_called_output": "", "this_project_exceeded_collaborator_limit": "", "this_project_exceeded_compile_timeout_limit_on_free_plan": "", @@ -1950,7 +1980,7 @@ "updating": "", "upgrade": "", "upgrade_cc_btn": "", - "upgrade_for_12x_more_compile_time": "", + "upgrade_for_more_compile_time": "", "upgrade_my_plan": "", "upgrade_now": "", "upgrade_plan": "", @@ -1981,6 +2011,7 @@ "user_deletion_error": "", "user_deletion_password_reset_tip": "", "user_first_name_attribute": "", + "user_has_left_organization_and_need_to_transfer_their_projects": "", "user_last_name_attribute": "", "user_sessions": "", "using_latex": "", @@ -2047,8 +2078,8 @@ 
"were_making_some_changes_to_project_sharing_this_means_you_will_be_visible": "", "were_performing_maintenance": "", "were_redesigning_our_editor_to_make_it_easier_to_use": "", - "weve_recently_reduced_the_compile_timeout_limit_which_may_have_affected_this_project": "", - "weve_recently_reduced_the_compile_timeout_limit_which_may_have_affected_your_project": "", + "were_reducing_compile_timeout": "", + "what_did_you_find_most_helpful": "", "what_do_you_need_help_with": "", "what_does_this_mean": "", "what_does_this_mean_for_you": "", @@ -2119,6 +2150,7 @@ "you_have_been_removed_from_this_project_and_will_be_redirected_to_project_dashboard": "", "you_have_x_licenses_and_your_plan_supports_up_to_y": "", "you_have_x_licenses_on_your_subscription": "", + "you_may_be_able_to_fix_issues_to_speed_up_the_compile": "", "you_need_to_configure_your_sso_settings": "", "you_unpaused_your_subscription": "", "you_will_be_able_to_reassign_subscription": "", @@ -2153,6 +2185,7 @@ "your_plan_is_limited_to_n_editors": "", "your_plan_is_limited_to_n_editors_plural": "", "your_premium_plan_is_paused": "", + "your_project_compiled_but_soon_might_not": "", "your_project_exceeded_collaborator_limit": "", "your_project_exceeded_compile_timeout_limit_on_free_plan": "", "your_project_near_compile_timeout_limit": "", diff --git a/services/web/frontend/fonts/material-symbols/MaterialSymbolsRoundedUnfilledPartialSlice.woff2 b/services/web/frontend/fonts/material-symbols/MaterialSymbolsRoundedUnfilledPartialSlice.woff2 index df942df176..a507329c8e 100644 Binary files a/services/web/frontend/fonts/material-symbols/MaterialSymbolsRoundedUnfilledPartialSlice.woff2 and b/services/web/frontend/fonts/material-symbols/MaterialSymbolsRoundedUnfilledPartialSlice.woff2 differ diff --git a/services/web/frontend/fonts/material-symbols/unfilled-symbols.mjs b/services/web/frontend/fonts/material-symbols/unfilled-symbols.mjs index baefac05aa..222be1fd36 100644 --- a/services/web/frontend/fonts/material-symbols/unfilled-symbols.mjs +++ b/services/web/frontend/fonts/material-symbols/unfilled-symbols.mjs @@ -4,26 +4,32 @@ // You may need to hard reload your browser window to see the changes. 
export default /** @type {const} */ ([ + 'auto_delete', 'book_5', 'brush', 'code', + 'content_copy', 'create_new_folder', 'delete', 'description', + 'error', 'experiment', 'forum', 'help', 'image', 'info', 'integration_instructions', + 'lightbulb', 'note_add', 'picture_as_pdf', 'rate_review', 'report', + 'search', 'settings', 'space_dashboard', 'table_chart', + 'thumb_down', + 'thumb_up', 'upload_file', 'web_asset', - 'error', ]) diff --git a/services/web/frontend/js/features/chat/context/chat-context.tsx b/services/web/frontend/js/features/chat/context/chat-context.tsx index 9feca60579..2ba0ff5f5d 100644 --- a/services/web/frontend/js/features/chat/context/chat-context.tsx +++ b/services/web/frontend/js/features/chat/context/chat-context.tsx @@ -193,7 +193,7 @@ export const ChatContext = createContext< >(undefined) export const ChatProvider: FC = ({ children }) => { - const chatEnabled = getMeta('ol-chatEnabled') + const chatEnabled = getMeta('ol-capabilities')?.includes('chat') const clientId = useRef() if (clientId.current === undefined) { diff --git a/services/web/frontend/js/features/contact-form/index.js b/services/web/frontend/js/features/contact-form/index.js index 0b4a4898aa..51aff806e3 100644 --- a/services/web/frontend/js/features/contact-form/index.js +++ b/services/web/frontend/js/features/contact-form/index.js @@ -23,7 +23,7 @@ document }) document.querySelectorAll('[data-ol-contact-form]').forEach(el => { - el.addEventListener('submit', function (e) { + el.addEventListener('submit', function () { const emailValue = document.querySelector( '[data-ol-contact-form-email-input]' ).value diff --git a/services/web/frontend/js/features/contact-form/search.js b/services/web/frontend/js/features/contact-form/search.js index 10e2ab2f63..1787a068be 100644 --- a/services/web/frontend/js/features/contact-form/search.js +++ b/services/web/frontend/js/features/contact-form/search.js @@ -47,8 +47,9 @@ export function setupSearch(formEl) { const iconEl = document.createElement('i') iconEl.className = 'material-symbols dropdown-item-trailing-icon' - iconEl.innerText = 'open_in_new' + iconEl.textContent = 'open_in_new' iconEl.setAttribute('aria-hidden', 'true') + iconEl.translate = false linkEl.append(iconEl) resultsEl.append(liEl) diff --git a/services/web/frontend/js/features/editor-left-menu/context/project-settings-context.tsx b/services/web/frontend/js/features/editor-left-menu/context/project-settings-context.tsx index e40c4c6872..e5cd576ba1 100644 --- a/services/web/frontend/js/features/editor-left-menu/context/project-settings-context.tsx +++ b/services/web/frontend/js/features/editor-left-menu/context/project-settings-context.tsx @@ -27,6 +27,7 @@ type ProjectSettingsSetterContextValue = { setLineHeight: (lineHeight: UserSettings['lineHeight']) => void setPdfViewer: (pdfViewer: UserSettings['pdfViewer']) => void setMathPreview: (mathPreview: UserSettings['mathPreview']) => void + setBreadcrumbs: (breadcrumbs: UserSettings['breadcrumbs']) => void } type ProjectSettingsContextValue = Partial & @@ -74,6 +75,8 @@ export const ProjectSettingsProvider: FC = ({ setPdfViewer, mathPreview, setMathPreview, + breadcrumbs, + setBreadcrumbs, } = useUserWideSettings() useProjectWideSettingsSocketListener() @@ -110,6 +113,8 @@ export const ProjectSettingsProvider: FC = ({ setPdfViewer, mathPreview, setMathPreview, + breadcrumbs, + setBreadcrumbs, }), [ compiler, @@ -142,6 +147,8 @@ export const ProjectSettingsProvider: FC = ({ setPdfViewer, mathPreview, setMathPreview, + breadcrumbs, + 
setBreadcrumbs, ] ) diff --git a/services/web/frontend/js/features/editor-left-menu/hooks/use-root-doc-id.tsx b/services/web/frontend/js/features/editor-left-menu/hooks/use-root-doc-id.tsx index ca2e85841f..8a704f87ac 100644 --- a/services/web/frontend/js/features/editor-left-menu/hooks/use-root-doc-id.tsx +++ b/services/web/frontend/js/features/editor-left-menu/hooks/use-root-doc-id.tsx @@ -6,7 +6,7 @@ import useSaveProjectSettings from './use-save-project-settings' export default function useRootDocId() { const [rootDocId] = - useScopeValue('project.rootDoc_id') + useScopeValue('project.rootDocId') const { permissionsLevel } = useEditorContext() const saveProjectSettings = useSaveProjectSettings() diff --git a/services/web/frontend/js/features/editor-left-menu/hooks/use-save-project-settings.tsx b/services/web/frontend/js/features/editor-left-menu/hooks/use-save-project-settings.tsx index 07a20a10fa..f34c506708 100644 --- a/services/web/frontend/js/features/editor-left-menu/hooks/use-save-project-settings.tsx +++ b/services/web/frontend/js/features/editor-left-menu/hooks/use-save-project-settings.tsx @@ -19,13 +19,7 @@ export default function useSaveProjectSettings() { await saveProjectSettings(projectId, { [key]: newSetting, }) - - // rootDocId is used in our tsx and our endpoint, but rootDoc_id is used in our project $scope, etc - // as we use both namings in many files, and convert back and forth, - // its complicated to seperate and choose one name for all usages - // todo: make rootDocId or rootDoc_id consistent, and remove need for this/ other conversions - const settingsKey = key === 'rootDocId' ? 'rootDoc_id' : key - setProjectSettings({ ...projectSettings, [settingsKey]: newSetting }) + setProjectSettings({ ...projectSettings, [key]: newSetting }) } } } diff --git a/services/web/frontend/js/features/editor-left-menu/hooks/use-user-wide-settings.tsx b/services/web/frontend/js/features/editor-left-menu/hooks/use-user-wide-settings.tsx index 70202c9446..978148721a 100644 --- a/services/web/frontend/js/features/editor-left-menu/hooks/use-user-wide-settings.tsx +++ b/services/web/frontend/js/features/editor-left-menu/hooks/use-user-wide-settings.tsx @@ -20,6 +20,7 @@ export default function useUserWideSettings() { lineHeight, pdfViewer, mathPreview, + breadcrumbs, } = userSettings const setOverallTheme = useSetOverallTheme() @@ -93,6 +94,13 @@ export default function useUserWideSettings() { [saveUserSettings] ) + const setBreadcrumbs = useCallback( + (breadcrumbs: UserSettings['breadcrumbs']) => { + saveUserSettings('breadcrumbs', breadcrumbs) + }, + [saveUserSettings] + ) + return { autoComplete, setAutoComplete, @@ -116,5 +124,7 @@ export default function useUserWideSettings() { setPdfViewer, mathPreview, setMathPreview, + breadcrumbs, + setBreadcrumbs, } } diff --git a/services/web/frontend/js/features/editor-navigation-toolbar/components/toolbar-header.tsx b/services/web/frontend/js/features/editor-navigation-toolbar/components/toolbar-header.tsx index 4304768c48..87bcbc0aac 100644 --- a/services/web/frontend/js/features/editor-navigation-toolbar/components/toolbar-header.tsx +++ b/services/web/frontend/js/features/editor-navigation-toolbar/components/toolbar-header.tsx @@ -80,7 +80,7 @@ const ToolbarHeader = React.memo(function ToolbarHeader({ openShareModal: () => void trackChangesVisible: boolean | undefined }) { - const chatEnabled = getMeta('ol-chatEnabled') + const chatEnabled = getMeta('ol-capabilities')?.includes('chat') const { t } = useTranslation() const 
shouldDisplayPublishButton = hasPublishPermissions && PublishButton diff --git a/services/web/frontend/js/features/event-tracking/search-events.ts b/services/web/frontend/js/features/event-tracking/search-events.ts index cd9ff4b8ba..630d07aeaa 100644 --- a/services/web/frontend/js/features/event-tracking/search-events.ts +++ b/services/web/frontend/js/features/event-tracking/search-events.ts @@ -6,7 +6,7 @@ type SearchEventSegmentation = { searchType: 'full-project' } & ( | { method: 'keyboard' } - | { method: 'button'; location: 'toolbar' | 'search-form' } + | { method: 'button'; location: 'toolbar' | 'search-form' | 'rail' } )) | ({ searchType: 'document' diff --git a/services/web/frontend/js/features/file-tree/components/file-tree-create/modes/file-tree-upload-doc.tsx b/services/web/frontend/js/features/file-tree/components/file-tree-create/modes/file-tree-upload-doc.tsx index 2ffd591032..909e1a1962 100644 --- a/services/web/frontend/js/features/file-tree/components/file-tree-create/modes/file-tree-upload-doc.tsx +++ b/services/web/frontend/js/features/file-tree/components/file-tree-create/modes/file-tree-upload-doc.tsx @@ -176,7 +176,6 @@ export default function FileTreeUploadDoc() { // close the modal when all the uploads completed successfully .on('complete', result => { if (!result.failed.length) { - // $scope.$emit('done', { name: name }) cancel() } }) diff --git a/services/web/frontend/js/features/form-helpers/create-icon.js b/services/web/frontend/js/features/form-helpers/create-icon.js new file mode 100644 index 0000000000..13b2a04bf3 --- /dev/null +++ b/services/web/frontend/js/features/form-helpers/create-icon.js @@ -0,0 +1,8 @@ +export default function createIcon(type) { + const icon = document.createElement('span') + icon.className = 'material-symbols' + icon.setAttribute('aria-hidden', 'true') + icon.setAttribute('translate', 'no') + icon.textContent = type + return icon +} diff --git a/services/web/frontend/js/features/form-helpers/hydrate-form.js b/services/web/frontend/js/features/form-helpers/hydrate-form.js index ed7b9fc26e..89bd1a657d 100644 --- a/services/web/frontend/js/features/form-helpers/hydrate-form.js +++ b/services/web/frontend/js/features/form-helpers/hydrate-form.js @@ -4,6 +4,7 @@ import { canSkipCaptcha, validateCaptchaV2 } from './captcha' import inputValidator from './input-validator' import { disableElement, enableElement } from '../utils/disableElement' import { isBootstrap5 } from '@/features/utils/bootstrap-5' +import createIcon from '@/features/form-helpers/create-icon' // Form helper(s) to handle: // - Attaching to the relevant form elements @@ -164,10 +165,7 @@ function createNotificationFromMessageBS5(message) { if (materialIcon) { const iconEl = document.createElement('div') iconEl.className = 'notification-icon' - const iconSpan = document.createElement('span') - iconSpan.className = 'material-symbols' - iconSpan.setAttribute('aria-hidden', 'true') - iconSpan.textContent = materialIcon + const iconSpan = createIcon(materialIcon) iconEl.append(iconSpan) messageEl.append(iconEl) } @@ -315,10 +313,9 @@ function showMessagesNewStyle(formEl, messageBag) { } // create the left icon - const icon = document.createElement('span') - icon.className = 'material-symbols' - icon.setAttribute('aria-hidden', 'true') - icon.innerText = message.type === 'error' ? 'error' : 'check_circle' + const icon = createIcon( + message.type === 'error' ? 
'error' : 'check_circle' + ) const messageIcon = document.createElement('div') messageIcon.className = 'notification-icon' messageIcon.appendChild(icon) diff --git a/services/web/frontend/js/features/form-helpers/input-validator.js b/services/web/frontend/js/features/form-helpers/input-validator.js index 411c6c0e83..f01c4af3da 100644 --- a/services/web/frontend/js/features/form-helpers/input-validator.js +++ b/services/web/frontend/js/features/form-helpers/input-validator.js @@ -1,9 +1,25 @@ +import { isBootstrap5 } from '@/features/utils/bootstrap-5' +import createIcon from '@/features/form-helpers/create-icon' + export default function inputValidator(inputEl) { const messageEl = document.createElement('div') messageEl.className = inputEl.getAttribute('data-ol-validation-message-classes') || - 'small text-danger mt-2' + 'small text-danger mt-2 form-text' messageEl.hidden = true + + const messageInnerEl = messageEl.appendChild(document.createElement('span')) + messageInnerEl.className = 'form-text-inner' + + const messageTextNode = document.createTextNode('') + + // In Bootstrap 5, add an icon + if (isBootstrap5()) { + const iconEl = createIcon('error') + messageInnerEl.append(iconEl) + } + messageInnerEl.append(messageTextNode) + inputEl.insertAdjacentElement('afterend', messageEl) // Hide messages until the user leaves the input field or submits the form. @@ -54,7 +70,7 @@ export default function inputValidator(inputEl) { // Require another blur before displaying errors again. canDisplayErrorMessages = false } else { - messageEl.textContent = inputEl.validationMessage + messageTextNode.data = inputEl.validationMessage messageEl.hidden = false } } diff --git a/services/web/frontend/js/features/group-management/components/members-table/dropdown-button.tsx b/services/web/frontend/js/features/group-management/components/members-table/dropdown-button.tsx index bd3b5ee10e..9e7038363a 100644 --- a/services/web/frontend/js/features/group-management/components/members-table/dropdown-button.tsx +++ b/services/web/frontend/js/features/group-management/components/members-table/dropdown-button.tsx @@ -20,6 +20,7 @@ import getMeta from '@/utils/meta' import MaterialIcon from '@/shared/components/material-icon' import DropdownListItem from '@/features/ui/components/bootstrap-5/dropdown-list-item' import { Spinner } from 'react-bootstrap' +import { sendMB } from '@/infrastructure/event-tracking' type resendInviteResponse = { success: boolean @@ -28,6 +29,7 @@ type resendInviteResponse = { type ManagedUserDropdownButtonProps = { user: User openOffboardingModalForUser: (user: User) => void + openRemoveModalForUser: (user: User) => void openUnlinkUserModal: (user: User) => void groupId: string setGroupUserAlert: Dispatch> @@ -36,6 +38,7 @@ type ManagedUserDropdownButtonProps = { export default function DropdownButton({ user, openOffboardingModalForUser, + openRemoveModalForUser, openUnlinkUserModal, groupId, setGroupUserAlert, @@ -57,7 +60,8 @@ export default function DropdownButton({ const managedUsersActive = getMeta('ol-managedUsersActive') const groupSSOActive = getMeta('ol-groupSSOActive') - + const userId = getMeta('ol-user_id') + const isUserGroupManager = getMeta('ol-isUserGroupManager') const userPending = user.invite const isGroupSSOLinked = !userPending && user.enrollment?.sso?.some(sso => sso.groupId === groupId) @@ -169,9 +173,15 @@ export default function DropdownButton({ } const onDeleteUserClick = () => { + sendMB('delete-managed-user-selected') openOffboardingModalForUser(user) } + const 
onReleaseUserClick = () => { + sendMB('remove-managed-user-selected') + openRemoveModalForUser(user) + } + const onRemoveFromGroup = () => { removeMember(user) } @@ -229,10 +239,13 @@ export default function DropdownButton({ ) } - if (isUserManaged && !user.isEntityAdmin) { + if ( + isUserManaged && + !user.isEntityAdmin && + (!isUserGroupManager || userId !== user._id) + ) { buttons.push( ) + buttons.push( + + {t('remove_user')} + + ) } else if (!isUserManaged) { buttons.push( {t('remove_from_group')} @@ -256,7 +277,7 @@ export default function DropdownButton({ if (buttons.length === 0) { buttons.push( - + void + openRemoveModalForUser: (user: User) => void openUnlinkUserModal: (user: User) => void groupId: string setGroupUserAlert: Dispatch> @@ -24,6 +25,7 @@ type ManagedUserRowProps = { export default function MemberRow({ user, openOffboardingModalForUser, + openRemoveModalForUser, openUnlinkUserModal, setGroupUserAlert, groupId, @@ -112,6 +114,7 @@ export default function MemberRow({ ( undefined ) + const [userToRemove, setUserToRemove] = useState(undefined) const [groupUserAlert, setGroupUserAlert] = useState(undefined) const [userToUnlink, setUserToUnlink] = useState(undefined) @@ -101,6 +103,7 @@ export default function MembersList({ groupId }: ManagedUsersListProps) { key={user.email} user={user} openOffboardingModalForUser={setUserToOffboard} + openRemoveModalForUser={setUserToRemove} openUnlinkUserModal={setUserToUnlink} setGroupUserAlert={setGroupUserAlert} groupId={groupId} @@ -116,6 +119,13 @@ export default function MembersList({ groupId }: ManagedUsersListProps) { onClose={() => setUserToOffboard(undefined)} /> )} + {userToRemove && ( + setUserToRemove(undefined)} + /> + )} {userToUnlink && ( { + const handleDeleteUserSubmit = (event: React.FormEvent) => { event.preventDefault() + sendMB('delete-managed-user-confirmed') runAsync( postJSON(`/manage/groups/${groupId}/offboardManagedUser/${user._id}`, { body: { diff --git a/services/web/frontend/js/features/group-management/components/members-table/remove-managed-user-modal.tsx b/services/web/frontend/js/features/group-management/components/members-table/remove-managed-user-modal.tsx new file mode 100644 index 0000000000..c3c6f8caa4 --- /dev/null +++ b/services/web/frontend/js/features/group-management/components/members-table/remove-managed-user-modal.tsx @@ -0,0 +1,138 @@ +import { User } from '../../../../../../types/group-management/user' +import { useState } from 'react' +import useAsync from '@/shared/hooks/use-async' +import { useTranslation, Trans } from 'react-i18next' +import { useLocation } from '@/shared/hooks/use-location' +import { FetchError, postJSON } from '@/infrastructure/fetch-json' +import { debugConsole } from '@/utils/debugging' +import OLModal, { + OLModalBody, + OLModalFooter, + OLModalHeader, + OLModalTitle, +} from '@/features/ui/components/ol/ol-modal' +import OLFormGroup from '@/features/ui/components/ol/ol-form-group' +import OLButton from '@/features/ui/components/ol/ol-button' +import OLNotification from '@/features/ui/components/ol/ol-notification' +import OLFormControl from '@/features/ui/components/ol/ol-form-control' +import OLFormLabel from '@/features/ui/components/ol/ol-form-label' +import { sendMB } from '@/infrastructure/event-tracking' + +type RemoveManagedUserModalProps = { + user: User + groupId: string + onClose: () => void +} + +export default function RemoveManagedUserModal({ + user, + groupId, + onClose, +}: RemoveManagedUserModalProps) { + const { t } = useTranslation() + const 
location = useLocation()
+  const { isLoading, isSuccess, error, setError, runAsync } = useAsync<
+    any,
+    any
+  >()
+  const [suppliedEmail, setSuppliedEmail] = useState<string>()
+  const shouldEnableRemoveUserButton = suppliedEmail === user.email
+  const userFullName = user.last_name
+    ? `${user.first_name || ''} ${user.last_name || ''}`
+    : user.first_name
+
+  const handleReleaseUserSubmit = (event: React.FormEvent) => {
+    event.preventDefault()
+    sendMB('remove-managed-user-confirmed')
+    runAsync(
+      postJSON(`/manage/groups/${groupId}/release-managed-user/${user._id}`, {
+        body: {
+          verificationEmail: suppliedEmail,
+        },
+      })
+        .then(() => {
+          location.reload()
+        })
+        .catch(err => {
+          setError(
+            err instanceof FetchError ? err.getUserFacingMessage() : err.message
+          )
+          debugConsole.error(err)
+        })
+    )
+  }
+
+  return (
+    <OLModal show onHide={onClose}>
+      <form onSubmit={handleReleaseUserSubmit}>
+        <OLModalHeader>
+          <OLModalTitle>{t('remove_user')}</OLModalTitle>
+        </OLModalHeader>
+        <OLModalBody>
+          <p>
+            {t('about_to_remove_user_preamble', {
+              userName: userFullName,
+              userEmail: user.email,
+            })}
+          </p>
+          <ul>
+            <li>{t('they_will_be_removed_from_the_group')}</li>
+            <li>{t('they_will_no_longer_be_a_managed_user')}</li>
+            <li>
+              {t('they_will_retain_their_existing_account_on_the_free_plan')}
+            </li>
+            <li>
+              {t(
+                'they_will_retain_ownership_of_projects_currently_owned_by_them_and_collaborators_will_become_read_only'
+              )}
+            </li>
+            <li>
+              {t(
+                'they_will_continue_to_have_access_to_any_projects_shared_with_them'
+              )}
+            </li>
+            <li>
+              {t(
+                'they_wont_be_able_to_log_in_with_sso_they_will_need_to_set_password'
+              )}
+            </li>
+          </ul>
+          <p>
+            <Trans
+              i18nKey="..."
+              components={[
+                <strong />,
+              ]} // eslint-disable-line react/jsx-key
+            />
+          </p>
+          <OLFormGroup>
+            <OLFormLabel>
+              {t('confirm_remove_user_type_email_address', {
+                userName: userFullName,
+              })}
+            </OLFormLabel>
+            <OLFormControl
+              type="email"
+              onChange={e => setSuppliedEmail(e.target.value)}
+            />
+          </OLFormGroup>
+          {error && (
+            <OLNotification type="error" content={error} />
+          )}
+        </OLModalBody>
+        <OLModalFooter>
+          <OLButton variant="secondary" disabled={isLoading} onClick={onClose}>
+            {t('cancel')}
+          </OLButton>
+          <OLButton
+            type="submit"
+            variant="danger"
+            disabled={isLoading || isSuccess || !shouldEnableRemoveUserButton}
+            isLoading={isLoading}
+          >
+            {t('remove_user')}
+          </OLButton>
+        </OLModalFooter>
+      </form>
+    </OLModal>
+ ) +} diff --git a/services/web/frontend/js/features/history/components/change-list/dropdown/actions-dropdown.tsx b/services/web/frontend/js/features/history/components/change-list/dropdown/actions-dropdown.tsx index 9029260057..df31a6c58f 100644 --- a/services/web/frontend/js/features/history/components/change-list/dropdown/actions-dropdown.tsx +++ b/services/web/frontend/js/features/history/components/change-list/dropdown/actions-dropdown.tsx @@ -2,20 +2,21 @@ import React, { ReactNode } from 'react' import { Dropdown, DropdownMenu, + DropdownToggle, } from '@/features/ui/components/bootstrap-5/dropdown-menu' -import DropdownToggleWithTooltip from '@/features/ui/components/bootstrap-5/dropdown-toggle-with-tooltip' +import OLTooltip from '@/features/ui/components/ol/ol-tooltip' type ActionDropdownProps = { id: string children: React.ReactNode isOpened: boolean iconTag: ReactNode - toolTipDescription: string + tooltipDescription: string setIsOpened: (isOpened: boolean) => void } function ActionsDropdown(props: ActionDropdownProps) { - const { id, children, isOpened, iconTag, setIsOpened, toolTipDescription } = + const { id, children, isOpened, iconTag, setIsOpened, tooltipDescription } = props return ( setIsOpened(open)} > - + {/* OverlayTrigger won't fire unless the child is a non-react html element (e.g div, span) */} + + + {iconTag} + + + {children} diff --git a/services/web/frontend/js/features/history/components/change-list/dropdown/compare-version-dropdown.tsx b/services/web/frontend/js/features/history/components/change-list/dropdown/compare-version-dropdown.tsx index 91f0bf991a..11967e3302 100644 --- a/services/web/frontend/js/features/history/components/change-list/dropdown/compare-version-dropdown.tsx +++ b/services/web/frontend/js/features/history/components/change-list/dropdown/compare-version-dropdown.tsx @@ -21,7 +21,7 @@ function CompareVersionDropdown({ id={id} isOpened={isOpened} setIsOpened={setIsOpened} - toolTipDescription={t('compare')} + tooltipDescription={t('compare')} iconTag={ {permissions.labelVersion && ( - + )} diff --git a/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/add-label.tsx b/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/add-label.tsx index 882bb9a439..1381b620d4 100644 --- a/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/add-label.tsx +++ b/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/add-label.tsx @@ -4,18 +4,12 @@ import OLDropdownMenuItem from '@/features/ui/components/ol/ol-dropdown-menu-ite import OLTagIcon from '@/features/ui/components/ol/icons/ol-tag-icon' import AddLabelModal from '../../add-label-modal' -type DownloadProps = { - projectId: string +type AddLabelProps = { version: number closeDropdown: () => void } -function AddLabel({ - version, - projectId, - closeDropdown, - ...props -}: DownloadProps) { +function AddLabel({ version, closeDropdown, ...props }: AddLabelProps) { const { t } = useTranslation() const [showModal, setShowModal] = useState(false) diff --git a/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/compare-items.tsx b/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/compare-items.tsx index 78ba0aae75..dd236ed98b 100644 --- a/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/compare-items.tsx +++ 
b/services/web/frontend/js/features/history/components/change-list/dropdown/menu-item/compare-items.tsx @@ -34,7 +34,7 @@ function CompareItems({ toVTimestamp: selRange.toVTimestamp, }} closeDropdown={closeDropdown} - toolTipDescription={t('history_compare_from_this_version')} + tooltipDescription={t('history_compare_from_this_version')} icon={ void } function Compare({ comparisonRange, closeDropdown, - toolTipDescription, + tooltipDescription, icon, }: CompareProps) { const { setSelection } = useHistoryContext() @@ -32,12 +32,12 @@ function Compare({ return ( diff --git a/services/web/frontend/js/features/history/components/file-tree/history-file-tree-doc.tsx b/services/web/frontend/js/features/history/components/file-tree/history-file-tree-doc.tsx index 3b788eb046..e3543ef527 100644 --- a/services/web/frontend/js/features/history/components/file-tree/history-file-tree-doc.tsx +++ b/services/web/frontend/js/features/history/components/file-tree/history-file-tree-doc.tsx @@ -1,9 +1,12 @@ import { memo } from 'react' import classNames from 'classnames' import HistoryFileTreeItem from './history-file-tree-item' -import iconTypeFromName from '../../../file-tree/util/icon-type-from-name' +import iconTypeFromName, { + newEditorIconTypeFromName, +} from '../../../file-tree/util/icon-type-from-name' import type { FileDiff } from '../../services/types/file' import MaterialIcon from '@/shared/components/material-icon' +import { useIsNewEditorEnabled } from '@/features/ide-redesign/utils/new-editor-utils' type HistoryFileTreeDocProps = { file: FileDiff @@ -20,6 +23,16 @@ function HistoryFileTreeDoc({ onClick, onKeyDown, }: HistoryFileTreeDocProps) { + const newEditor = useIsNewEditorEnabled() + const icon = newEditor ? ( + + ) : ( + + ) return (
  • - } + icons={icon} />
  • ) diff --git a/services/web/frontend/js/features/history/components/file-tree/history-file-tree-folder.tsx b/services/web/frontend/js/features/history/components/file-tree/history-file-tree-folder.tsx index 6c2c912f8c..44cb7f2921 100644 --- a/services/web/frontend/js/features/history/components/file-tree/history-file-tree-folder.tsx +++ b/services/web/frontend/js/features/history/components/file-tree/history-file-tree-folder.tsx @@ -6,6 +6,7 @@ import HistoryFileTreeFolderList from './history-file-tree-folder-list' import type { HistoryDoc, HistoryFileTree } from '../../utils/file-tree' import MaterialIcon from '@/shared/components/material-icon' +import { useIsNewEditorEnabled } from '@/features/ide-redesign/utils/new-editor-utils' type HistoryFileTreeFolderProps = { name: string @@ -35,6 +36,7 @@ function HistoryFileTreeFolder({ docs, }: HistoryFileTreeFolderProps) { const { t } = useTranslation() + const newEditor = useIsNewEditorEnabled() const [expanded, setExpanded] = useState(() => { return hasChanges({ name, folders, docs }) @@ -52,10 +54,12 @@ function HistoryFileTreeFolder({ className="file-tree-expand-icon" /> - + {!newEditor && ( + + )} ) @@ -79,7 +83,11 @@ function HistoryFileTreeFolder({ {expanded ? ( - + ) : null} ) diff --git a/services/web/frontend/js/features/history/extensions/highlights.ts b/services/web/frontend/js/features/history/extensions/highlights.ts index ce274cf724..1f81f82e74 100644 --- a/services/web/frontend/js/features/history/extensions/highlights.ts +++ b/services/web/frontend/js/features/history/extensions/highlights.ts @@ -238,7 +238,7 @@ class EmptyLineAdditionMarkerWidget extends WidgetType { super() } - toDOM(view: EditorView): HTMLElement { + toDOM(): HTMLElement { const element = document.createElement('span') element.classList.add( 'ol-cm-empty-line-addition-marker', @@ -255,7 +255,7 @@ class EmptyLineDeletionMarkerWidget extends WidgetType { super() } - toDOM(view: EditorView): HTMLElement { + toDOM(): HTMLElement { const element = document.createElement('span') element.classList.add( 'ol-cm-empty-line-deletion-marker', @@ -297,7 +297,7 @@ class ChangeGutterMarker extends GutterMarker { super() } - toDOM(view: EditorView) { + toDOM() { const el = document.createElement('div') el.className = 'ol-cm-changed-line-gutter' el.style.setProperty('--hue', this.hue.toString()) diff --git a/services/web/frontend/js/features/ide-react/components/layout/main-layout.tsx b/services/web/frontend/js/features/ide-react/components/layout/main-layout.tsx index b0a65e12bb..93382d613a 100644 --- a/services/web/frontend/js/features/ide-react/components/layout/main-layout.tsx +++ b/services/web/frontend/js/features/ide-react/components/layout/main-layout.tsx @@ -47,7 +47,8 @@ export const MainLayout: FC = () => { handlePaneExpand: handleChatExpand, } = useChatPane() - const chatEnabled = getMeta('ol-chatEnabled') && !isRestrictedTokenMember + const chatEnabled = + getMeta('ol-capabilities')?.includes('chat') && !isRestrictedTokenMember const { t } = useTranslation() diff --git a/services/web/frontend/js/features/ide-react/context/command-registry-context.tsx b/services/web/frontend/js/features/ide-react/context/command-registry-context.tsx index e8bec19b8b..ff54c21f2a 100644 --- a/services/web/frontend/js/features/ide-react/context/command-registry-context.tsx +++ b/services/web/frontend/js/features/ide-react/context/command-registry-context.tsx @@ -1,4 +1,11 @@ -import { createContext, useCallback, useContext, useState } from 'react' +import { isMac } from 
'@/shared/utils/os' +import { + createContext, + useCallback, + useContext, + useMemo, + useState, +} from 'react' type CommandInvocationContext = { location?: string @@ -10,17 +17,21 @@ export type Command = { handler?: (context: CommandInvocationContext) => void href?: string disabled?: boolean - // TODO: Keybinding? } const CommandRegistryContext = createContext( undefined ) +export type Shortcut = { key: string } + +export type Shortcuts = Record + type CommandRegistry = { registry: Map register: (...elements: Command[]) => void unregister: (...id: string[]) => void + shortcuts: Shortcuts } export const CommandRegistryProvider: React.FC = ({ @@ -43,8 +54,35 @@ export const CommandRegistryProvider: React.FC = ({ ) }, []) + // NOTE: This is where we'd add functionality for customising shortcuts. + const shortcuts: Record = useMemo( + () => ({ + undo: [ + { + key: 'Mod-z', + }, + ], + redo: [ + { + key: 'Mod-y', + }, + { + key: 'Mod-Shift-Z', + }, + ], + find: [{ key: 'Mod-f' }], + 'select-all': [{ key: 'Mod-a' }], + 'insert-comment': [{ key: 'Mod-Shift-C' }], + 'format-bold': [{ key: 'Mod-b' }], + 'format-italics': [{ key: 'Mod-i' }], + }), + [] + ) + return ( - + {children} ) @@ -59,3 +97,92 @@ export const useCommandRegistry = (): CommandRegistry => { } return context } + +function parseShortcut(shortcut: Shortcut) { + // Based on KeyBinding type of CodeMirror 6 + let alt = false + let ctrl = false + let shift = false + let meta = false + + let character = null + // isMac ? shortcut.mac : shortcut.key etc. + const shortcutString = shortcut.key ?? '' + const keys = shortcutString.split(/-(?!$)/) ?? [] + + for (let i = 0; i < keys.length; i++) { + const isLast = i === keys.length - 1 + const key = keys[i] + if (!key) { + throw new Error('Empty key in shortcut: ' + shortcutString) + } + if (key === 'Alt' || (!isLast && key === 'a')) { + alt = true + } else if ( + key === 'Ctrl' || + key === 'Control' || + (!isLast && key === 'c') + ) { + ctrl = true + } else if (key === 'Shift' || (!isLast && key === 's')) { + shift = true + } else if (key === 'Meta' || key === 'Cmd' || (!isLast && key === 'm')) { + meta = true + } else if (key === 'Mod') { + if (isMac) { + meta = true + } else { + ctrl = true + } + } else { + if (key === 'Space') { + character = ' ' + } + if (!isLast) { + throw new Error( + 'Character key must be last in shortcut: ' + shortcutString + ) + } + if (key.length !== 1) { + throw new Error(`Invalid key '${key}' in shortcut: ${shortcutString}`) + } + if (character) { + throw new Error('Multiple characters in shortcut: ' + shortcutString) + } + character = key + } + } + if (!character) { + throw new Error('No character in shortcut: ' + shortcutString) + } + + return { + alt, + ctrl, + shift, + meta, + character, + } +} + +export const formatShortcut = (shortcut: Shortcut): string => { + const { alt, ctrl, shift, meta, character } = parseShortcut(shortcut) + + if (isMac) { + return [ + ctrl ? '⌃' : '', + alt ? '⌥' : '', + shift ? '⇧' : '', + meta ? '⌘' : '', + character.toUpperCase(), + ].join('') + } + + return [ + ctrl ? 'Ctrl' : '', + shift ? 'Shift' : '', + meta ? 'Meta' : '', + alt ? 
'Alt' : '', + character.toUpperCase(), + ].join(' ') +} diff --git a/services/web/frontend/js/features/ide-react/context/editor-manager-context.tsx b/services/web/frontend/js/features/ide-react/context/editor-manager-context.tsx index e1bb49c39c..e830d7ec1a 100644 --- a/services/web/frontend/js/features/ide-react/context/editor-manager-context.tsx +++ b/services/web/frontend/js/features/ide-react/context/editor-manager-context.tsx @@ -18,6 +18,7 @@ import { useConnectionContext } from '@/features/ide-react/context/connection-co import { debugConsole } from '@/utils/debugging' import { DocumentContainer } from '@/features/ide-react/editor/document-container' import { useLayoutContext } from '@/shared/context/layout-context' +import { useUserContext } from '@/shared/context/user-context' import { GotoLineOptions } from '@/features/ide-react/types/goto-line-options' import { Doc } from '../../../../../types/doc' import { useFileTreeData } from '@/shared/context/file-tree-data-context' @@ -99,6 +100,7 @@ export const EditorManagerProvider: FC = ({ const { view, setView } = useLayoutContext() const { showGenericMessageModal, genericModalVisible, showOutOfSyncModal } = useModalsContext() + const { id: userId } = useUserContext() const [showSymbolPalette, setShowSymbolPalette] = useScopeValue( 'editor.showSymbolPalette' @@ -309,7 +311,7 @@ export const EditorManagerProvider: FC = ({ const tryToggle = () => { const saved = doc.getInflightOp() == null && doc.getPendingOp() == null if (saved) { - doc.setTrackingChanges(want) + doc.setTrackChangesUserId(want ? userId : null) setTrackChanges(want) } else { syncTimeoutRef.current = window.setTimeout(tryToggle, 100) @@ -318,7 +320,7 @@ export const EditorManagerProvider: FC = ({ tryToggle() }, - [setTrackChanges] + [setTrackChanges, userId] ) const doOpenNewDocument = useCallback( diff --git a/services/web/frontend/js/features/ide-react/context/ide-react-context.tsx b/services/web/frontend/js/features/ide-react/context/ide-react-context.tsx index bb3d0c1a3c..51ecbdc6c9 100644 --- a/services/web/frontend/js/features/ide-react/context/ide-react-context.tsx +++ b/services/web/frontend/js/features/ide-react/context/ide-react-context.tsx @@ -16,7 +16,6 @@ import { } from '@/features/ide-react/create-ide-event-emitter' import { JoinProjectPayload } from '@/features/ide-react/connection/join-project-payload' import { useConnectionContext } from '@/features/ide-react/context/connection-context' -import { getMockIde } from '@/shared/context/mock/mock-ide' import { populateEditorScope } from '@/features/ide-react/scope-adapters/editor-manager-context-adapter' import { postJSON } from '@/infrastructure/fetch-json' import { ReactScopeEventEmitter } from '@/features/ide-react/scope-event-emitter/react-scope-event-emitter' @@ -128,10 +127,11 @@ export const IdeReactProvider: FC = ({ children }) => { // Populate scope values when joining project, then fire project:joined event useEffect(() => { function handleJoinProjectResponse({ - project, + project: { rootDoc_id: rootDocId, ..._project }, permissionsLevel, }: JoinProjectPayload) { - scopeStore.set('project', { rootDoc_id: null, ...project }) + const project = { ..._project, rootDocId } + scopeStore.set('project', project) scopeStore.set('permissionsLevel', permissionsLevel) // Make watchers update immediately scopeStore.flushUpdates() @@ -157,11 +157,11 @@ export const IdeReactProvider: FC = ({ children }) => { const ide = useMemo(() => { return { - ...getMockIde(), + _id: projectId, socket, reportError, } - }, 
[socket, reportError]) + }, [projectId, socket, reportError]) const value = useMemo( () => ({ diff --git a/services/web/frontend/js/features/ide-react/context/online-users-context.tsx b/services/web/frontend/js/features/ide-react/context/online-users-context.tsx index 1dba40e6d7..1195f9ae7c 100644 --- a/services/web/frontend/js/features/ide-react/context/online-users-context.tsx +++ b/services/web/frontend/js/features/ide-react/context/online-users-context.tsx @@ -95,7 +95,7 @@ export const OnlineUsersProvider: FC = ({ for (const [clientId, user] of Object.entries(onlineUsers)) { const decoratedUser = { ...user } const docId = user.doc_id - if (docId) { + if (docId && fileTreeData) { decoratedUser.doc = findDocEntityById(fileTreeData, docId) } diff --git a/services/web/frontend/js/features/ide-react/context/snapshot-context.tsx b/services/web/frontend/js/features/ide-react/context/snapshot-context.tsx index 70f170a8b0..817e03fe86 100644 --- a/services/web/frontend/js/features/ide-react/context/snapshot-context.tsx +++ b/services/web/frontend/js/features/ide-react/context/snapshot-context.tsx @@ -24,10 +24,14 @@ export const StubSnapshotUtils = { throw new Error('not implemented') } }, + // unused vars kept to document the interface + // eslint-disable-next-line @typescript-eslint/no-unused-vars buildFileTree(snapshot: Snapshot): Folder { throw new Error('not implemented') }, - createFolder(_id: string, name: string): Folder { + // unused vars kept to document the interface + // eslint-disable-next-line @typescript-eslint/no-unused-vars + createFolder(id: string, name: string): Folder { throw new Error('not implemented') }, } diff --git a/services/web/frontend/js/features/ide-react/editor/document-container.ts b/services/web/frontend/js/features/ide-react/editor/document-container.ts index fee359f146..28bcb955d1 100644 --- a/services/web/frontend/js/features/ide-react/editor/document-container.ts +++ b/services/web/frontend/js/features/ide-react/editor/document-container.ts @@ -196,9 +196,13 @@ export class DocumentContainer extends EventEmitter { return this.doc?.hasBufferedOps() } - setTrackingChanges(track_changes: boolean) { + setTrackChangesUserId(userId: string | null) { + this.track_changes_as = userId if (this.doc) { - this.doc.track_changes = track_changes + this.doc.setTrackChangesUserId(userId) + } + if (this.cm6) { + this.cm6.setTrackChangesUserId(userId) } } @@ -595,7 +599,7 @@ export class DocumentContainer extends EventEmitter { this.doc.on('remoteop', (...ops: AnyOperation[]) => { return this.trigger('remoteop', ...ops) }) - this.doc.on('op:sent', (op: AnyOperation) => { + this.doc.on('op:sent', () => { return this.trigger('op:sent') }) this.doc.on('op:acknowledged', (op: AnyOperation) => { @@ -605,7 +609,7 @@ export class DocumentContainer extends EventEmitter { }) return this.trigger('op:acknowledged') }) - this.doc.on('op:timeout', (op: AnyOperation) => { + this.doc.on('op:timeout', () => { this.trigger('op:timeout') return this.onError(new Error('op timed out')) }) diff --git a/services/web/frontend/js/features/ide-react/editor/share-js-doc.ts b/services/web/frontend/js/features/ide-react/editor/share-js-doc.ts index 96e866afec..5b362299d2 100644 --- a/services/web/frontend/js/features/ide-react/editor/share-js-doc.ts +++ b/services/web/frontend/js/features/ide-react/editor/share-js-doc.ts @@ -12,17 +12,20 @@ import { Message, ShareJsConnectionState, ShareJsOperation, - ShareJsTextType, TrackChangesIdSeeds, } from '@/features/ide-react/editor/types/document' import { 
EditorFacade } from '@/features/source-editor/extensions/realtime' import { recordDocumentFirstChangeEvent } from '@/features/event-tracking/document-first-change-event' import getMeta from '@/utils/meta' -import { HistoryOTType } from './share-js-history-ot-type' -import { StringFileData } from 'overleaf-editor-core/index' +import { historyOTType } from './share-js-history-ot-type' +import { + StringFileData, + TrackedChangeList, + EditOperationBuilder, +} from 'overleaf-editor-core' import { - RawEditOperation, StringFileRawData, + RawEditOperation, } from 'overleaf-editor-core/lib/types' // All times below are in milliseconds @@ -68,19 +71,17 @@ export class ShareJsDoc extends EventEmitter { readonly type: OTType = 'sharejs-text-ot' ) { super() - let sharejsType: ShareJsTextType = sharejs.types.text + let sharejsType // Decode any binary bits of data let snapshot: string | StringFileData if (this.type === 'history-ot') { snapshot = StringFileData.fromRaw( docLines as unknown as StringFileRawData ) - sharejsType = new HistoryOTType(snapshot) as ShareJsTextType< - StringFileData, - RawEditOperation[] - > + sharejsType = historyOTType } else { snapshot = docLines.map(line => decodeUtf8(line)).join('\n') + sharejsType = sharejs.types.text } this.connection = { @@ -159,6 +160,18 @@ export class ShareJsDoc extends EventEmitter { this.removeCarriageReturnCharFromShareJsDoc() } + setTrackChangesUserId(userId: string | null) { + this.track_changes = userId != null + } + + getTrackedChanges() { + if (this._doc.otType === 'history-ot') { + return this._doc.snapshot.getTrackedChanges() as TrackedChangeList + } else { + return null + } + } + private removeCarriageReturnCharFromShareJsDoc() { const doc = this._doc let nextPos @@ -253,7 +266,15 @@ export class ShareJsDoc extends EventEmitter { // issues are resolved. 
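+  // For history-ot documents the server sends ops as raw JSON, so they are
+  // decoded into EditOperation instances below before being applied.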
processUpdateFromServer(message: Message) { try { - this._doc._onMessage(message) + if (this.type === 'history-ot' && message.op != null) { + const ops = message.op as RawEditOperation[] + this._doc._onMessage({ + ...message, + op: ops.map(EditOperationBuilder.fromJSON), + }) + } else { + this._doc._onMessage(message) + } } catch (error) { // Version mismatches are thrown as errors debugConsole.log(error) diff --git a/services/web/frontend/js/features/ide-react/editor/share-js-history-ot-type.ts b/services/web/frontend/js/features/ide-react/editor/share-js-history-ot-type.ts index cec1983037..81243bb8c7 100644 --- a/services/web/frontend/js/features/ide-react/editor/share-js-history-ot-type.ts +++ b/services/web/frontend/js/features/ide-react/editor/share-js-history-ot-type.ts @@ -1,54 +1,79 @@ -import EventEmitter from '@/utils/EventEmitter' import { - EditOperationBuilder, - InsertOp, - RemoveOp, - RetainOp, + EditOperation, + EditOperationTransformer, StringFileData, - TextOperation, } from 'overleaf-editor-core' -import { RawEditOperation } from 'overleaf-editor-core/lib/types' +import { ShareDoc } from '../../../../../types/share-doc' -function loadTextOperation(raw: RawEditOperation): TextOperation { - const operation = EditOperationBuilder.fromJSON(raw) - if (!(operation instanceof TextOperation)) { - throw new Error(`operation not supported: ${operation.constructor.name}`) - } - return operation +type Api = { + otType: 'history-ot' + trackChangesUserId: string | null + + getText(): string + getLength(): number } -export class HistoryOTType extends EventEmitter { - // stub interface, these are actually on the Doc - api: HistoryOTType - snapshot: StringFileData +const api: Api & ThisType = { + otType: 'history-ot', + trackChangesUserId: null, - constructor(snapshot: StringFileData) { - super() - this.api = this - this.snapshot = snapshot - } + getText() { + return this.snapshot.getContent({ filterTrackedDeletes: true }) + }, - transformX(raw1: RawEditOperation[], raw2: RawEditOperation[]) { - const [a, b] = TextOperation.transform( - loadTextOperation(raw1[0]), - loadTextOperation(raw2[0]) - ) - return [[a.toJSON()], [b.toJSON()]] - } + getLength() { + return this.snapshot.getStringLength() + }, +} - apply(snapshot: StringFileData, rawEditOperation: RawEditOperation[]) { - const operation = loadTextOperation(rawEditOperation[0]) +export const historyOTType = { + api, + + transformX(ops1: EditOperation[], ops2: EditOperation[]) { + // Dynamic programming algorithm: gradually transform both sides in a nested + // loop. 
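+    // With m ops on one side and n on the other this performs m*n pairwise
+    // transforms; the op lists passed in here are expected to be short.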
+ const left = [...ops1] + const right = [...ops2] + for (let i = 0; i < left.length; i++) { + for (let j = 0; j < right.length; j++) { + // At this point: + // left[0..i] is ops1[0..i] rebased over ops2[0..j-1] + // right[0..j] is ops2[0..j] rebased over ops1[0..i-1] + const [a, b] = EditOperationTransformer.transform(left[i], right[j]) + left[i] = a + right[j] = b + } + } + return [left, right] + }, + + apply(snapshot: StringFileData, ops: EditOperation[]) { const afterFile = StringFileData.fromRaw(snapshot.toRaw()) - afterFile.edit(operation) - this.snapshot = afterFile + for (const op of ops) { + afterFile.edit(op) + } return afterFile - } + }, - compose(op1: RawEditOperation[], op2: RawEditOperation[]) { - return [ - loadTextOperation(op1[0]).compose(loadTextOperation(op2[0])).toJSON(), - ] - } + compose(ops1: EditOperation[], ops2: EditOperation[]) { + const ops = [...ops1, ...ops2] + let currentOp = ops.shift() + if (currentOp === undefined) { + // No ops to process + return [] + } + const result = [] + for (const op of ops) { + if (currentOp.canBeComposedWith(op)) { + currentOp = currentOp.compose(op) + } else { + result.push(currentOp) + currentOp = op + } + } + result.push(currentOp) + return result + }, // Do not provide normalize, used by submitOp to fixup bad input. // normalize(op: TextOperation) {} @@ -56,76 +81,4 @@ export class HistoryOTType extends EventEmitter { // Do not provide invert, only needed for reverting a rejected update. // We are displaying an out-of-sync modal when an op is rejected. // invert(op: TextOperation) {} - - // API - insert(pos: number, text: string, fromUndo: boolean) { - const old = this.getText() - const op = new TextOperation() - op.retain(pos) - op.insert(text) - op.retain(old.length - pos) - this.submitOp([op.toJSON()]) - } - - del(pos: number, length: number, fromUndo: boolean) { - const old = this.getText() - const op = new TextOperation() - op.retain(pos) - op.remove(length) - op.retain(old.length - pos - length) - this.submitOp([op.toJSON()]) - } - - getText() { - return this.snapshot.getContent({ filterTrackedDeletes: true }) - } - - getLength() { - return this.getText().length - } - - _register() { - this.on( - 'remoteop', - (rawEditOperation: RawEditOperation[], oldSnapshot: StringFileData) => { - const operation = loadTextOperation(rawEditOperation[0]) - const str = oldSnapshot.getContent() - if (str.length !== operation.baseLength) - throw new TextOperation.ApplyError( - "The operation's base length must be equal to the string's length.", - operation, - str - ) - - let outputCursor = 0 - let inputCursor = 0 - for (const op of operation.ops) { - if (op instanceof RetainOp) { - inputCursor += op.length - outputCursor += op.length - } else if (op instanceof InsertOp) { - this.emit('insert', outputCursor, op.insertion, op.insertion.length) - outputCursor += op.insertion.length - } else if (op instanceof RemoveOp) { - this.emit( - 'delete', - outputCursor, - str.slice(inputCursor, inputCursor + op.length) - ) - inputCursor += op.length - } - } - - if (inputCursor !== str.length) - throw new TextOperation.ApplyError( - "The operation didn't operate on the whole string.", - operation, - str - ) - } - ) - } - - // stub-interface, provided by sharejs.Doc - submitOp(op: RawEditOperation[]) {} } diff --git a/services/web/frontend/js/features/ide-react/editor/types/document.ts b/services/web/frontend/js/features/ide-react/editor/types/document.ts index fbed3ab8f1..f6e5f6aebb 100644 --- 
a/services/web/frontend/js/features/ide-react/editor/types/document.ts +++ b/services/web/frontend/js/features/ide-react/editor/types/document.ts @@ -1,5 +1,6 @@ import { StringFileData } from 'overleaf-editor-core' import { AnyOperation } from '../../../../../../types/change' +import { RawEditOperation } from 'overleaf-editor-core/lib/types' export type Version = number @@ -36,4 +37,5 @@ export type Message = { doc?: string snapshot?: string | StringFileData type?: ShareJsTextType + op?: AnyOperation[] | RawEditOperation[] } diff --git a/services/web/frontend/js/features/ide-react/hooks/use-editing-session-heartbeat.ts b/services/web/frontend/js/features/ide-react/hooks/use-editing-session-heartbeat.ts index d264766d76..cdb0a151ae 100644 --- a/services/web/frontend/js/features/ide-react/hooks/use-editing-session-heartbeat.ts +++ b/services/web/frontend/js/features/ide-react/hooks/use-editing-session-heartbeat.ts @@ -6,10 +6,15 @@ import { debugConsole } from '@/utils/debugging' import { useCallback, useEffect, useRef } from 'react' import useEventListener from '@/shared/hooks/use-event-listener' import useDomEventListener from '@/shared/hooks/use-dom-event-listener' +import { useIsNewEditorEnabled } from '@/features/ide-redesign/utils/new-editor-utils' -function createEditingSessionHeartbeatData(editorType: EditorType) { +function createEditingSessionHeartbeatData( + editorType: EditorType, + newEditor: boolean +) { return { editorType, + editorRedesign: newEditor, } } @@ -25,6 +30,7 @@ function sendEditingSessionHeartbeat( export function useEditingSessionHeartbeat() { const { projectId } = useIdeReactContext() const { getEditorType } = useEditorManagerContext() + const newEditor = useIsNewEditorEnabled() // Keep track of how many heartbeats we've sent so that we can calculate how // long to wait until the next one @@ -51,7 +57,10 @@ export function useEditingSessionHeartbeat() { heartBeatSentRecentlyRef.current = true - const segmentation = createEditingSessionHeartbeatData(editorType) + const segmentation = createEditingSessionHeartbeatData( + editorType, + newEditor + ) debugConsole.log('[Event] send heartbeat request', segmentation) sendEditingSessionHeartbeat(projectId, segmentation) @@ -71,7 +80,7 @@ export function useEditingSessionHeartbeat() { heartBeatResetTimerRef.current = window.setTimeout(() => { heartBeatSentRecentlyRef.current = false }, backoffSecs * 1000) - }, [getEditorType, projectId]) + }, [getEditorType, projectId, newEditor]) // Hook the heartbeat up to editor events useEventListener('cursor:editor:update', editingSessionHeartbeat) diff --git a/services/web/frontend/js/features/ide-redesign/components/breadcrumbs.tsx b/services/web/frontend/js/features/ide-redesign/components/breadcrumbs.tsx index 455df85d7f..9949b98c7f 100644 --- a/services/web/frontend/js/features/ide-redesign/components/breadcrumbs.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/breadcrumbs.tsx @@ -1,4 +1,7 @@ -import { findInTreeOrThrow } from '@/features/file-tree/util/find-in-tree' +import { + findInTree, + findInTreeOrThrow, +} from '@/features/file-tree/util/find-in-tree' import { useFileTreeOpenContext } from '@/features/ide-react/context/file-tree-open-context' import { useOutlineContext } from '@/features/ide-react/context/outline-context' import useNestedOutline from '@/features/outline/hooks/use-nested-outline' @@ -39,26 +42,41 @@ export default function Breadcrumbs() { const { highlightedLine, canShowOutline } = useOutlineContext() const folderHierarchy = 
useMemo(() => { - if (!openEntity || !fileTreeData) { + if (openEntity?.type !== 'doc' || !fileTreeData) { return [] } - return openEntity.path - .filter(id => id !== fileTreeData._id) // Filter out the root folder - .map(id => { - return findInTreeOrThrow(fileTreeData, id)?.entity - }) + try { + return openEntity.path + .filter(id => id !== fileTreeData._id) // Filter out the root folder + .map(id => { + return findInTreeOrThrow(fileTreeData, id)?.entity + }) + } catch { + // If any of the folders in the path are not found, the entire hierarchy + // is invalid. + return [] + } }, [openEntity, fileTreeData]) + const fileName = useMemo(() => { + // NOTE: openEntity.entity.name may not always be accurate, so we read it + // from the file tree data instead. + if (openEntity?.type !== 'doc' || !fileTreeData) { + return undefined + } + return findInTree(fileTreeData, openEntity.entity._id)?.entity.name + }, [fileTreeData, openEntity]) + const outlineHierarchy = useMemo(() => { - if (!canShowOutline || !outline) { + if (openEntity?.type !== 'doc' || !canShowOutline || !outline) { return [] } return constructOutlineHierarchy(outline.items, highlightedLine) - }, [outline, highlightedLine, canShowOutline]) + }, [outline, highlightedLine, canShowOutline, openEntity]) - if (!openEntity || !fileTreeData) { + if (openEntity?.type !== 'doc' || !fileTreeData) { return null } @@ -73,7 +91,7 @@ export default function Breadcrumbs() { ))} -
    {openEntity.entity.name}
    +
    {fileName}
    {numOutlineItems > 0 && } {outlineHierarchy.map((section, idx) => ( diff --git a/services/web/frontend/js/features/ide-redesign/components/chat/chat.tsx b/services/web/frontend/js/features/ide-redesign/components/chat/chat.tsx index 9ebe33e065..54d098c6c8 100644 --- a/services/web/frontend/js/features/ide-redesign/components/chat/chat.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/chat/chat.tsx @@ -9,8 +9,8 @@ import { useUserContext } from '@/shared/context/user-context' import { lazy, Suspense, useEffect } from 'react' import { useTranslation } from 'react-i18next' import classNames from 'classnames' -import { RailPanelHeader } from '../rail' import { RailIndicator } from '../rail-indicator' +import RailPanelHeader from '../rail-panel-header' const MessageList = lazy(() => import('../../../chat/components/message-list')) diff --git a/services/web/frontend/js/features/ide-redesign/components/errors.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/error-indicator.tsx similarity index 56% rename from services/web/frontend/js/features/ide-redesign/components/errors.tsx rename to services/web/frontend/js/features/ide-redesign/components/error-logs/error-indicator.tsx index 2313022d3c..7b721a1d51 100644 --- a/services/web/frontend/js/features/ide-redesign/components/errors.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/error-logs/error-indicator.tsx @@ -1,9 +1,7 @@ -import PdfLogsViewer from '@/features/pdf-preview/components/pdf-logs-viewer' -import { PdfPreviewProvider } from '@/features/pdf-preview/components/pdf-preview-provider' import { useDetachCompileContext as useCompileContext } from '@/shared/context/detach-compile-context' -import { RailIndicator } from './rail-indicator' +import { RailIndicator } from '../rail-indicator' -export const ErrorIndicator = () => { +export default function ErrorIndicator() { const { logEntries } = useCompileContext() if (!logEntries) { @@ -25,11 +23,3 @@ export const ErrorIndicator = () => { /> ) } - -export const ErrorPane = () => { - return ( - - - - ) -} diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs-header.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs-header.tsx new file mode 100644 index 0000000000..2f3a54b095 --- /dev/null +++ b/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs-header.tsx @@ -0,0 +1,98 @@ +import { useTranslation } from 'react-i18next' +import RailPanelHeader from '../rail-panel-header' +import OLIconButton from '@/features/ui/components/ol/ol-icon-button' +import { useDetachCompileContext as useCompileContext } from '@/shared/context/detach-compile-context' +import { + Dropdown, + DropdownMenu, + DropdownToggle, +} from '@/features/ui/components/bootstrap-5/dropdown-menu' +import PdfFileList from '@/features/pdf-preview/components/pdf-file-list' +import { forwardRef } from 'react' +import OLTooltip from '@/features/ui/components/ol/ol-tooltip' + +export default function ErrorLogsHeader() { + const { t } = useTranslation() + + return ( + , + , + ]} + /> + ) +} + +const ClearCacheButton = () => { + const { compiling, clearCache, clearingCache } = useCompileContext() + const { t } = useTranslation() + + return ( + + clearCache()} + className="rail-panel-header-button-subdued" + icon="auto_delete" + isLoading={clearingCache} + disabled={clearingCache || compiling} + accessibilityLabel={t('clear_cached_files')} + size="sm" + /> + + ) +} + +const 
DownloadFileDropdown = () => { + const { fileList } = useCompileContext() + + const { t } = useTranslation() + + return ( + + + {t('other_logs_and_files')} + + {fileList && ( + + + + )} + + ) +} + +const DownloadFileDropdownToggleButton = forwardRef< + HTMLButtonElement, + { onClick: React.MouseEventHandler } +>(function DownloadFileDropdownToggleButton({ onClick }, ref) { + const { compiling, fileList } = useCompileContext() + const { t } = useTranslation() + + return ( + + + + ) +}) diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs-panel.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs-panel.tsx new file mode 100644 index 0000000000..2cff048256 --- /dev/null +++ b/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs-panel.tsx @@ -0,0 +1,14 @@ +import { PdfPreviewProvider } from '@/features/pdf-preview/components/pdf-preview-provider' +import ErrorLogs from './error-logs' +import ErrorLogsHeader from './error-logs-header' + +export default function ErrorLogsPanel() { + return ( + +
    + + +
    +
    + ) +} diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs.tsx new file mode 100644 index 0000000000..a6a62e998a --- /dev/null +++ b/services/web/frontend/js/features/ide-redesign/components/error-logs/error-logs.tsx @@ -0,0 +1,142 @@ +import { useTranslation } from 'react-i18next' +import { ElementType, memo, useMemo, useState } from 'react' +import { usePdfPreviewContext } from '@/features/pdf-preview/components/pdf-preview-provider' +import StopOnFirstErrorPrompt from '@/features/pdf-preview/components/stop-on-first-error-prompt' +import PdfPreviewError from '@/features/pdf-preview/components/pdf-preview-error' +import PdfValidationIssue from '@/features/pdf-preview/components/pdf-validation-issue' +import PdfLogsEntries from '@/features/pdf-preview/components/pdf-logs-entries' +import PdfPreviewErrorBoundaryFallback from '@/features/pdf-preview/components/pdf-preview-error-boundary-fallback' +import withErrorBoundary from '@/infrastructure/error-boundary' +import { useDetachCompileContext as useCompileContext } from '@/shared/context/detach-compile-context' +import { Nav, NavLink, TabContainer, TabContent } from 'react-bootstrap' +import { LogEntry as LogEntryData } from '@/features/pdf-preview/util/types' +import LogEntry from './log-entry' +import importOverleafModules from '../../../../../macros/import-overleaf-module.macro' + +const logsComponents: Array<{ + import: { default: ElementType } + path: string +}> = importOverleafModules('errorLogsComponents') + +type ErrorLogTab = { + key: string + label: string + entries: LogEntryData[] | undefined +} + +function ErrorLogs() { + const { error, logEntries, rawLog, validationIssues, stoppedOnFirstError } = + useCompileContext() + + const tabs = useMemo(() => { + return [ + { key: 'all', label: 'All', entries: logEntries?.all }, + { key: 'errors', label: 'Errors', entries: logEntries?.errors }, + { key: 'warnings', label: 'Warnings', entries: logEntries?.warnings }, + { key: 'info', label: 'Info', entries: logEntries?.typesetting }, + ] + }, [logEntries]) + + const { loadingError } = usePdfPreviewContext() + + const { t } = useTranslation() + + const [activeTab, setActiveTab] = useState('all') + + const entries = useMemo(() => { + return tabs.find(tab => tab.key === activeTab)?.entries || [] + }, [activeTab, tabs]) + + const includeErrors = activeTab === 'all' || activeTab === 'errors' + const includeWarnings = activeTab === 'all' || activeTab === 'warnings' + + return ( + + + {logsComponents.map(({ import: { default: Component }, path }) => ( + + ))} + +
    + {stoppedOnFirstError && includeErrors && } + + {loadingError && ( + + )} + + {error && ( + + )} + + {includeErrors && + validationIssues && + Object.entries(validationIssues).map(([name, issue]) => ( + + ))} + + {entries && ( + 0} + /> + )} + + {rawLog && activeTab === 'all' && ( + + )} +
    +
    +
    + ) +} + +function formatErrorNumber(num: number | undefined) { + if (num === undefined) { + return undefined + } + + if (num > 99) { + return '99+' + } + + return Math.floor(num).toString() +} + +const TabHeader = ({ tab, active }: { tab: ErrorLogTab; active: boolean }) => { + return ( + + {tab.label} +
    + {/* TODO: it would be nice if this number included custom errors */} + {formatErrorNumber(tab.entries?.length)} +
    +
    + ) +} + +export default withErrorBoundary(memo(ErrorLogs), () => ( + +)) diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/log-entry-header.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/log-entry-header.tsx new file mode 100644 index 0000000000..ce43af3744 --- /dev/null +++ b/services/web/frontend/js/features/ide-redesign/components/error-logs/log-entry-header.tsx @@ -0,0 +1,167 @@ +import classNames from 'classnames' +import { useState, useRef, MouseEventHandler, ElementType } from 'react' +import { useTranslation } from 'react-i18next' +import OLTooltip from '@/features/ui/components/ol/ol-tooltip' +import { + ErrorLevel, + SourceLocation, + LogEntry as LogEntryData, +} from '@/features/pdf-preview/util/types' +import useResizeObserver from '@/features/preview/hooks/use-resize-observer' +import OLIconButton from '@/features/ui/components/ol/ol-icon-button' +import importOverleafModules from '../../../../../macros/import-overleaf-module.macro' +import MaterialIcon from '@/shared/components/material-icon' + +const actionComponents = importOverleafModules( + 'pdfLogEntryHeaderActionComponents' +) as { + import: { default: ElementType } + path: string +}[] + +function LogEntryHeader({ + sourceLocation, + level, + headerTitle, + logType, + showSourceLocationLink = true, + onSourceLocationClick, + collapsed, + onToggleCollapsed, + id, + logEntry, + actionButtonsOverride, + openCollapseIconOverride, +}: { + headerTitle: string | React.ReactNode + level: ErrorLevel + logType?: string + sourceLocation?: SourceLocation + showSourceLocationLink?: boolean + onSourceLocationClick?: MouseEventHandler + collapsed: boolean + onToggleCollapsed: () => void + id?: string + logEntry?: LogEntryData + actionButtonsOverride?: React.ReactNode + openCollapseIconOverride?: string +}) { + const { t } = useTranslation() + const logLocationSpanRef = useRef(null) + const [locationSpanOverflown, setLocationSpanOverflown] = useState(false) + + useResizeObserver( + logLocationSpanRef, + locationSpanOverflown, + checkLocationSpanOverflow + ) + + const file = sourceLocation ? sourceLocation.file : null + const line = sourceLocation ? sourceLocation.line : null + const logEntryHeaderTextClasses = classNames('log-entry-header-text', { + 'log-entry-header-text-error': level === 'error', + 'log-entry-header-text-warning': level === 'warning', + 'log-entry-header-text-info': level === 'info' || level === 'typesetting', + 'log-entry-header-text-success': level === 'success', + 'log-entry-header-text-raw': level === 'raw', + }) + + function checkLocationSpanOverflow(observedElement: ResizeObserverEntry) { + const spanEl = observedElement.target + const isOverflowing = spanEl.scrollWidth > spanEl.clientWidth + setLocationSpanOverflown(isOverflowing) + } + + const locationText = + showSourceLocationLink && file ? `${file}${line ? `, ${line}` : ''}` : null + + // Because we want an ellipsis on the left-hand side (e.g. "...longfilename.tex"), the + // `log-entry-location` class has text laid out from right-to-left using the CSS + // rule `direction: rtl;`. + // This works most of the times, except when the first character of the filename is considered + // a punctuation mark, like `/` (e.g. `/foo/bar/baz.sty`). In this case, because of + // right-to-left writing rules, the punctuation mark is moved to the right-side of the string, + // resulting in `...bar/baz.sty/` instead of `...bar/baz.sty`. 
+  // To avoid this edge-case, we wrap the `logLocationLinkText` in two directional formatting
+  // characters:
+  // * \u202A LEFT-TO-RIGHT EMBEDDING Treat the following text as embedded left-to-right.
+  // * \u202C POP DIRECTIONAL FORMATTING End the scope of the last LRE, RLE, RLO, or LRO.
+  // This essentially tells the browser that, although the text is laid out from right-to-left,
+  // the wrapped portion of text should follow left-to-right writing rules.
+  const formattedLocationText = locationText ? (
+    <span className="log-entry-location">
+      {`\u202A${locationText}\u202C`}
+    </span>
+  ) : null
+
+  const headerTitleText = logType ? `${logType} ${headerTitle}` : headerTitle
+
+  return (
+
    + + + + + {actionButtonsOverride ?? ( +
    + {showSourceLocationLink && ( + + + + )} + {actionComponents.map(({ import: { default: Component }, path }) => ( + + ))} +
    + )} +
    + ) +} + +export default LogEntryHeader diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/log-entry.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/log-entry.tsx new file mode 100644 index 0000000000..a7539450ce --- /dev/null +++ b/services/web/frontend/js/features/ide-redesign/components/error-logs/log-entry.tsx @@ -0,0 +1,140 @@ +import { + Dispatch, + MouseEventHandler, + useCallback, + memo, + SetStateAction, + useState, +} from 'react' +import HumanReadableLogsHints from '../../../../ide/human-readable-logs/HumanReadableLogsHints' +import { sendMB } from '@/infrastructure/event-tracking' +import { + ErrorLevel, + LogEntry as LogEntryData, + SourceLocation, +} from '@/features/pdf-preview/util/types' +import LogEntryHeader from './log-entry-header' +import PdfLogEntryContent from '@/features/pdf-preview/components/pdf-log-entry-content' +import classNames from 'classnames' + +type LogEntryProps = { + headerTitle: string | React.ReactNode + level: ErrorLevel + ruleId?: string + rawContent?: string + logType?: string + formattedContent?: React.ReactNode + extraInfoURL?: string | null + sourceLocation?: SourceLocation + showSourceLocationLink?: boolean + entryAriaLabel?: string + contentDetails?: string[] + onSourceLocationClick?: (sourceLocation: SourceLocation) => void + index?: number + logEntry?: LogEntryData + id?: string + alwaysExpandRawContent?: boolean + className?: string + actionButtonsOverride?: React.ReactNode + openCollapseIconOverride?: string +} + +function LogEntry(props: LogEntryProps) { + const [collapsed, setCollapsed] = useState(true) + + return ( + + ) +} + +export function ControlledLogEntry({ + ruleId, + headerTitle, + rawContent, + logType, + formattedContent, + extraInfoURL, + level, + sourceLocation, + showSourceLocationLink = true, + entryAriaLabel = undefined, + contentDetails, + onSourceLocationClick, + index, + logEntry, + id, + alwaysExpandRawContent = false, + className, + collapsed, + setCollapsed, + actionButtonsOverride, + openCollapseIconOverride, +}: LogEntryProps & { + collapsed: boolean + setCollapsed: Dispatch> +}) { + if (ruleId && HumanReadableLogsHints[ruleId]) { + const hint = HumanReadableLogsHints[ruleId] + formattedContent = hint.formattedContent(contentDetails) + extraInfoURL = hint.extraInfoURL + } + + const handleLogEntryLinkClick: MouseEventHandler = + useCallback( + event => { + event.preventDefault() + + if (onSourceLocationClick && sourceLocation) { + onSourceLocationClick(sourceLocation) + + const parts = sourceLocation?.file?.split('.') + const extension = + parts?.length && parts?.length > 1 ? parts.pop() : '' + sendMB('log-entry-link-click', { level, ruleId, extension }) + } + }, + [level, onSourceLocationClick, ruleId, sourceLocation] + ) + + return ( +
    + setCollapsed(collapsed => !collapsed)} + id={id} + logEntry={logEntry} + actionButtonsOverride={actionButtonsOverride} + openCollapseIconOverride={openCollapseIconOverride} + /> +
    + +
    + ) +} + +export default memo(LogEntry) diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/new-error-logs-promo.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/new-error-logs-promo.tsx new file mode 100644 index 0000000000..1589fa819d --- /dev/null +++ b/services/web/frontend/js/features/ide-redesign/components/error-logs/new-error-logs-promo.tsx @@ -0,0 +1,59 @@ +import Close from '@/shared/components/close' +import { useEditorContext } from '@/shared/context/editor-context' +import useTutorial from '@/shared/hooks/promotions/use-tutorial' +import { useCallback, useEffect } from 'react' +import { Overlay, Popover } from 'react-bootstrap' +import { useTranslation } from 'react-i18next' + +const TUTORIAL_KEY = 'new-error-logs-promo' +const EVENT_DATA = { name: 'new-error-logs-promotion' } + +export default function NewErrorLogsPromo({ + target, +}: { + target: HTMLElement | null +}) { + const { t } = useTranslation() + + const { inactiveTutorials } = useEditorContext() + const { showPopup, tryShowingPopup, hideUntilReload, completeTutorial } = + useTutorial(TUTORIAL_KEY, EVENT_DATA) + + useEffect(() => { + if (!inactiveTutorials.includes(TUTORIAL_KEY)) { + tryShowingPopup() + } + }, [tryShowingPopup, inactiveTutorials]) + + const onHide = useCallback(() => { + hideUntilReload() + }, [hideUntilReload]) + + const onClose = useCallback(() => { + completeTutorial({ + action: 'complete', + event: 'promo-dismiss', + }) + }, [completeTutorial]) + + if (!target) { + return null + } + + return ( + + + + {t('error_logs_have_had_an_update')} + + + + + ) +} diff --git a/services/web/frontend/js/features/ide-redesign/components/error-logs/old-error-pane.tsx b/services/web/frontend/js/features/ide-redesign/components/error-logs/old-error-pane.tsx new file mode 100644 index 0000000000..7794747d30 --- /dev/null +++ b/services/web/frontend/js/features/ide-redesign/components/error-logs/old-error-pane.tsx @@ -0,0 +1,10 @@ +import PdfLogsViewer from '@/features/pdf-preview/components/pdf-logs-viewer' +import { PdfPreviewProvider } from '@/features/pdf-preview/components/pdf-preview-provider' + +export default function OldErrorPane() { + return ( + + + + ) +} diff --git a/services/web/frontend/js/features/ide-redesign/components/full-project-search-panel.tsx b/services/web/frontend/js/features/ide-redesign/components/full-project-search-panel.tsx new file mode 100644 index 0000000000..926341ce89 --- /dev/null +++ b/services/web/frontend/js/features/ide-redesign/components/full-project-search-panel.tsx @@ -0,0 +1,19 @@ +import { ElementType } from 'react' +import importOverleafModules from '../../../../macros/import-overleaf-module.macro' + +const componentModule = importOverleafModules('fullProjectSearchPanel')[0] as + | { + import: { default: ElementType } + path: string + } + | undefined + +export const FullProjectSearchPanel = () => { + if (!componentModule) { + return null + } + const FullProjectSearch = componentModule.import.default + return +} + +export const hasFullProjectSearch = Boolean(componentModule) diff --git a/services/web/frontend/js/features/ide-redesign/components/integrations-panel/integrations-panel.tsx b/services/web/frontend/js/features/ide-redesign/components/integrations-panel/integrations-panel.tsx index d1e4358907..e477602e3e 100644 --- a/services/web/frontend/js/features/ide-redesign/components/integrations-panel/integrations-panel.tsx +++ 
b/services/web/frontend/js/features/ide-redesign/components/integrations-panel/integrations-panel.tsx @@ -1,7 +1,7 @@ import { ElementType } from 'react' import importOverleafModules from '../../../../../macros/import-overleaf-module.macro' -import { RailPanelHeader } from '../rail' import { useTranslation } from 'react-i18next' +import RailPanelHeader from '../rail-panel-header' const integrationPanelComponents = importOverleafModules( 'integrationPanelComponents' diff --git a/services/web/frontend/js/features/ide-redesign/components/main-layout.tsx b/services/web/frontend/js/features/ide-redesign/components/main-layout.tsx index 2c422af279..8ec00a397e 100644 --- a/services/web/frontend/js/features/ide-redesign/components/main-layout.tsx +++ b/services/web/frontend/js/features/ide-redesign/components/main-layout.tsx @@ -56,6 +56,9 @@ export default function MainLayout() { {pdfLayout === 'sideBySide' && ( - } diff --git a/services/web/frontend/js/features/pdf-preview/components/compile-time-warning-upgrade-prompt.tsx b/services/web/frontend/js/features/pdf-preview/components/compile-time-warning-upgrade-prompt.tsx index 6595df854c..164c3c3275 100644 --- a/services/web/frontend/js/features/pdf-preview/components/compile-time-warning-upgrade-prompt.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/compile-time-warning-upgrade-prompt.tsx @@ -1,23 +1,56 @@ -import { memo, useCallback, useEffect, useState } from 'react' +import { memo, useCallback, useEffect, useMemo, useState } from 'react' import * as eventTracking from '@/infrastructure/event-tracking' import { useDetachCompileContext } from '@/shared/context/detach-compile-context' import usePersistedState from '@/shared/hooks/use-persisted-state' import { CompileTimeWarningUpgradePromptInner } from '@/features/pdf-preview/components/compile-time-warning-upgrade-prompt-inner' import getMeta from '@/utils/meta' +import { CompileTimeoutChangingSoon } from './compile-time-changing-soon' function CompileTimeWarningUpgradePrompt() { const { isProjectOwner, deliveryLatencies, compiling, showLogs, error } = useDetachCompileContext() const [showWarning, setShowWarning] = useState(false) + const [showChangingSoon, setShowChangingSoon] = useState(false) const [dismissedUntilWarning, setDismissedUntilWarning] = usePersistedState< Date | undefined >(`has-dismissed-10s-compile-time-warning-until`) + const { reducedTimeoutWarning } = getMeta('ol-compileSettings') + const warningThreshold = reducedTimeoutWarning === 'enabled' ? 
7 : 10 + + const sharedSegmentation = useMemo( + () => ({ + '10s-timeout-warning': reducedTimeoutWarning, + 'is-owner': isProjectOwner, + }), + [isProjectOwner, reducedTimeoutWarning] + ) + + const warningSegmentation = useMemo( + () => ({ + content: 'warning', + compileTime: warningThreshold, + ...sharedSegmentation, + }), + [sharedSegmentation, warningThreshold] + ) + + const changingSoonSegmentation = useMemo( + () => ({ + content: 'changes', + compileTime: 10, + ...sharedSegmentation, + }), + [sharedSegmentation] + ) const handleNewCompile = useCallback( (compileTime: number) => { setShowWarning(false) - if (compileTime > 10000) { + setShowChangingSoon(false) + if (reducedTimeoutWarning === 'enabled' && compileTime > 10000) { + setShowChangingSoon(true) + } else if (compileTime > warningThreshold * 1000) { if (isProjectOwner) { if ( !dismissedUntilWarning || @@ -25,26 +58,52 @@ function CompileTimeWarningUpgradePrompt() { ) { setShowWarning(true) eventTracking.sendMB('compile-time-warning-displayed', { - time: 10, + compileTime: warningThreshold, isProjectOwner, }) } } } }, - [isProjectOwner, dismissedUntilWarning] + [ + isProjectOwner, + dismissedUntilWarning, + reducedTimeoutWarning, + warningThreshold, + ] ) const handleDismissWarning = useCallback(() => { eventTracking.sendMB('compile-time-warning-dismissed', { - time: 10, + compileTime: warningThreshold, isProjectOwner, }) + eventTracking.sendMB('paywall-dismiss', { + 'paywall-type': 'compile-time-warning', + content: 'warning', + compileTime: warningThreshold, + ...sharedSegmentation, + }) setShowWarning(false) const until = new Date() until.setDate(until.getDate() + 1) // 1 day setDismissedUntilWarning(until) - }, [isProjectOwner, setDismissedUntilWarning]) + }, [ + isProjectOwner, + setDismissedUntilWarning, + warningThreshold, + sharedSegmentation, + ]) + + const handleDismissChangingSoon = useCallback(() => { + eventTracking.sendMB('paywall-dismiss', { + 'paywall-type': 'compile-time-warning', + compileTime: 10, + content: 'changes', + ...sharedSegmentation, + }) + setShowChangingSoon(false) + }, [sharedSegmentation]) useEffect(() => { if (compiling || error || showLogs) return @@ -55,21 +114,32 @@ function CompileTimeWarningUpgradePrompt() { return null } - if (compiling || error || showLogs) { + if ( + compiling || + error || + showLogs || + !deliveryLatencies.compileTimeServerE2E + ) { return null } - if (!showWarning) { + if (!showWarning && !showChangingSoon) { return null } - // if showWarning is true then the 10s warning is shown - return (
    {showWarning && isProjectOwner && ( + )} + {showChangingSoon && ( + )}
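For reference, a minimal sketch of the threshold logic the reworked prompt above encodes, assuming `reducedTimeoutWarning` takes the value `'enabled'` for the reduced-timeout variant (as the comparison in the hunk suggests); persisted dismissal, event tracking and the prompt components themselves are left out:

```typescript
// Illustrative only: which prompt a finished compile should trigger.
// compileTimeMs comes from deliveryLatencies, reducedTimeoutWarning from
// getMeta('ol-compileSettings').
type PromptKind = 'changing-soon' | 'warning' | null

function promptForCompileTime(
  compileTimeMs: number,
  reducedTimeoutWarning: string | undefined,
  isProjectOwner: boolean
): PromptKind {
  if (reducedTimeoutWarning === 'enabled' && compileTimeMs > 10_000) {
    // Users in the reduced-timeout group see the "timeout is changing soon"
    // notice once a compile exceeds 10s.
    return 'changing-soon'
  }
  // Otherwise the upgrade warning kicks in at 7s for that group, 10s for
  // everyone else, and only the project owner sees it.
  const warningThreshold = reducedTimeoutWarning === 'enabled' ? 7 : 10
  if (isProjectOwner && compileTimeMs > warningThreshold * 1000) {
    return 'warning'
  }
  return null
}
```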
    diff --git a/services/web/frontend/js/features/pdf-preview/components/pdf-compile-button.tsx b/services/web/frontend/js/features/pdf-preview/components/pdf-compile-button.tsx index b2b78d9e19..d693fe071f 100644 --- a/services/web/frontend/js/features/pdf-preview/components/pdf-compile-button.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/pdf-compile-button.tsx @@ -75,11 +75,13 @@ function PdfCompileButton() { 'btn-striped-animated': hasChanges, }, 'no-left-border', - 'dropdown-button-toggle' + 'dropdown-button-toggle', + 'compile-dropdown-toggle' ) const buttonClassName = classNames( 'align-items-center py-0 no-left-radius px-3', + 'compile-button', { 'btn-striped-animated': hasChanges, } diff --git a/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-content.tsx b/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-content.tsx index 8c9a9d7761..17378f6c74 100644 --- a/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-content.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-content.tsx @@ -3,6 +3,7 @@ import PdfLogEntryRawContent from './pdf-log-entry-raw-content' import importOverleafModules from '../../../../macros/import-overleaf-module.macro' import { LogEntry } from '../util/types' import { ElementType } from 'react' +import classNames from 'classnames' const pdfLogEntryComponents = importOverleafModules( 'pdfLogEntryComponents' @@ -17,17 +18,21 @@ export default function PdfLogEntryContent({ extraInfoURL, index, logEntry, + alwaysExpandRawContent = false, + className, }: { rawContent?: string formattedContent?: React.ReactNode extraInfoURL?: string | null index?: number logEntry?: LogEntry + alwaysExpandRawContent?: boolean + className?: string }) { const { t } = useTranslation() return ( -
    +
    {formattedContent && (
    {formattedContent}
    )} @@ -48,7 +53,11 @@ export default function PdfLogEntryContent({ )} {rawContent && ( - + )}
    ) diff --git a/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-raw-content.tsx b/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-raw-content.tsx index 39f46fbed3..0e9cc5246d 100644 --- a/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-raw-content.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry-raw-content.tsx @@ -8,20 +8,24 @@ import Icon from '../../../shared/components/icon' export default function PdfLogEntryRawContent({ rawContent, collapsedSize = 0, + alwaysExpanded = false, }: { rawContent: string collapsedSize?: number + alwaysExpanded?: boolean }) { - const [expanded, setExpanded] = useState(false) - const [needsExpander, setNeedsExpander] = useState(true) + const [expanded, setExpanded] = useState(alwaysExpanded) + const [needsExpander, setNeedsExpander] = useState(!alwaysExpanded) const { elementRef } = useResizeObserver( useCallback( (element: Element) => { if (element.scrollHeight === 0) return // skip update when logs-pane is closed - setNeedsExpander(element.scrollHeight > collapsedSize) + setNeedsExpander( + !alwaysExpanded && element.scrollHeight > collapsedSize + ) }, - [collapsedSize] + [collapsedSize, alwaysExpanded] ) ) diff --git a/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry.tsx b/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry.tsx index 349ad79047..23ae2dca5d 100644 --- a/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/pdf-log-entry.tsx @@ -1,4 +1,3 @@ -import classNames from 'classnames' import { memo, MouseEventHandler, useCallback } from 'react' import PreviewLogEntryHeader from '../../preview/components/preview-log-entry-header' import PdfLogEntryContent from './pdf-log-entry-content' @@ -6,6 +5,9 @@ import HumanReadableLogsHints from '../../../ide/human-readable-logs/HumanReadab import { sendMB } from '@/infrastructure/event-tracking' import getMeta from '@/utils/meta' import { ErrorLevel, LogEntry, SourceLocation } from '../util/types' +import { useIsNewEditorEnabled } from '@/features/ide-redesign/utils/new-editor-utils' +import NewLogEntry from '@/features/ide-redesign/components/error-logs/log-entry' +import { useFeatureFlag } from '@/shared/context/split-test-context' function PdfLogEntry({ ruleId, @@ -18,12 +20,9 @@ function PdfLogEntry({ level, sourceLocation, showSourceLocationLink = true, - showCloseButton = false, entryAriaLabel = undefined, - customClass, contentDetails, onSourceLocationClick, - onClose, index, logEntry, id, @@ -38,12 +37,9 @@ function PdfLogEntry({ extraInfoURL?: string | null sourceLocation?: SourceLocation showSourceLocationLink?: boolean - showCloseButton?: boolean entryAriaLabel?: string - customClass?: string contentDetails?: string[] onSourceLocationClick?: (sourceLocation: SourceLocation) => void - onClose?: () => void index?: number logEntry?: LogEntry id?: string @@ -73,9 +69,34 @@ function PdfLogEntry({ [level, onSourceLocationClick, ruleId, sourceLocation] ) + const newEditor = useIsNewEditorEnabled() + const newErrorlogs = useFeatureFlag('new-editor-error-logs-redesign') + + if (newEditor && newErrorlogs) { + return ( + + ) + } + return (
    {(rawContent || formattedContent || showAiErrorAssistant) && ( diff --git a/services/web/frontend/js/features/pdf-preview/components/pdf-logs-viewer.tsx b/services/web/frontend/js/features/pdf-preview/components/pdf-logs-viewer.tsx index ec834432fe..f9fbcae42a 100644 --- a/services/web/frontend/js/features/pdf-preview/components/pdf-logs-viewer.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/pdf-logs-viewer.tsx @@ -21,7 +21,6 @@ function PdfLogsViewer({ alwaysVisible = false }: { alwaysVisible?: boolean }) { const { codeCheckFailed, error, - hasShortCompileTimeout, logEntries, rawLog, validationIssues, @@ -32,6 +31,8 @@ function PdfLogsViewer({ alwaysVisible = false }: { alwaysVisible?: boolean }) { const { loadingError } = usePdfPreviewContext() + const { compileTimeout } = getMeta('ol-compileSettings') + const { t } = useTranslation() const [ @@ -58,7 +59,7 @@ function PdfLogsViewer({ alwaysVisible = false }: { alwaysVisible?: boolean }) { {loadingError && } - {hasShortCompileTimeout && error === 'timedout' ? ( + {compileTimeout < 60 && error === 'timedout' ? ( isCompileTimeoutPaywallDisplay ? ( - startCompile()} - />, - ]} - /> -
    -
    - , - ]} - /> - - } - level="warning" - /> + includeWarnings && ( + + startCompile()} + />, + ]} + /> +
    +
    + , + ]} + /> + + } + level="warning" + /> + ) ) case 'rendering-error': return ( - - {t('something_went_wrong_rendering_pdf')} -   - , - ]} - /> - {getMeta('ol-compilesUserContentDomain') && ( - <> -
    -
    - , - /* eslint-disable-next-line jsx-a11y/anchor-has-content */ -
    , - ]} - /> - - )} - + includeErrors && ( + + {t('something_went_wrong_rendering_pdf')} +   + , + ]} + /> + {getMeta('ol-compilesUserContentDomain') && ( + <> +
    +
    + , + /* eslint-disable-next-line jsx-a11y/anchor-has-content */ +
    , + ]} + /> + + )} + + ) ) case 'clsi-maintenance': return ( - - {t('clsi_maintenance')} - + includeErrors && ( + + {t('clsi_maintenance')} + + ) ) case 'clsi-unavailable': return ( - - {t('clsi_unavailable')} - + includeErrors && ( + + {t('clsi_unavailable')} + + ) ) case 'too-recently-compiled': return ( - - {t('too_recently_compiled')} - + includeErrors && ( + + {t('too_recently_compiled')} + + ) ) case 'terminated': return ( - - {t('compile_terminated_by_user')} - + includeErrors && ( + + {t('compile_terminated_by_user')} + + ) ) case 'rate-limited': return ( - - {t('project_flagged_too_many_compiles')} - + includeErrors && ( + + {t('project_flagged_too_many_compiles')} + + ) ) case 'compile-in-progress': return ( - - {t('pdf_compile_try_again')} - + includeErrors && ( + + {t('pdf_compile_try_again')} + + ) ) case 'autocompile-disabled': return ( - - {t('autocompile_disabled_reason')} - + includeErrors && ( + + {t('autocompile_disabled_reason')} + + ) ) case 'project-too-large': return ( - - {t('project_too_much_editable_text')} - + includeErrors && ( + + {t('project_too_much_editable_text')} + + ) ) case 'timedout': - return + return includeErrors && case 'failure': return ( - - {t('no_pdf_error_explanation')} + includeErrors && ( + + {t('no_pdf_error_explanation')} -
      -
    • {t('no_pdf_error_reason_unrecoverable_error')}
    • -
    • - }} - /> -
    • -
    • - }} - /> -
    • -
    -
    +
      +
    • {t('no_pdf_error_reason_unrecoverable_error')}
    • +
    • + }} + /> +
    • +
    • + }} + /> +
    • +
    +
    + ) ) case 'clear-cache': return ( - - {t('somthing_went_wrong_compiling')} - + includeErrors && ( + + {t('somthing_went_wrong_compiling')} + + ) ) case 'pdf-viewer-loading-error': return ( - - , - // eslint-disable-next-line jsx-a11y/anchor-has-content -
    , - // eslint-disable-next-line jsx-a11y/anchor-has-content - , - ]} - /> - + includeErrors && ( + + , + // eslint-disable-next-line jsx-a11y/anchor-has-content + , + // eslint-disable-next-line jsx-a11y/anchor-has-content + , + ]} + /> + + ) ) case 'validation-problems': @@ -207,9 +241,11 @@ function PdfPreviewError({ error }: { error: string }) { case 'error': default: return ( - - {t('somthing_went_wrong_compiling')} - + includeErrors && ( + + {t('somthing_went_wrong_compiling')} + + ) ) } } diff --git a/services/web/frontend/js/features/pdf-preview/components/pdf-preview-pane.tsx b/services/web/frontend/js/features/pdf-preview/components/pdf-preview-pane.tsx index 7bbecbc327..e063c20c76 100644 --- a/services/web/frontend/js/features/pdf-preview/components/pdf-preview-pane.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/pdf-preview-pane.tsx @@ -12,9 +12,12 @@ import PdfPreviewHybridToolbarNew from '@/features/ide-redesign/components/pdf-p import PdfErrorState from '@/features/ide-redesign/components/pdf-preview/pdf-error-state' import { useIsNewEditorEnabled } from '@/features/ide-redesign/utils/new-editor-utils' import importOverleafModules from '../../../../macros/import-overleaf-module.macro' +import PdfCodeCheckFailedBanner from '@/features/ide-redesign/components/pdf-preview/pdf-code-check-failed-banner' +import getMeta from '@/utils/meta' function PdfPreviewPane() { - const { pdfUrl, hasShortCompileTimeout } = useCompileContext() + const { pdfUrl } = useCompileContext() + const { compileTimeout } = getMeta('ol-compileSettings') const classes = classNames('pdf', 'full-size', { 'pdf-empty': !pdfUrl, }) @@ -32,8 +35,9 @@ function PdfPreviewPane() { ) : ( )} + {newEditor && } - {hasShortCompileTimeout && } + {compileTimeout < 60 && } }>
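The pdf-logs-viewer and pdf-preview-pane hunks above drop the `hasShortCompileTimeout` context flag in favour of reading the timeout from the `ol-compileSettings` meta. A rough sketch of that shared check, assuming `compileTimeout` is the per-user limit in seconds; the banner and paywall component names in the usage note are placeholders:

```typescript
import getMeta from '@/utils/meta'

// Both the logs viewer and the preview pane now gate the timeout paywall and
// the "changing soon" banner on the same meta value instead of a context flag.
export function hasShortCompileTimeout(): boolean {
  const { compileTimeout } = getMeta('ol-compileSettings')
  return compileTimeout < 60
}

// Usage (simplified):
//   {hasShortCompileTimeout() && error === 'timedout' && <TimeoutPaywall />}
```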
    diff --git a/services/web/frontend/js/features/pdf-preview/components/timeout-message-after-paywall-dismissal.tsx b/services/web/frontend/js/features/pdf-preview/components/timeout-message-after-paywall-dismissal.tsx index db6140085f..64ef0fbfc1 100644 --- a/services/web/frontend/js/features/pdf-preview/components/timeout-message-after-paywall-dismissal.tsx +++ b/services/web/frontend/js/features/pdf-preview/components/timeout-message-after-paywall-dismissal.tsx @@ -1,39 +1,30 @@ import getMeta from '@/utils/meta' import { Trans, useTranslation } from 'react-i18next' -import { memo, useCallback, useEffect } from 'react' +import { memo, useMemo } from 'react' import { useDetachCompileContext } from '@/shared/context/detach-compile-context' import StartFreeTrialButton from '@/shared/components/start-free-trial-button' import MaterialIcon from '@/shared/components/material-icon' -import { useStopOnFirstError } from '@/shared/hooks/use-stop-on-first-error' import * as eventTracking from '@/infrastructure/event-tracking' import PdfLogEntry from './pdf-log-entry' -function TimeoutMessageAfterPaywallDismissal() { - const { - startCompile, - lastCompileOptions, - setAnimateCompileDropdownArrow, - isProjectOwner, - } = useDetachCompileContext() - - const { enableStopOnFirstError } = useStopOnFirstError({ - eventSource: 'timeout-new', - }) - - const handleEnableStopOnFirstErrorClick = useCallback(() => { - enableStopOnFirstError() - startCompile({ stopOnFirstError: true }) - setAnimateCompileDropdownArrow(true) - }, [enableStopOnFirstError, startCompile, setAnimateCompileDropdownArrow]) +type TimeoutMessageProps = { + segmentation?: eventTracking.Segmentation +} +function TimeoutMessageAfterPaywallDismissal({ + segmentation, +}: TimeoutMessageProps) { + const { lastCompileOptions, isProjectOwner } = useDetachCompileContext() return (
    - + {getMeta('ol-ExposedSettings').enableSubscriptions && ( )}
    @@ -42,26 +33,22 @@ function TimeoutMessageAfterPaywallDismissal() { type CompileTimeoutProps = { isProjectOwner: boolean + segmentation?: eventTracking.Segmentation } const CompileTimeout = memo(function CompileTimeout({ isProjectOwner, + segmentation, }: CompileTimeoutProps) { const { t } = useTranslation() - useEffect(() => { - eventTracking.sendMB('paywall-prompt', { - 'paywall-type': 'compile-timeout', + const eventSegmentation = useMemo( + () => ({ + ...segmentation, 'paywall-version': 'secondary', - }) - }, []) - - function onPaywallClick() { - eventTracking.sendMB('paywall-click', { - 'paywall-type': 'compile-timeout', - 'paywall-version': 'secondary', - }) - } + }), + [segmentation] + ) return ( {t('try_for_free')} @@ -124,22 +111,50 @@ const CompileTimeout = memo(function CompileTimeout({ type PreventTimeoutHelpMessageProps = { lastCompileOptions: any - handleEnableStopOnFirstErrorClick: () => void - isProjectOwner: boolean + segmentation?: eventTracking.Segmentation } const PreventTimeoutHelpMessage = memo(function PreventTimeoutHelpMessage({ lastCompileOptions, - handleEnableStopOnFirstErrorClick, - isProjectOwner, + segmentation, }: PreventTimeoutHelpMessageProps) { const { t } = useTranslation() + function sendInfoClickEvent() { + eventTracking.sendMB('paywall-info-click', { + 'paywall-type': 'compile-timeout', + content: 'blog', + ...segmentation, + }) + } + + const compileTimeoutChangesBlogLink = ( + /* eslint-disable-next-line jsx-a11y/anchor-has-content, react/jsx-key */ +
    + ) + return ( + {segmentation?.['10s-timeout-warning'] === 'enabled' && ( +

    + + + +

    + )} +

    {t('common_causes_of_compile_timeouts_include')}:

    - } - action={ - <> - handleResendConfirmationEmail(userEmail)} - > - {t('resend_confirmation_email')} - - - {isPrimary - ? t('change_primary_email') - : t('remove_email_address')} - - - } - /> - ) - } + const confirmationCodeModal = ( + setIsSuccess(true)} + setGroupLoading={setIsLoading} + groupLoading={isLoading} + triggerVariant="secondary" + /> + ) - if (!isEmailTrusted && !isPrimary && !shouldShowCommonsNotification) { - return ( - - {isLoading ? ( -
    - -
    - ) : isError ? ( -
    {getUserFacingMessage(error)}
    - ) : ( - <> -

    - {t('confirm_secondary_email')} -

    -

    - {t('reconfirm_secondary_email', { - emailAddress: userEmail.email, - })} -

    -

    {t('ensure_recover_account')}

    - - )} -
    - } - action={ - <> - handleResendConfirmationEmail(userEmail)} - > - {t('resend_confirmation_email')} - - - {t('remove_email_address')} - - - } - /> - ) - } + let notificationType: 'info' | 'warning' | undefined + let notificationBody: ReactNode | undefined - // Only show the notification if a) a commons license is available and b) the - // user is on a free or individual plan. Users on a group or Commons plan - // already have premium features. if (shouldShowCommonsNotification) { + notificationType = 'info' + notificationBody = ( + <> + ]} // eslint-disable-line react/jsx-key + /> +
    + ]} // eslint-disable-line react/jsx-key + /> + + ) + } else if (!isEmailConfirmed) { + notificationType = 'warning' + notificationBody = ( + <> +

    + {isPrimary ? ( + , + ]} + /> + ) : ( + , + ]} + /> + )} +

    + {emailDeletionDate && ( +

    {t('email_remove_by_date', { date: emailDeletionDate })}

    + )} + + ) + } else if (!isEmailTrusted && !isPrimary) { + notificationType = 'warning' + notificationBody = ( + <> +

    + {t('confirm_secondary_email')} +

    +

    {t('reconfirm_secondary_email', { emailAddress })}

    +

    {t('ensure_recover_account')}

    + + ) + } + + if (notificationType) { return ( - {isLoading ? ( - - ) : isError ? ( -
    {getUserFacingMessage(error)}
    - ) : ( - <> - ]} // eslint-disable-line react/jsx-key - /> -
    - ]} // eslint-disable-line react/jsx-key - /> - - )} -
    - } - action={ - handleResendConfirmationEmail(userEmail)} - > - {t('resend_email')} - - } + type={notificationType} + content={notificationBody} + action={confirmationCodeModal} /> ) } @@ -302,6 +236,7 @@ function ConfirmEmail() { const { totalProjectsCount } = useProjectListContext() const userEmails = getMeta('ol-userEmails') || [] const signUpDate = getMeta('ol-user')?.signUpDate + const [isLoading, setIsLoading] = useState(false) if (!totalProjectsCount || !userEmails.length || !signUpDate) { return null @@ -315,6 +250,8 @@ function ConfirmEmail() { key={`confirm-email-${userEmail.email}`} userEmail={userEmail} signUpDate={signUpDate} + isLoading={isLoading} + setIsLoading={setIsLoading} /> ) : null })} diff --git a/services/web/frontend/js/features/project-list/components/notifications/groups/group-invitation/hooks/use-group-invitation-notification.tsx b/services/web/frontend/js/features/project-list/components/notifications/groups/group-invitation/hooks/use-group-invitation-notification.tsx index 6c25513124..f62571b722 100644 --- a/services/web/frontend/js/features/project-list/components/notifications/groups/group-invitation/hooks/use-group-invitation-notification.tsx +++ b/services/web/frontend/js/features/project-list/components/notifications/groups/group-invitation/hooks/use-group-invitation-notification.tsx @@ -9,6 +9,7 @@ import type { NotificationGroupInvitation } from '../../../../../../../../../typ import useAsync from '../../../../../../../shared/hooks/use-async' import { FetchError, + getJSON, postJSON, putJSON, } from '../../../../../../../infrastructure/fetch-json' @@ -43,60 +44,65 @@ type UseGroupInvitationNotificationReturnType = { export function useGroupInvitationNotification( notification: NotificationGroupInvitation ): UseGroupInvitationNotificationReturnType { - const { - _id: notificationId, - messageOpts: { token, managedUsersEnabled }, - } = notification - + const { _id: notificationId } = notification const [groupInvitationStatus, setGroupInvitationStatus] = useState(GroupInvitationStatus.Idle) - const { runAsync, isLoading: isAcceptingInvitation } = useAsync< - never, - FetchError - >() + const { runAsync, isLoading } = useAsync() + const { runAsync: runAsyncNotification, isLoading: isLoadingNotification } = + useAsync() const location = useLocation() const { handleDismiss } = useAsyncDismiss() - const hasIndividualRecurlySubscription = getMeta( - 'ol-hasIndividualRecurlySubscription' + const hasIndividualPaidSubscription = getMeta( + 'ol-hasIndividualPaidSubscription' ) useEffect(() => { - if (hasIndividualRecurlySubscription) { + if (hasIndividualPaidSubscription) { setGroupInvitationStatus( GroupInvitationStatus.CancelIndividualSubscription ) } else { setGroupInvitationStatus(GroupInvitationStatus.AskToJoin) } - }, [hasIndividualRecurlySubscription]) + }, [hasIndividualPaidSubscription]) const acceptGroupInvite = useCallback(() => { - if (managedUsersEnabled) { - location.assign(`/subscription/invites/${token}/`) - } else { - runAsync( - putJSON(`/subscription/invites/${token}/`, { - body: { - _csrf: getMeta('ol-csrfToken'), - }, - }) - ) - .then(() => { - setGroupInvitationStatus(GroupInvitationStatus.SuccessfullyJoined) - }) - .catch(err => { - debugConsole.error(err) - setGroupInvitationStatus(GroupInvitationStatus.Error) - }) - .finally(() => { - // remove notification automatically in the browser - window.setTimeout(() => { - setGroupInvitationStatus(GroupInvitationStatus.NotificationIsHidden) - }, SUCCESSFUL_NOTIF_TIME_BEFORE_HIDDEN) - }) - } - 
}, [runAsync, token, location, managedUsersEnabled]) + // Fetch the latest notification data to ensure it's up-to-date + runAsyncNotification(getJSON(`/user/notification/${notificationId}`)) + .then(notification => { + const { + messageOpts: { token, managedUsersEnabled }, + } = notification + if (managedUsersEnabled) { + location.assign(`/subscription/invites/${token}/`) + } else { + runAsync( + putJSON(`/subscription/invites/${token}/`, { + body: { + _csrf: getMeta('ol-csrfToken'), + }, + }) + ) + .then(() => { + setGroupInvitationStatus(GroupInvitationStatus.SuccessfullyJoined) + }) + .catch(err => { + debugConsole.error(err) + setGroupInvitationStatus(GroupInvitationStatus.Error) + }) + .finally(() => { + // remove notification automatically in the browser + window.setTimeout(() => { + setGroupInvitationStatus( + GroupInvitationStatus.NotificationIsHidden + ) + }, SUCCESSFUL_NOTIF_TIME_BEFORE_HIDDEN) + }) + } + }) + .catch(debugConsole.error) + }, [runAsync, runAsyncNotification, notificationId, location]) const cancelPersonalSubscription = useCallback(() => { setGroupInvitationStatus(GroupInvitationStatus.AskToJoin) @@ -114,6 +120,8 @@ export function useGroupInvitationNotification( setGroupInvitationStatus(GroupInvitationStatus.NotificationIsHidden) }, []) + const isAcceptingInvitation = isLoadingNotification || isLoading + return { isAcceptingInvitation, groupInvitationStatus, diff --git a/services/web/frontend/js/features/project-list/components/sidebar/sidebar-ds-nav.tsx b/services/web/frontend/js/features/project-list/components/sidebar/sidebar-ds-nav.tsx index 2aaed364a7..eba033ea76 100644 --- a/services/web/frontend/js/features/project-list/components/sidebar/sidebar-ds-nav.tsx +++ b/services/web/frontend/js/features/project-list/components/sidebar/sidebar-ds-nav.tsx @@ -154,7 +154,7 @@ function SidebarDsNav() { )}
    -
    +
    Digital Science
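Further up, the group-invitation hook now re-fetches the notification before accepting, so the invite token and the `managedUsersEnabled` flag come from the server rather than from the cached notification payload. A simplified sketch of that flow, with import paths shortened to the `@` alias and the status/error handling omitted:

```typescript
import { getJSON, putJSON } from '@/infrastructure/fetch-json'
import getMeta from '@/utils/meta'

type GroupInvitationMessageOpts = {
  token: string
  managedUsersEnabled: boolean
}

async function acceptGroupInvite(notificationId: string) {
  // Fetch the latest notification data so the token is up to date
  const notification = (await getJSON(
    `/user/notification/${notificationId}`
  )) as { messageOpts: GroupInvitationMessageOpts }
  const { token, managedUsersEnabled } = notification.messageOpts

  if (managedUsersEnabled) {
    // Managed groups go through the dedicated invite page
    window.location.assign(`/subscription/invites/${token}/`)
    return
  }

  // Plain groups accept in place with a CSRF-protected PUT
  await putJSON(`/subscription/invites/${token}/`, {
    body: { _csrf: getMeta('ol-csrfToken') },
  })
}
```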
    diff --git a/services/web/frontend/js/features/project-list/components/sidebar/tags-list.tsx b/services/web/frontend/js/features/project-list/components/sidebar/tags-list.tsx index 452b003b2b..1c6298603c 100644 --- a/services/web/frontend/js/features/project-list/components/sidebar/tags-list.tsx +++ b/services/web/frontend/js/features/project-list/components/sidebar/tags-list.tsx @@ -57,6 +57,7 @@ export default function TagsList() {
    + + + ) + + cy.findByRole('button', { name: btnText }).as('button') + cy.get('@button').trigger('mouseover') + cy.findByText(description) + cy.get('@button').trigger('mouseout') + cy.get('@button').focus() + cy.findByText(description) + cy.get('body').type('{esc}') + cy.findByText(description).should('not.exist') + }) }) diff --git a/services/web/test/frontend/features/chat/components/chat-pane.test.jsx b/services/web/test/frontend/features/chat/components/chat-pane.test.jsx index 5a59b9b19f..f990a8c6ce 100644 --- a/services/web/test/frontend/features/chat/components/chat-pane.test.jsx +++ b/services/web/test/frontend/features/chat/components/chat-pane.test.jsx @@ -7,10 +7,7 @@ import { import fetchMock from 'fetch-mock' import ChatPane from '../../../../../frontend/js/features/chat/components/chat-pane' -import { - cleanUpContext, - renderWithEditorContext, -} from '../../../helpers/render-with-context' +import { renderWithEditorContext } from '../../../helpers/render-with-context' import { stubMathJax, tearDownMathJaxStubs } from './stubs' describe('', function () { @@ -22,7 +19,6 @@ describe('', function () { beforeEach(function () { window.metaAttributesCache.set('ol-user', user) - window.metaAttributesCache.set('ol-chatEnabled', true) window.metaAttributesCache.set('ol-preventCompileOnLoad', true) }) @@ -47,8 +43,6 @@ describe('', function () { beforeEach(function () { fetchMock.removeRoutes().clearHistory() - cleanUpContext() - stubMathJax() }) diff --git a/services/web/test/frontend/features/chat/context/chat-context.test.jsx b/services/web/test/frontend/features/chat/context/chat-context.test.jsx index ddb69d3025..a930ba3a9c 100644 --- a/services/web/test/frontend/features/chat/context/chat-context.test.jsx +++ b/services/web/test/frontend/features/chat/context/chat-context.test.jsx @@ -9,7 +9,6 @@ import { useChatContext, chatClientIdGenerator, } from '@/features/chat/context/chat-context' -import { cleanUpContext } from '../../../helpers/render-with-context' import { stubMathJax, tearDownMathJaxStubs } from '../components/stubs' import { SocketIOMock } from '@/ide/connection/SocketIoShim' import { EditorProviders } from '../../../helpers/editor-providers' @@ -24,12 +23,10 @@ describe('ChatContext', function () { beforeEach(function () { fetchMock.removeRoutes().clearHistory() - cleanUpContext() stubMathJax() window.metaAttributesCache.set('ol-user', user) - window.metaAttributesCache.set('ol-chatEnabled', true) window.metaAttributesCache.set('ol-preventCompileOnLoad', true) this.stub = sinon.stub(chatClientIdGenerator, 'generate').returns(uuidValue) diff --git a/services/web/test/frontend/features/dictionary/components/dictionary-modal-content.spec.jsx b/services/web/test/frontend/features/dictionary/components/dictionary-modal-content.spec.jsx index c28eef66ef..c8cdd931b3 100644 --- a/services/web/test/frontend/features/dictionary/components/dictionary-modal-content.spec.jsx +++ b/services/web/test/frontend/features/dictionary/components/dictionary-modal-content.spec.jsx @@ -19,7 +19,7 @@ describe('', function () { }) it('list words', function () { - cy.then(win => { + cy.then(() => { learnedWords.global = new Set(['foo', 'bar']) }) @@ -34,7 +34,7 @@ describe('', function () { }) it('shows message when empty', function () { - cy.then(win => { + cy.then(() => { learnedWords.global = new Set([]) }) @@ -50,7 +50,7 @@ describe('', function () { it('removes words', function () { cy.intercept('/spelling/unlearn', { statusCode: 200 }) - cy.then(win => { + cy.then(() => { 
learnedWords.global = new Set(['Foo', 'bar']) }) @@ -76,7 +76,7 @@ describe('', function () { it('handles errors', function () { cy.intercept('/spelling/unlearn', { statusCode: 500 }).as('unlearn') - cy.then(win => { + cy.then(() => { learnedWords.global = new Set(['foo']) }) diff --git a/services/web/test/frontend/features/editor-navigation-toolbar/components/toolbar-header.test.jsx b/services/web/test/frontend/features/editor-navigation-toolbar/components/toolbar-header.test.jsx index 84b1e680ef..be7894fc73 100644 --- a/services/web/test/frontend/features/editor-navigation-toolbar/components/toolbar-header.test.jsx +++ b/services/web/test/frontend/features/editor-navigation-toolbar/components/toolbar-header.test.jsx @@ -27,7 +27,6 @@ describe('', function () { } beforeEach(function () { - window.metaAttributesCache.set('ol-chatEnabled', true) window.metaAttributesCache.set('ol-preventCompileOnLoad', true) }) diff --git a/services/web/test/frontend/features/group-management/components/members-table/dropdown-button.spec.tsx b/services/web/test/frontend/features/group-management/components/members-table/dropdown-button.spec.tsx index 9213069699..93d24865b2 100644 --- a/services/web/test/frontend/features/group-management/components/members-table/dropdown-button.spec.tsx +++ b/services/web/test/frontend/features/group-management/components/members-table/dropdown-button.spec.tsx @@ -24,6 +24,7 @@ function mountDropDownComponent(user: User, subscriptionId: string) { { win.metaAttributesCache.set('ol-users', [user]) + win.metaAttributesCache.set('ol-isUserGroupManager', true) }) mountDropDownComponent(user, subscriptionId) }) @@ -189,6 +191,7 @@ describe('DropdownButton', function () { cy.findByRole('button', { name: /actions/i }).click() cy.findByTestId('delete-user-action').should('be.visible') + cy.findByTestId('release-user-action') cy.findByTestId('remove-user-action').should('not.exist') cy.findByTestId('resend-managed-user-invite-action').should('not.exist') @@ -566,6 +569,7 @@ describe('DropdownButton', function () { cy.findByTestId('unlink-user-action').should('be.visible') cy.findByTestId('delete-user-action').should('not.exist') + cy.findByTestId('release-user-action').should('not.exist') cy.findByTestId('resend-sso-link-invite-action').should('not.exist') cy.findByTestId('no-actions-available').should('not.exist') }) @@ -608,6 +612,7 @@ describe('DropdownButton', function () { ) cy.findByTestId('remove-user-action').should('be.visible') cy.findByTestId('delete-user-action').should('not.exist') + cy.findByTestId('release-user-action').should('not.exist') cy.findByTestId('resend-sso-link-invite-action').should('exist') cy.findByTestId('no-actions-available').should('not.exist') @@ -633,6 +638,7 @@ describe('DropdownButton', function () { beforeEach(function () { cy.window().then(win => { win.metaAttributesCache.set('ol-users', [user]) + win.metaAttributesCache.set('ol-isUserGroupManager', true) }) mountDropDownComponent(user, subscriptionId) }) @@ -648,6 +654,7 @@ describe('DropdownButton', function () { cy.findByRole('button', { name: /actions/i }).click() cy.findByTestId('delete-user-action').should('be.visible') + cy.findByTestId('release-user-action') cy.findByTestId('remove-user-action').should('not.exist') cy.findByTestId('resend-managed-user-invite-action').should('not.exist') @@ -682,6 +689,7 @@ describe('DropdownButton', function () { beforeEach(function () { cy.window().then(win => { win.metaAttributesCache.set('ol-users', [user]) + 
win.metaAttributesCache.set('ol-isUserGroupManager', true) }) mountDropDownComponent(user, subscriptionId) }) @@ -697,6 +705,7 @@ describe('DropdownButton', function () { cy.findByRole('button', { name: /actions/i }).click() cy.findByTestId('delete-user-action').should('be.visible') + cy.findByTestId('release-user-action') cy.findByTestId('remove-user-action').should('not.exist') cy.findByTestId('resend-managed-user-invite-action').should('not.exist') @@ -745,6 +754,7 @@ describe('DropdownButton', function () { cy.findByTestId('resend-managed-user-invite-action').should('not.exist') cy.findByTestId('remove-user-action').should('not.exist') cy.findByTestId('delete-user-action').should('not.exist') + cy.findByTestId('release-user-action').should('not.exist') cy.findByTestId('no-actions-available').should('not.exist') }) }) @@ -793,6 +803,7 @@ describe('DropdownButton', function () { cy.findByTestId('no-actions-available').should('not.exist') cy.findByTestId('delete-user-action').should('not.exist') + cy.findByTestId('release-user-action').should('not.exist') cy.findByTestId('remove-user-action').should('not.exist') cy.findByTestId('resend-managed-user-invite-action').should('not.exist') cy.findByTestId('resend-sso-link-invite-action').should('not.exist') diff --git a/services/web/test/frontend/features/group-management/components/members-table/member-row.spec.tsx b/services/web/test/frontend/features/group-management/components/members-table/member-row.spec.tsx index 538e3036b8..0ae6ee8d04 100644 --- a/services/web/test/frontend/features/group-management/components/members-table/member-row.spec.tsx +++ b/services/web/test/frontend/features/group-management/components/members-table/member-row.spec.tsx @@ -30,6 +30,7 @@ describe('MemberRow', function () { + ) + }) + + it('should render the modal', function () { + cy.findByTestId('release-user-form') + }) + + it('should render content', function () { + cy.findByText( + `You’re about to remove ${user.first_name} ${user.last_name} (${user.email}). Doing this will mean:` + ) + cy.findAllByRole('listitem') + .eq(0) + .contains(/they will be removed from the group/i) + cy.findAllByRole('listitem') + .eq(1) + .contains(/they will no longer be a managed user/i) + cy.findAllByRole('listitem') + .eq(2) + .contains( + /they will retain their existing account on the .* free plan/i + ) + cy.findAllByRole('listitem') + .eq(3) + .contains( + /they will retain ownership of projects currently owned by them and any collaborators on those projects will become read-only/i + ) + cy.findAllByRole('listitem') + .eq(4) + .contains( + /they will continue to have access to any projects shared with them/i + ) + cy.findAllByRole('listitem') + .eq(5) + .contains( + /they won’t be able to log in with SSO \(if you have this enabled\)\. 
they will need to set an .* password/i + ) + cy.contains( + /in cases where a user has left your organization and you need to transfer their projects, the delete user option should be used/i + ) + }) + + it('should disable the remove button if the email does not match the user', function () { + // Button should be disabled initially + cy.findByRole('button', { name: /remove user/i }).should('be.disabled') + + // Fill in the email input, with the wrong email address + cy.findByLabelText( + /to confirm you want to remove .* please type the email address associated with their account/i + ).type('totally.wrong@example.com') + + // Button still disabled + cy.findByRole('button', { name: /remove user/i }).should('be.disabled') + }) + + it('should fill out the form, and enable the remove button', function () { + // Button should be disabled initially + cy.findByRole('button', { name: /remove user/i }).should('be.disabled') + + // Fill in the email input + cy.findByLabelText( + /to confirm you want to remove .* please type the email address associated with their account/i + ).type(user.email) + + // Button should be enabled now + cy.findByRole('button', { name: /remove user/i }).should('be.enabled') + }) + }) +}) diff --git a/services/web/test/frontend/features/history/components/change-list.spec.tsx b/services/web/test/frontend/features/history/components/change-list.spec.tsx index b3a1071015..763845db54 100644 --- a/services/web/test/frontend/features/history/components/change-list.spec.tsx +++ b/services/web/test/frontend/features/history/components/change-list.spec.tsx @@ -372,7 +372,7 @@ describe('change list (Bootstrap 5)', function () { cy.findAllByTestId('history-version-details') .eq(1) .within(() => { - cy.get('[aria-label="Compare"]').click() + cy.findByRole('button', { name: /compare/i }).click() cy.findByRole('menu').within(() => { cy.findByRole('menuitem', { name: /compare up to this version/i, diff --git a/services/web/test/frontend/features/ide-react/unit/share-js-history-ot-type.ts b/services/web/test/frontend/features/ide-react/unit/share-js-history-ot-type.ts new file mode 100644 index 0000000000..8418c59ed0 --- /dev/null +++ b/services/web/test/frontend/features/ide-react/unit/share-js-history-ot-type.ts @@ -0,0 +1,134 @@ +import { expect } from 'chai' +import { + StringFileData, + TextOperation, + AddCommentOperation, + Range, +} from 'overleaf-editor-core' +import { historyOTType } from '@/features/ide-react/editor/share-js-history-ot-type' + +describe('historyOTType', function () { + let snapshot: StringFileData + let opsA: TextOperation[] + let opsB: TextOperation[] + + beforeEach(function () { + snapshot = new StringFileData('one plus two equals three') + + // After opsA: "seven plus five equals twelve" + opsA = [new TextOperation(), new TextOperation(), new TextOperation()] + + opsA[0].remove(3) + opsA[0].insert('seven') + opsA[0].retain(22) + + opsA[1].retain(11) + opsA[1].remove(3) + opsA[1].insert('five') + opsA[1].retain(13) + + opsA[2].retain(23) + opsA[2].remove(5) + opsA[2].insert('twelve') + + // After ops2: "one times two equals two" + opsB = [new TextOperation(), new TextOperation()] + + opsB[0].retain(4) + opsB[0].remove(4) + opsB[0].insert('times') + opsB[0].retain(17) + + opsB[1].retain(21) + opsB[1].remove(5) + opsB[1].insert('two') + }) + + describe('apply', function () { + it('supports an empty operations array', function () { + const result = historyOTType.apply(snapshot, []) + expect(result.getContent()).to.equal('one plus two equals three') + }) + + 
it('applies operations to the snapshot (opsA)', function () { + const result = historyOTType.apply(snapshot, opsA) + expect(result.getContent()).to.equal('seven plus five equals twelve') + }) + + it('applies operations to the snapshot (opsB)', function () { + const result = historyOTType.apply(snapshot, opsB) + expect(result.getContent()).to.equal('one times two equals two') + }) + }) + + describe('compose', function () { + it('supports empty operations', function () { + const ops = historyOTType.compose([], []) + expect(ops).to.deep.equal([]) + }) + + it('supports an empty operation on the left', function () { + const ops = historyOTType.compose([], opsA) + const result = historyOTType.apply(snapshot, ops) + expect(result.getContent()).to.equal('seven plus five equals twelve') + }) + + it('supports an empty operation on the right', function () { + const ops = historyOTType.compose(opsA, []) + const result = historyOTType.apply(snapshot, ops) + expect(result.getContent()).to.equal('seven plus five equals twelve') + }) + + it('supports operations on both sides', function () { + const ops = historyOTType.compose(opsA.slice(0, 2), opsA.slice(2)) + const result = historyOTType.apply(snapshot, ops) + expect(ops.length).to.equal(1) + expect(result.getContent()).to.equal('seven plus five equals twelve') + }) + + it("supports operations that can't be composed", function () { + const comment = new AddCommentOperation('comment-id', [new Range(3, 10)]) + const ops = historyOTType.compose(opsA.slice(0, 2), [ + comment, + ...opsA.slice(2), + ]) + expect(ops.length).to.equal(3) + const result = historyOTType.apply(snapshot, ops) + expect(result.getContent()).to.equal('seven plus five equals twelve') + }) + }) + + describe('transformX', function () { + it('supports empty operations', function () { + const [aPrime, bPrime] = historyOTType.transformX([], []) + expect(aPrime).to.deep.equal([]) + expect(bPrime).to.deep.equal([]) + }) + + it('supports an empty operation on the left', function () { + const [aPrime, bPrime] = historyOTType.transformX([], opsB) + expect(aPrime).to.deep.equal([]) + expect(bPrime).to.deep.equal(opsB) + }) + + it('supports an empty operation on the right', function () { + const [aPrime, bPrime] = historyOTType.transformX(opsA, []) + expect(aPrime).to.deep.equal(opsA) + expect(bPrime).to.deep.equal([]) + }) + + it('supports operations on both sides (a then b)', function () { + const [, bPrime] = historyOTType.transformX(opsA, opsB) + const ops = historyOTType.compose(opsA, bPrime) + const result = historyOTType.apply(snapshot, ops) + expect(result.getContent()).to.equal('seven times five equals twelvetwo') + }) + + it('supports operations on both sides (b then a)', function () { + const [aPrime] = historyOTType.transformX(opsA, opsB) + const ops = historyOTType.compose(opsB, aPrime) + const result = historyOTType.apply(snapshot, ops) + expect(result.getContent()).to.equal('seven times five equals twelvetwo') + }) + }) +}) diff --git a/services/web/test/frontend/features/project-list/components/notifications.test.tsx b/services/web/test/frontend/features/project-list/components/notifications.test.tsx index 7197ddb365..9a845283d7 100644 --- a/services/web/test/frontend/features/project-list/components/notifications.test.tsx +++ b/services/web/test/frontend/features/project-list/components/notifications.test.tsx @@ -5,7 +5,6 @@ import { render, screen, waitForElementToBeRemoved, - within, } from '@testing-library/react' import fetchMock from 'fetch-mock' import { merge, cloneDeep } 
from 'lodash' @@ -442,7 +441,7 @@ describe('', function () { ), ]) window.metaAttributesCache.set( - 'ol-hasIndividualRecurlySubscription', + 'ol-hasIndividualPaidSubscription', true ) @@ -672,32 +671,37 @@ describe('', function () { renderWithinProjectListProvider(ConfirmEmail) await fetchMock.callHistory.flush(true) - fetchMock.post('/user/emails/resend_confirmation', 200) + fetchMock.post('/user/emails/send-confirmation-code', 200) const email = userEmails[0].email - const notificationBody = await screen.findByTestId( - 'pro-notification-body' - ) + const alert = await screen.findByRole('alert') if (isPrimary) { - expect(notificationBody.textContent).to.contain( - `Please confirm your primary email address ${email} by clicking on the link in the confirmation email.` + expect(alert.textContent).to.contain( + `Please confirm your primary email address ${email}. To edit it, go to ` ) } else { - expect(notificationBody.textContent).to.contain( - `Please confirm your secondary email address ${email} by clicking on the link in the confirmation email.` + expect(alert.textContent).to.contain( + `Please confirm your secondary email address ${email}. To edit it, go to ` ) } - const resendButton = screen.getByRole('button', { name: /resend/i }) - fireEvent.click(resendButton) + expect( + screen + .getByRole('button', { name: 'Send confirmation code' }) + .classList.contains('button-loading') + ).to.be.false - await waitForElementToBeRemoved(() => - screen.queryByRole('button', { name: /resend/i }) - ) + expect(screen.queryByRole('dialog')).to.be.null + + const sendCodeButton = await screen.findByRole('button', { + name: 'Send confirmation code', + }) + fireEvent.click(sendCodeButton) + + await screen.findByRole('dialog') expect(fetchMock.callHistory.called()).to.be.true - expect(screen.queryByRole('alert')).to.be.null }) } @@ -716,25 +720,22 @@ describe('', function () { renderWithinProjectListProvider(ConfirmEmail) await fetchMock.callHistory.flush(true) - fetchMock.post('/user/emails/resend_confirmation', 200) + fetchMock.post('/user/emails/send-confirmation-code', 200) const email = untrustedUserData.email - const notificationBody = await screen.findByTestId( - 'not-trusted-notification-body' - ) - expect(notificationBody.textContent).to.contain( + const alert = await screen.findByRole('alert') + expect(alert.textContent).to.contain( `To enhance the security of your Overleaf account, please reconfirm your secondary email address ${email}.` ) - const resendButton = screen.getByRole('button', { name: /resend/i }) + const resendButton = screen.getByRole('button', { + name: 'Send confirmation code', + }) fireEvent.click(resendButton) - await waitForElementToBeRemoved(() => - screen.getByRole('button', { name: /resend/i }) - ) + await screen.findByRole('dialog') expect(fetchMock.callHistory.called()).to.be.true - expect(screen.queryByRole('alert')).to.be.null }) it('fails to send', async function () { @@ -742,20 +743,15 @@ describe('', function () { renderWithinProjectListProvider(ConfirmEmail) await fetchMock.callHistory.flush(true) - fetchMock.post('/user/emails/resend_confirmation', 500) + fetchMock.post('/user/emails/send-confirmation-code', 500) const resendButtons = await screen.findAllByRole('button', { - name: /resend/i, + name: 'Send confirmation code', }) const resendButton = resendButtons[0] fireEvent.click(resendButton) - const notificationBody = screen.getByTestId('pro-notification-body') - await waitForElementToBeRemoved(() => - within(notificationBody).getByTestId( - 
'loading-resending-confirmation-email' - ) - ) + await screen.findByRole('dialog') expect(fetchMock.callHistory.called()).to.be.true screen.getByText(/something went wrong/i) @@ -773,11 +769,10 @@ describe('', function () { const alert = await screen.findByRole('alert') const email = unconfirmedCommonsUserData.email - const notificationBody = within(alert).getByTestId('notification-body') - expect(notificationBody.textContent).to.contain( + expect(alert.textContent).to.contain( 'You are one step away from accessing Overleaf Professional features' ) - expect(notificationBody.textContent).to.contain( + expect(alert.textContent).to.contain( `Overleaf has an Overleaf subscription. Click the confirmation link sent to ${email} to upgrade to Overleaf Professional` ) }) @@ -794,17 +789,14 @@ describe('', function () { const alert = await screen.findByRole('alert') const email = unconfirmedCommonsUserData.email - const notificationBody = within(alert).getByTestId( - 'pro-notification-body' - ) const isPrimary = unconfirmedCommonsUserData.default if (isPrimary) { - expect(notificationBody.textContent).to.contain( - `Please confirm your primary email address ${email} by clicking on the link in the confirmation email` + expect(alert.textContent).to.contain( + `Please confirm your primary email address ${email}.` ) } else { - expect(notificationBody.textContent).to.contain( - `Please confirm your secondary email address ${email} by clicking on the link in the confirmation email` + expect(alert.textContent).to.contain( + `Please confirm your secondary email address ${email}.` ) } }) diff --git a/services/web/test/frontend/features/settings/components/emails/add-email-input.test.tsx b/services/web/test/frontend/features/settings/components/emails/add-email-input.test.tsx index 50220152c6..694a13f32c 100644 --- a/services/web/test/frontend/features/settings/components/emails/add-email-input.test.tsx +++ b/services/web/test/frontend/features/settings/components/emails/add-email-input.test.tsx @@ -13,7 +13,7 @@ const testInstitutionData = [ describe('', function () { const defaultProps = { - onChange: (value: string) => {}, + onChange: () => {}, handleAddNewEmail: () => {}, } diff --git a/services/web/test/frontend/features/settings/components/emails/emails-section.test.tsx b/services/web/test/frontend/features/settings/components/emails/emails-section.test.tsx index e784f6aaac..55c833df1c 100644 --- a/services/web/test/frontend/features/settings/components/emails/emails-section.test.tsx +++ b/services/web/test/frontend/features/settings/components/emails/emails-section.test.tsx @@ -99,7 +99,7 @@ describe('', function () { fetchMock.get('/user/emails?ensureAffiliation=true', [unconfirmedUserData]) render() - await screen.findByRole('button', { name: /resend confirmation code/i }) + await screen.findByRole('button', { name: 'Send confirmation code' }) }) it('renders professional label', async function () { @@ -121,24 +121,24 @@ describe('', function () { fetchMock.post('/user/emails/send-confirmation-code', 200) const button = screen.getByRole('button', { - name: /resend confirmation code/i, + name: 'Send confirmation code', }) fireEvent.click(button) expect( screen.queryByRole('button', { - name: /resend confirmation code/i, + name: 'Send confirmation code', }) ).to.be.null - await waitForElementToBeRemoved(() => screen.getByText(/sending/i)) + await screen.findByRole('dialog') expect( screen.queryByText(/an error has occurred while performing your request/i) ).to.be.null await 
screen.findAllByRole('button', { - name: /resend confirmation code/i, + name: 'Resend confirmation code', }) }) @@ -151,17 +151,17 @@ describe('', function () { fetchMock.post('/user/emails/send-confirmation-code', 503) const button = screen.getByRole('button', { - name: /resend confirmation code/i, + name: 'Send confirmation code', }) fireEvent.click(button) - expect(screen.queryByRole('button', { name: /resend confirmation code/i })) - .to.be.null + expect(screen.queryByRole('button', { name: 'Send confirmation code' })).to + .be.null - await waitForElementToBeRemoved(() => screen.getByText(/sending/i)) + await screen.findByRole('dialog') - screen.getByText(/sorry, something went wrong/i) - screen.getByRole('button', { name: /resend confirmation code/i }) + await screen.findByText(/sorry, something went wrong/i) + screen.getByRole('button', { name: 'Resend confirmation code' }) }) it('sorts emails with primary first, then confirmed, then unconfirmed', async function () { diff --git a/services/web/test/frontend/features/share-project-modal/components/share-project-modal.test.jsx b/services/web/test/frontend/features/share-project-modal/components/share-project-modal.test.jsx index 88f3482c4b..b86207fb0f 100644 --- a/services/web/test/frontend/features/share-project-modal/components/share-project-modal.test.jsx +++ b/services/web/test/frontend/features/share-project-modal/components/share-project-modal.test.jsx @@ -5,10 +5,7 @@ import fetchMock from 'fetch-mock' import userEvent from '@testing-library/user-event' import ShareProjectModal from '../../../../../frontend/js/features/share-project-modal/components/share-project-modal' -import { - renderWithEditorContext, - cleanUpContext, -} from '../../../helpers/render-with-context' +import { renderWithEditorContext } from '../../../helpers/render-with-context' import { EditorProviders, USER_EMAIL, @@ -100,7 +97,6 @@ describe('', function () { afterEach(function () { this.locationWrapperSandbox.restore() fetchMock.removeRoutes().clearHistory() - cleanUpContext() }) it('renders the modal', async function () { @@ -617,7 +613,7 @@ describe('', function () { fetchMock.post( 'express:/project/:projectId/invite', - ({ args: [url, req] }) => { + ({ args: [, req] }) => { const data = JSON.parse(req.body) if (data.email === 'a@b.c') { diff --git a/services/web/test/frontend/features/source-editor/helpers/mock-doc.ts b/services/web/test/frontend/features/source-editor/helpers/mock-doc.ts index 4c239c1f60..a4944c1e97 100644 --- a/services/web/test/frontend/features/source-editor/helpers/mock-doc.ts +++ b/services/web/test/frontend/features/source-editor/helpers/mock-doc.ts @@ -1,4 +1,4 @@ -import { ShareDoc } from '../../../../../types/share-doc' +import { ShareLatexOTShareDoc } from '../../../../../types/share-doc' import { EventEmitter } from 'events' export const docId = 'test-doc' @@ -36,6 +36,9 @@ const defaultContent = mockDocContent(contentLines.join('\n')) const MAX_DOC_LENGTH = 2 * 1024 * 1024 // ol-maxDocLength class MockShareDoc extends EventEmitter { + otType = 'sharejs-text-ot' as const + snapshot = '' + constructor(public text: string) { super() } @@ -51,16 +54,21 @@ class MockShareDoc extends EventEmitter { del() { // do nothing } + + submitOp() { + // do nothing + } } export const mockDoc = ( content = defaultContent, { rangesOptions = {} } = {} ) => { - const mockShareJSDoc: ShareDoc = new MockShareDoc(content) + const mockShareJSDoc: ShareLatexOTShareDoc = new MockShareDoc(content) return { doc_id: docId, + getType: () => 
'sharejs-text-ot', getSnapshot: () => { return content }, @@ -98,10 +106,11 @@ export const mockDoc = ( removeCommentId: () => {}, ...rangesOptions, }, + // eslint-disable-next-line @typescript-eslint/no-unused-vars submitOp: (op: any) => {}, setTrackChangesIdSeeds: () => {}, getTrackingChanges: () => true, - setTrackingChanges: () => {}, + setTrackChangesUserId: () => {}, getInflightOp: () => null, getPendingOp: () => null, hasBufferedOps: () => false, diff --git a/services/web/test/frontend/features/subscription/components/dashboard/personal-subscription.test.tsx b/services/web/test/frontend/features/subscription/components/dashboard/personal-subscription.test.tsx index 8edc881caa..a61c9fca7f 100644 --- a/services/web/test/frontend/features/subscription/components/dashboard/personal-subscription.test.tsx +++ b/services/web/test/frontend/features/subscription/components/dashboard/personal-subscription.test.tsx @@ -190,7 +190,9 @@ describe('', function () { }) it('shows different payment email address section', async function () { - fetchMock.post('/user/subscription/account/email', 200) + fetchMock.post('/user/subscription/account/email', { + status: 200, + }) const usersEmail = 'foo@example.com' renderWithSubscriptionDashContext(, { metaTags: [ diff --git a/services/web/test/frontend/features/subscription/components/group-invite/group-invite.test.tsx b/services/web/test/frontend/features/subscription/components/group-invite/group-invite.test.tsx index cc70eff90d..d7b769fd20 100644 --- a/services/web/test/frontend/features/subscription/components/group-invite/group-invite.test.tsx +++ b/services/web/test/frontend/features/subscription/components/group-invite/group-invite.test.tsx @@ -18,10 +18,7 @@ describe('group invite', function () { describe('when user has personal subscription', function () { beforeEach(function () { - window.metaAttributesCache.set( - 'ol-hasIndividualRecurlySubscription', - true - ) + window.metaAttributesCache.set('ol-hasIndividualPaidSubscription', true) }) it('renders cancel personal subscription view', async function () { @@ -55,10 +52,7 @@ describe('group invite', function () { describe('when user does not have a personal subscription', function () { beforeEach(function () { - window.metaAttributesCache.set( - 'ol-hasIndividualRecurlySubscription', - false - ) + window.metaAttributesCache.set('ol-hasIndividualPaidSubscription', false) window.metaAttributesCache.set('ol-inviteToken', 'token123') }) diff --git a/services/web/test/frontend/features/subscription/fixtures/subscriptions.ts b/services/web/test/frontend/features/subscription/fixtures/subscriptions.ts index 08690742d3..8011c5206d 100644 --- a/services/web/test/frontend/features/subscription/fixtures/subscriptions.ts +++ b/services/web/test/frontend/features/subscription/fixtures/subscriptions.ts @@ -25,7 +25,6 @@ export const annualActiveSubscription: PaidSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'collaborator-annual', - recurlySubscription_id: 'ghi789', plan: { planCode: 'collaborator-annual', name: 'Standard (Collaborator) Annual', @@ -68,7 +67,6 @@ export const annualActiveSubscriptionEuro: PaidSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'collaborator-annual', - recurlySubscription_id: 'ghi789', plan: { planCode: 'collaborator-annual', name: 'Standard (Collaborator) Annual', @@ -111,7 +109,6 @@ export const annualActiveSubscriptionPro: PaidSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'professional', - recurlySubscription_id: 'ghi789', 
plan: { planCode: 'professional', name: 'Professional', @@ -153,7 +150,6 @@ export const pastDueExpiredSubscription: PaidSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'collaborator-annual', - recurlySubscription_id: 'ghi789', plan: { planCode: 'collaborator-annual', name: 'Standard (Collaborator) Annual', @@ -196,7 +192,6 @@ export const canceledSubscription: PaidSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'collaborator-annual', - recurlySubscription_id: 'ghi789', plan: { planCode: 'collaborator-annual', name: 'Standard (Collaborator) Annual', @@ -239,7 +234,6 @@ export const pendingSubscriptionChange: PaidSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'collaborator-annual', - recurlySubscription_id: 'ghi789', plan: { planCode: 'collaborator-annual', name: 'Standard (Collaborator) Annual', @@ -290,7 +284,6 @@ export const groupActiveSubscription: GroupSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'group_collaborator_10_enterprise', - recurlySubscription_id: 'ghi789', plan: { planCode: 'group_collaborator_10_enterprise', name: 'Overleaf Standard (Collaborator) - Group Account (10 licenses) - Enterprise', @@ -338,7 +331,6 @@ export const groupActiveSubscriptionWithPendingLicenseChange: GroupSubscription admin_id: 'abc123', teamInvites: [], planCode: 'group_collaborator_10_enterprise', - recurlySubscription_id: 'ghi789', plan: { planCode: 'group_collaborator_10_enterprise', name: 'Overleaf Standard (Collaborator) - Group Account (10 licenses) - Enterprise', @@ -396,7 +388,6 @@ export const trialSubscription: PaidSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'paid-personal_free_trial_7_days', - recurlySubscription_id: 'ghi789', plan: { planCode: 'paid-personal_free_trial_7_days', name: 'Personal', @@ -439,7 +430,6 @@ export const customSubscription: CustomSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'collaborator-annual', - recurlySubscription_id: 'ghi789', plan: { planCode: 'collaborator-annual', name: 'Standard (Collaborator) Annual', @@ -460,7 +450,6 @@ export const trialCollaboratorSubscription: PaidSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'collaborator_free_trial_7_days', - recurlySubscription_id: 'ghi789', plan: { planCode: 'collaborator_free_trial_7_days', name: 'Standard (Collaborator)', @@ -503,7 +492,6 @@ export const monthlyActiveCollaborator: PaidSubscription = { admin_id: 'abc123', teamInvites: [], planCode: 'collaborator', - recurlySubscription_id: 'ghi789', plan: { planCode: 'collaborator', name: 'Standard (Collaborator)', diff --git a/services/web/test/frontend/features/word-count-modal/components/word-count-modal.spec.tsx b/services/web/test/frontend/features/word-count-modal/components/word-count-modal.spec.tsx index 2ed1dc9448..9fc5887535 100644 --- a/services/web/test/frontend/features/word-count-modal/components/word-count-modal.spec.tsx +++ b/services/web/test/frontend/features/word-count-modal/components/word-count-modal.spec.tsx @@ -12,7 +12,7 @@ describe('', function () { }) cy.mount( - + ) @@ -30,7 +30,7 @@ describe('', function () { }) cy.mount( - + ) @@ -48,7 +48,7 @@ describe('', function () { }) cy.mount( - + ) @@ -64,7 +64,7 @@ describe('', function () { }) cy.mount( - + ) @@ -87,7 +87,7 @@ describe('', function () { }) cy.mount( - + ) diff --git a/services/web/test/frontend/helpers/editor-providers.jsx b/services/web/test/frontend/helpers/editor-providers.jsx index a6bc9c32c6..1fe143a8e3 100644 --- 
a/services/web/test/frontend/helpers/editor-providers.jsx +++ b/services/web/test/frontend/helpers/editor-providers.jsx @@ -1,7 +1,6 @@ // Disable prop type checks for test harnesses /* eslint-disable react/prop-types */ -import sinon from 'sinon' -import { get, merge } from 'lodash' +import { merge } from 'lodash' import { SocketIOMock } from '@/ide/connection/SocketIoShim' import { IdeContext } from '@/shared/context/ide-context' import React, { useEffect, useState } from 'react' @@ -48,8 +47,7 @@ export function EditorProviders({ compiler = 'pdflatex', socket = new SocketIOMock(), isRestrictedTokenMember = false, - clsiServerId = '1234', - scope = {}, + scope: defaultScope = {}, features = { referencesSearch: true, }, @@ -71,18 +69,6 @@ export function EditorProviders({ }, ], ui = { view: 'editor', pdfLayout: 'sideBySide', chatOpen: true }, - fileTreeManager = { - findEntityById: () => null, - findEntityByPath: () => null, - getEntityPath: () => '', - getRootDocDirname: () => '', - getPreviewByPath: path => ({ url: path, extension: 'png' }), - }, - editorManager = { - getCurrentDocumentId: () => 'foo', - getCurrentDocValue: () => {}, - openDoc: sinon.stub(), - }, userSettings = {}, providers = {}, }) { @@ -99,7 +85,9 @@ export function EditorProviders({ merge({}, defaultUserSettings, userSettings) ) - const $scope = merge( + window.metaAttributesCache.set('ol-capabilities', ['chat', 'dropbox']) + + const scope = merge( { user, editor: { @@ -117,31 +105,17 @@ export function EditorProviders({ name: PROJECT_NAME, owner: projectOwner, features: projectFeatures, - rootDoc_id: rootDocId, + rootDocId, rootFolder, imageName, compiler, }, ui, - $watch: (path, callback) => { - callback(get($scope, path)) - return () => null - }, - $on: sinon.stub(), - $applyAsync: sinon.stub(), permissionsLevel, }, - scope + defaultScope ) - window._ide = { - $scope, - socket, - clsiServerId, - editorManager, - fileTreeManager, - } - // Add details for useUserContext window.metaAttributesCache.set('ol-user', { ...user, features }) window.metaAttributesCache.set('ol-project_id', projectId) @@ -149,8 +123,8 @@ export function EditorProviders({ return ( @@ -159,79 +133,85 @@ export function EditorProviders({ ) } -const ConnectionProvider = ({ children }) => { - const [value] = useState(() => ({ - socket: window._ide.socket, - connectionState: { - readyState: WebSocket.OPEN, - forceDisconnected: false, - inactiveDisconnect: false, - reconnectAt: null, - forcedDisconnectDelay: 0, - lastConnectionAttempt: 0, - error: '', - }, - isConnected: true, - isStillReconnecting: false, - secondsUntilReconnect: () => 0, - tryReconnectNow: () => {}, - registerUserActivity: () => {}, - disconnect: () => {}, - })) - - return ( - - {children} - - ) -} - -const IdeReactProvider = ({ children }) => { - const [startedFreeTrial, setStartedFreeTrial] = useState(false) - - const [ideReactContextValue] = useState(() => ({ - projectId: PROJECT_ID, - eventEmitter: new IdeEventEmitter(), - startedFreeTrial, - setStartedFreeTrial, - reportError: () => {}, - projectJoined: true, - })) - - const [ideContextValue] = useState(() => { - const ide = window._ide - - const scopeStore = createReactScopeValueStore(PROJECT_ID) - for (const [key, value] of Object.entries(ide.$scope)) { - // TODO: path for nested entries - scopeStore.set(key, value) - } - scopeStore.set('editor.sharejs_doc', ide.$scope.editor.sharejs_doc) - scopeStore.set('ui.chatOpen', ide.$scope.ui.chatOpen) - const scopeEventEmitter = new ReactScopeEventEmitter(new IdeEventEmitter()) - 
- return { - ...ide, - scopeStore, - scopeEventEmitter, - } - }) - - useEffect(() => { - window.overleaf = { - ...window.overleaf, - unstable: { - ...window.overleaf?.unstable, - store: ideContextValue.scopeStore, +const makeConnectionProvider = socket => { + const ConnectionProvider = ({ children }) => { + const [value] = useState(() => ({ + socket, + connectionState: { + readyState: WebSocket.OPEN, + forceDisconnected: false, + inactiveDisconnect: false, + reconnectAt: null, + forcedDisconnectDelay: 0, + lastConnectionAttempt: 0, + error: '', }, - } - }, [ideContextValue.scopeStore]) + isConnected: true, + isStillReconnecting: false, + secondsUntilReconnect: () => 0, + tryReconnectNow: () => {}, + registerUserActivity: () => {}, + disconnect: () => {}, + })) - return ( - - + return ( + {children} - - - ) + + ) + } + return ConnectionProvider +} + +const makeIdeReactProvider = (scope, socket) => { + const IdeReactProvider = ({ children }) => { + const [startedFreeTrial, setStartedFreeTrial] = useState(false) + + const [ideReactContextValue] = useState(() => ({ + projectId: PROJECT_ID, + eventEmitter: new IdeEventEmitter(), + startedFreeTrial, + setStartedFreeTrial, + reportError: () => {}, + projectJoined: true, + })) + + const [ideContextValue] = useState(() => { + const scopeStore = createReactScopeValueStore(PROJECT_ID) + for (const [key, value] of Object.entries(scope)) { + // TODO: path for nested entries + scopeStore.set(key, value) + } + scopeStore.set('editor.sharejs_doc', scope.editor.sharejs_doc) + scopeStore.set('ui.chatOpen', scope.ui.chatOpen) + const scopeEventEmitter = new ReactScopeEventEmitter( + new IdeEventEmitter() + ) + + return { + socket, + scopeStore, + scopeEventEmitter, + } + }) + + useEffect(() => { + window.overleaf = { + ...window.overleaf, + unstable: { + ...window.overleaf?.unstable, + store: ideContextValue.scopeStore, + }, + } + }, [ideContextValue.scopeStore]) + + return ( + + + {children} + + + ) + } + return IdeReactProvider } diff --git a/services/web/test/frontend/helpers/render-with-context.jsx b/services/web/test/frontend/helpers/render-with-context.jsx index e3aba6264d..31ee64d5be 100644 --- a/services/web/test/frontend/helpers/render-with-context.jsx +++ b/services/web/test/frontend/helpers/render-with-context.jsx @@ -18,7 +18,3 @@ export function renderWithEditorContext( ...renderOptions, }) } - -export function cleanUpContext() { - delete window._ide -} diff --git a/services/web/test/frontend/helpers/reset-meta.ts b/services/web/test/frontend/helpers/reset-meta.ts index f5a979828a..e59e62342d 100644 --- a/services/web/test/frontend/helpers/reset-meta.ts +++ b/services/web/test/frontend/helpers/reset-meta.ts @@ -2,6 +2,7 @@ export function resetMeta() { window.metaAttributesCache = new Map() window.metaAttributesCache.set('ol-projectHistoryBlobsEnabled', true) window.metaAttributesCache.set('ol-i18n', { currentLangCode: 'en' }) + window.metaAttributesCache.set('ol-capabilities', ['chat', 'dropbox']) window.metaAttributesCache.set('ol-ExposedSettings', { appName: 'Overleaf', maxEntitiesPerProject: 10, diff --git a/services/web/test/frontend/ide/log-parser/logParserTests.js b/services/web/test/frontend/ide/log-parser/logParserTests.js index 098ee056b9..59cdd5d22e 100644 --- a/services/web/test/frontend/ide/log-parser/logParserTests.js +++ b/services/web/test/frontend/ide/log-parser/logParserTests.js @@ -6,7 +6,7 @@ const fixturePath = '../../helpers/fixtures/logs/' const fs = require('fs') const path = require('path') -describe('logParser', 
function (done) { +describe('logParser', function () { it('should parse errors', function () { const { errors } = parseLatexLog('errors.log', { ignoreDuplicates: true }) expect(errors.map(e => [e.line, e.message])).to.deep.equal([ diff --git a/services/web/test/unit/bootstrap.js b/services/web/test/unit/bootstrap.js index ee4a022c15..00bcc3e958 100644 --- a/services/web/test/unit/bootstrap.js +++ b/services/web/test/unit/bootstrap.js @@ -1,6 +1,7 @@ const Path = require('path') -const chai = require('chai') const sinon = require('sinon') +require('./common_bootstrap') +const chai = require('chai') /* * Chai configuration @@ -18,13 +19,6 @@ chai.use(require('chai-as-promised')) // Do not truncate assertion errors chai.config.truncateThreshold = 0 - -// add support for mongoose in sinon -require('sinon-mongoose') - -// ensure every ObjectId has the id string as a property for correct comparisons -require('mongodb-legacy').ObjectId.cacheHexString = true - /* * Global stubs */ diff --git a/services/web/test/unit/common_bootstrap.js b/services/web/test/unit/common_bootstrap.js new file mode 100644 index 0000000000..a77aad61c6 --- /dev/null +++ b/services/web/test/unit/common_bootstrap.js @@ -0,0 +1,5 @@ +// add support for mongoose in sinon +require('sinon-mongoose') + +// ensure every ObjectId has the id string as a property for correct comparisons +require('mongodb-legacy').ObjectId.cacheHexString = true diff --git a/services/web/test/unit/src/Analytics/AnalyticsController.test.mjs b/services/web/test/unit/src/Analytics/AnalyticsController.test.mjs new file mode 100644 index 0000000000..4019f2bce9 --- /dev/null +++ b/services/web/test/unit/src/Analytics/AnalyticsController.test.mjs @@ -0,0 +1,134 @@ +import { vi } from 'vitest' +import sinon from 'sinon' +import MockResponse from '../helpers/MockResponse.js' +const modulePath = new URL( + '../../../../app/src/Features/Analytics/AnalyticsController.mjs', + import.meta.url +).pathname + +describe('AnalyticsController', function () { + beforeEach(async function (ctx) { + ctx.SessionManager = { getLoggedInUserId: sinon.stub() } + + ctx.AnalyticsManager = { + updateEditingSession: sinon.stub(), + recordEventForSession: sinon.stub(), + } + + ctx.Features = { + hasFeature: sinon.stub().returns(true), + } + + vi.doMock( + '../../../../app/src/Features/Analytics/AnalyticsManager.js', + () => ({ + default: ctx.AnalyticsManager, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Authentication/SessionManager.js', + () => ({ + default: ctx.SessionManager, + }) + ) + + vi.doMock('../../../../app/src/infrastructure/Features.js', () => ({ + default: ctx.Features, + })) + + vi.doMock('../../../../app/src/infrastructure/GeoIpLookup.js', () => ({ + default: (ctx.GeoIpLookup = { + promises: { + getDetails: sinon.stub().resolves(), + }, + }), + })) + + ctx.controller = (await import(modulePath)).default + + ctx.res = new MockResponse() + }) + + describe('updateEditingSession', function () { + beforeEach(function (ctx) { + ctx.req = { + params: { + projectId: 'a project id', + }, + session: {}, + body: { + segmentation: { + editorType: 'abc', + }, + }, + } + ctx.GeoIpLookup.promises.getDetails = sinon + .stub() + .resolves({ country_code: 'XY' }) + }) + + it('delegates to the AnalyticsManager', function (ctx) { + return new Promise(resolve => { + ctx.SessionManager.getLoggedInUserId.returns('1234') + ctx.res.callback = () => { + sinon.assert.calledWith( + ctx.AnalyticsManager.updateEditingSession, + '1234', + 'a project id', + 'XY', + { editorType: 'abc' } + 
) + resolve() + } + ctx.controller.updateEditingSession(ctx.req, ctx.res) + }) + }) + }) + + describe('recordEvent', function () { + beforeEach(function (ctx) { + const body = { + foo: 'stuff', + _csrf: 'atoken123', + } + ctx.req = { + params: { + event: 'i_did_something', + }, + body, + sessionID: 'sessionIDHere', + session: {}, + } + + ctx.expectedData = Object.assign({}, body) + delete ctx.expectedData._csrf + }) + + it('should use the session', function (ctx) { + return new Promise(resolve => { + ctx.controller.recordEvent(ctx.req, ctx.res) + sinon.assert.calledWith( + ctx.AnalyticsManager.recordEventForSession, + ctx.req.session, + ctx.req.params.event, + ctx.expectedData + ) + resolve() + }) + }) + + it('should remove the CSRF token before sending to the manager', function (ctx) { + return new Promise(resolve => { + ctx.controller.recordEvent(ctx.req, ctx.res) + sinon.assert.calledWith( + ctx.AnalyticsManager.recordEventForSession, + ctx.req.session, + ctx.req.params.event, + ctx.expectedData + ) + resolve() + }) + }) + }) +}) diff --git a/services/web/test/unit/src/Analytics/AnalyticsControllerTests.mjs b/services/web/test/unit/src/Analytics/AnalyticsControllerTests.mjs deleted file mode 100644 index cba0e935db..0000000000 --- a/services/web/test/unit/src/Analytics/AnalyticsControllerTests.mjs +++ /dev/null @@ -1,113 +0,0 @@ -import esmock from 'esmock' -import sinon from 'sinon' -import MockResponse from '../helpers/MockResponse.js' -const modulePath = new URL( - '../../../../app/src/Features/Analytics/AnalyticsController.mjs', - import.meta.url -).pathname - -describe('AnalyticsController', function () { - beforeEach(async function () { - this.SessionManager = { getLoggedInUserId: sinon.stub() } - - this.AnalyticsManager = { - updateEditingSession: sinon.stub(), - recordEventForSession: sinon.stub(), - } - - this.Features = { - hasFeature: sinon.stub().returns(true), - } - - this.controller = await esmock.strict(modulePath, { - '../../../../app/src/Features/Analytics/AnalyticsManager.js': - this.AnalyticsManager, - '../../../../app/src/Features/Authentication/SessionManager.js': - this.SessionManager, - '../../../../app/src/infrastructure/Features.js': this.Features, - '../../../../app/src/infrastructure/GeoIpLookup.js': (this.GeoIpLookup = { - promises: { - getDetails: sinon.stub().resolves(), - }, - }), - }) - - this.res = new MockResponse() - }) - - describe('updateEditingSession', function () { - beforeEach(function () { - this.req = { - params: { - projectId: 'a project id', - }, - session: {}, - body: { - segmentation: { - editorType: 'abc', - }, - }, - } - this.GeoIpLookup.promises.getDetails = sinon - .stub() - .resolves({ country_code: 'XY' }) - }) - - it('delegates to the AnalyticsManager', function (done) { - this.SessionManager.getLoggedInUserId.returns('1234') - this.res.callback = () => { - sinon.assert.calledWith( - this.AnalyticsManager.updateEditingSession, - '1234', - 'a project id', - 'XY', - { editorType: 'abc' } - ) - done() - } - this.controller.updateEditingSession(this.req, this.res) - }) - }) - - describe('recordEvent', function () { - beforeEach(function () { - const body = { - foo: 'stuff', - _csrf: 'atoken123', - } - this.req = { - params: { - event: 'i_did_something', - }, - body, - sessionID: 'sessionIDHere', - session: {}, - } - - this.expectedData = Object.assign({}, body) - delete this.expectedData._csrf - }) - - it('should use the session', function (done) { - this.controller.recordEvent(this.req, this.res) - sinon.assert.calledWith( - 
this.AnalyticsManager.recordEventForSession, - this.req.session, - this.req.params.event, - this.expectedData - ) - done() - }) - - it('should remove the CSRF token before sending to the manager', function (done) { - this.controller.recordEvent(this.req, this.res) - sinon.assert.calledWith( - this.AnalyticsManager.recordEventForSession, - this.req.session, - this.req.params.event, - this.expectedData - ) - done() - }) - }) -}) diff --git a/services/web/test/unit/src/Analytics/AnalyticsUTMTrackingMiddleware.test.mjs b/services/web/test/unit/src/Analytics/AnalyticsUTMTrackingMiddleware.test.mjs new file mode 100644 index 0000000000..463407b180 --- /dev/null +++ b/services/web/test/unit/src/Analytics/AnalyticsUTMTrackingMiddleware.test.mjs @@ -0,0 +1,206 @@ +import { assert, vi } from 'vitest' +import sinon from 'sinon' +import MockRequest from '../helpers/MockRequest.js' +import MockResponse from '../helpers/MockResponse.js' + +const MODULE_PATH = new URL( + '../../../../app/src/Features/Analytics/AnalyticsUTMTrackingMiddleware', + import.meta.url +).pathname + +describe('AnalyticsUTMTrackingMiddleware', function () { + beforeEach(async function (ctx) { + ctx.analyticsId = 'ecdb935a-52f3-4f91-aebc-7a70d2ffbb55' + ctx.userId = '61795fcb013504bb7b663092' + + ctx.req = new MockRequest() + ctx.res = new MockResponse() + ctx.next = sinon.stub().returns() + ctx.req.session = { + user: { + _id: ctx.userId, + analyticsId: ctx.analyticsId, + }, + } + + vi.doMock( + '../../../../app/src/Features/Analytics/AnalyticsManager.js', + () => ({ + default: (ctx.AnalyticsManager = { + recordEventForSession: sinon.stub().resolves(), + setUserPropertyForSessionInBackground: sinon.stub(), + }), + }) + ) + + vi.doMock('@overleaf/settings', () => ({ + default: { + siteUrl: 'https://www.overleaf.com', + }, + })) + + ctx.AnalyticsUTMTrackingMiddleware = (await import(MODULE_PATH)).default + + ctx.middleware = ctx.AnalyticsUTMTrackingMiddleware.recordUTMTags() + }) + + describe('without UTM tags in query', function () { + beforeEach(function (ctx) { + ctx.req.url = '/project' + ctx.middleware(ctx.req, ctx.res, ctx.next) + }) + + it('user is not redirected', function (ctx) { + assert.isFalse(ctx.res.redirected) + }) + + it('next middleware is executed', function (ctx) { + sinon.assert.calledOnce(ctx.next) + }) + + it('no event or user property is recorded', function (ctx) { + sinon.assert.notCalled(ctx.AnalyticsManager.recordEventForSession) + sinon.assert.notCalled( + ctx.AnalyticsManager.setUserPropertyForSessionInBackground + ) + }) + }) + + describe('with all UTM tags in query', function () { + beforeEach(function (ctx) { + ctx.req.url = + '/project?utm_source=Organic&utm_medium=Facebook&utm_campaign=Some%20Campaign&utm_content=foo-bar&utm_term=overridden' + ctx.req.query = { + utm_source: 'Organic', + utm_medium: 'Facebook', + utm_campaign: 'Some Campaign', + utm_content: 'foo-bar', + utm_term: 'overridden', + } + ctx.middleware(ctx.req, ctx.res, ctx.next) + }) + + it('user is redirected', function (ctx) { + assert.isTrue(ctx.res.redirected) + assert.equal('/project', ctx.res.redirectedTo) + }) + + it('next middleware is not executed', function (ctx) { + sinon.assert.notCalled(ctx.next) + }) + + it('page-view event is recorded for session', function (ctx) { + sinon.assert.calledWith( + ctx.AnalyticsManager.recordEventForSession, + ctx.req.session, + 'page-view', + { + path: '/project', + utm_source: 'Organic', + utm_medium: 'Facebook', + utm_campaign: 'Some Campaign', + utm_content: 'foo-bar', + utm_term: 
'overridden', + } + ) + }) + + it('utm-tags user property is set for session', function (ctx) { + sinon.assert.calledWith( + ctx.AnalyticsManager.setUserPropertyForSessionInBackground, + ctx.req.session, + 'utm-tags', + 'Organic;Facebook;Some Campaign;foo-bar' + ) + }) + }) + + describe('with some UTM tags in query', function () { + beforeEach(function (ctx) { + ctx.req.url = + '/project?utm_medium=Facebook&utm_campaign=Some%20Campaign&utm_term=foo' + ctx.req.query = { + utm_medium: 'Facebook', + utm_campaign: 'Some Campaign', + utm_term: 'foo', + } + ctx.middleware(ctx.req, ctx.res, ctx.next) + }) + + it('user is redirected', function (ctx) { + assert.isTrue(ctx.res.redirected) + assert.equal('/project', ctx.res.redirectedTo) + }) + + it('next middleware is not executed', function (ctx) { + sinon.assert.notCalled(ctx.next) + }) + + it('page-view event is recorded for session', function (ctx) { + sinon.assert.calledWith( + ctx.AnalyticsManager.recordEventForSession, + ctx.req.session, + 'page-view', + { + path: '/project', + utm_medium: 'Facebook', + utm_campaign: 'Some Campaign', + utm_term: 'foo', + } + ) + }) + + it('utm-tags user property is set for session', function (ctx) { + sinon.assert.calledWith( + ctx.AnalyticsManager.setUserPropertyForSessionInBackground, + ctx.req.session, + 'utm-tags', + 'N/A;Facebook;Some Campaign;foo' + ) + }) + }) + + describe('with some UTM tags and additional parameters in query', function () { + beforeEach(function (ctx) { + ctx.req.url = + '/project?utm_medium=Facebook&utm_campaign=Some%20Campaign&other_param=some-value' + ctx.req.query = { + utm_medium: 'Facebook', + utm_campaign: 'Some Campaign', + other_param: 'some-value', + } + ctx.middleware(ctx.req, ctx.res, ctx.next) + }) + + it('user is redirected', function (ctx) { + assert.isTrue(ctx.res.redirected) + assert.equal('/project?other_param=some-value', ctx.res.redirectedTo) + }) + + it('next middleware is not executed', function (ctx) { + sinon.assert.notCalled(ctx.next) + }) + + it('page-view event is recorded for session', function (ctx) { + sinon.assert.calledWith( + ctx.AnalyticsManager.recordEventForSession, + ctx.req.session, + 'page-view', + { + path: '/project', + utm_medium: 'Facebook', + utm_campaign: 'Some Campaign', + } + ) + }) + + it('utm-tags user property is set for session', function (ctx) { + sinon.assert.calledWith( + ctx.AnalyticsManager.setUserPropertyForSessionInBackground, + ctx.req.session, + 'utm-tags', + 'N/A;Facebook;Some Campaign;N/A' + ) + }) + }) +}) diff --git a/services/web/test/unit/src/Analytics/AnalyticsUTMTrackingMiddlewareTests.mjs b/services/web/test/unit/src/Analytics/AnalyticsUTMTrackingMiddlewareTests.mjs deleted file mode 100644 index 461a2a70d1..0000000000 --- a/services/web/test/unit/src/Analytics/AnalyticsUTMTrackingMiddlewareTests.mjs +++ /dev/null @@ -1,200 +0,0 @@ -import esmock from 'esmock' -import sinon from 'sinon' -import MockRequest from '../helpers/MockRequest.js' -import MockResponse from '../helpers/MockResponse.js' -import { assert } from 'chai' - -const MODULE_PATH = new URL( - '../../../../app/src/Features/Analytics/AnalyticsUTMTrackingMiddleware', - import.meta.url -).pathname - -describe('AnalyticsUTMTrackingMiddleware', function () { - beforeEach(async function () { - this.analyticsId = 'ecdb935a-52f3-4f91-aebc-7a70d2ffbb55' - this.userId = '61795fcb013504bb7b663092' - - this.req = new MockRequest() - this.res = new MockResponse() - this.next = sinon.stub().returns() - this.req.session = { - user: { - _id: this.userId, - analyticsId: 
this.analyticsId, - }, - } - - this.AnalyticsUTMTrackingMiddleware = await esmock.strict(MODULE_PATH, { - '../../../../app/src/Features/Analytics/AnalyticsManager.js': - (this.AnalyticsManager = { - recordEventForSession: sinon.stub().resolves(), - setUserPropertyForSessionInBackground: sinon.stub(), - }), - '@overleaf/settings': { - siteUrl: 'https://www.overleaf.com', - }, - }) - - this.middleware = this.AnalyticsUTMTrackingMiddleware.recordUTMTags() - }) - - describe('without UTM tags in query', function () { - beforeEach(function () { - this.req.url = '/project' - this.middleware(this.req, this.res, this.next) - }) - - it('user is not redirected', function () { - assert.isFalse(this.res.redirected) - }) - - it('next middleware is executed', function () { - sinon.assert.calledOnce(this.next) - }) - - it('no event or user property is recorded', function () { - sinon.assert.notCalled(this.AnalyticsManager.recordEventForSession) - sinon.assert.notCalled( - this.AnalyticsManager.setUserPropertyForSessionInBackground - ) - }) - }) - - describe('with all UTM tags in query', function () { - beforeEach(function () { - this.req.url = - '/project?utm_source=Organic&utm_medium=Facebook&utm_campaign=Some%20Campaign&utm_content=foo-bar&utm_term=overridden' - this.req.query = { - utm_source: 'Organic', - utm_medium: 'Facebook', - utm_campaign: 'Some Campaign', - utm_content: 'foo-bar', - utm_term: 'overridden', - } - this.middleware(this.req, this.res, this.next) - }) - - it('user is redirected', function () { - assert.isTrue(this.res.redirected) - assert.equal('/project', this.res.redirectedTo) - }) - - it('next middleware is not executed', function () { - sinon.assert.notCalled(this.next) - }) - - it('page-view event is recorded for session', function () { - sinon.assert.calledWith( - this.AnalyticsManager.recordEventForSession, - this.req.session, - 'page-view', - { - path: '/project', - utm_source: 'Organic', - utm_medium: 'Facebook', - utm_campaign: 'Some Campaign', - utm_content: 'foo-bar', - utm_term: 'overridden', - } - ) - }) - - it('utm-tags user property is set for session', function () { - sinon.assert.calledWith( - this.AnalyticsManager.setUserPropertyForSessionInBackground, - this.req.session, - 'utm-tags', - 'Organic;Facebook;Some Campaign;foo-bar' - ) - }) - }) - - describe('with some UTM tags in query', function () { - beforeEach(function () { - this.req.url = - '/project?utm_medium=Facebook&utm_campaign=Some%20Campaign&utm_term=foo' - this.req.query = { - utm_medium: 'Facebook', - utm_campaign: 'Some Campaign', - utm_term: 'foo', - } - this.middleware(this.req, this.res, this.next) - }) - - it('user is redirected', function () { - assert.isTrue(this.res.redirected) - assert.equal('/project', this.res.redirectedTo) - }) - - it('next middleware is not executed', function () { - sinon.assert.notCalled(this.next) - }) - - it('page-view event is recorded for session', function () { - sinon.assert.calledWith( - this.AnalyticsManager.recordEventForSession, - this.req.session, - 'page-view', - { - path: '/project', - utm_medium: 'Facebook', - utm_campaign: 'Some Campaign', - utm_term: 'foo', - } - ) - }) - - it('utm-tags user property is set for session', function () { - sinon.assert.calledWith( - this.AnalyticsManager.setUserPropertyForSessionInBackground, - this.req.session, - 'utm-tags', - 'N/A;Facebook;Some Campaign;foo' - ) - }) - }) - - describe('with some UTM tags and additional parameters in query', function () { - beforeEach(function () { - this.req.url = - 
'/project?utm_medium=Facebook&utm_campaign=Some%20Campaign&other_param=some-value' - this.req.query = { - utm_medium: 'Facebook', - utm_campaign: 'Some Campaign', - other_param: 'some-value', - } - this.middleware(this.req, this.res, this.next) - }) - - it('user is redirected', function () { - assert.isTrue(this.res.redirected) - assert.equal('/project?other_param=some-value', this.res.redirectedTo) - }) - - it('next middleware is not executed', function () { - sinon.assert.notCalled(this.next) - }) - - it('page-view event is recorded for session', function () { - sinon.assert.calledWith( - this.AnalyticsManager.recordEventForSession, - this.req.session, - 'page-view', - { - path: '/project', - utm_medium: 'Facebook', - utm_campaign: 'Some Campaign', - } - ) - }) - - it('utm-tags user property is set for session', function () { - sinon.assert.calledWith( - this.AnalyticsManager.setUserPropertyForSessionInBackground, - this.req.session, - 'utm-tags', - 'N/A;Facebook;Some Campaign;N/A' - ) - }) - }) -}) diff --git a/services/web/test/unit/src/Authorization/AuthorizationManagerTests.js b/services/web/test/unit/src/Authorization/AuthorizationManagerTests.js index 7463bbdeb7..e4c67d2f77 100644 --- a/services/web/test/unit/src/Authorization/AuthorizationManagerTests.js +++ b/services/web/test/unit/src/Authorization/AuthorizationManagerTests.js @@ -27,7 +27,10 @@ describe('AuthorizationManager', function () { this.CollaboratorsGetter = { promises: { - getMemberIdPrivilegeLevel: sinon.stub().resolves(PrivilegeLevels.NONE), + getProjectAccess: sinon.stub().resolves({ + publicAccessLevel: sinon.stub().returns(PublicAccessLevels.PRIVATE), + privilegeLevelForUser: sinon.stub().returns(PrivilegeLevels.NONE), + }), }, } @@ -113,9 +116,17 @@ describe('AuthorizationManager', function () { describe('with a user id with a privilege level', function () { beforeEach(async function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel - .withArgs(this.user._id, this.project._id) - .resolves(PrivilegeLevels.READ_ONLY) + this.CollaboratorsGetter.promises.getProjectAccess + .withArgs(this.project._id) + .resolves({ + publicAccessLevel: sinon + .stub() + .returns(PublicAccessLevels.PRIVATE), + privilegeLevelForUser: sinon + .stub() + .withArgs(this.user._id) + .returns(PrivilegeLevels.READ_ONLY), + }) this.result = await this.AuthorizationManager.promises.getPrivilegeLevelForProject( this.user._id, @@ -171,8 +182,8 @@ describe('AuthorizationManager', function () { ) }) - it('should not call CollaboratorsGetter.getMemberIdPrivilegeLevel', function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel.called.should.equal( + it('should not call CollaboratorsGetter.getProjectAccess', function () { + this.CollaboratorsGetter.promises.getProjectAccess.called.should.equal( false ) }) @@ -204,8 +215,8 @@ describe('AuthorizationManager', function () { ) }) - it('should not call CollaboratorsGetter.getMemberIdPrivilegeLevel', function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel.called.should.equal( + it('should not call CollaboratorsGetter.getProjectAccess', function () { + this.CollaboratorsGetter.promises.getProjectAccess.called.should.equal( false ) }) @@ -237,8 +248,8 @@ describe('AuthorizationManager', function () { ) }) - it('should not call CollaboratorsGetter.getMemberIdPrivilegeLevel', function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel.called.should.equal( + it('should not call CollaboratorsGetter.getProjectAccess', function () { + 
this.CollaboratorsGetter.promises.getProjectAccess.called.should.equal( false ) }) @@ -264,9 +275,17 @@ describe('AuthorizationManager', function () { describe('with a user id with a privilege level', function () { beforeEach(async function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel - .withArgs(this.user._id, this.project._id) - .resolves(PrivilegeLevels.READ_ONLY) + this.CollaboratorsGetter.promises.getProjectAccess + .withArgs(this.project._id) + .resolves({ + publicAccessLevel: sinon + .stub() + .returns(PublicAccessLevels.PRIVATE), + privilegeLevelForUser: sinon + .stub() + .withArgs(this.user._id) + .returns(PrivilegeLevels.READ_ONLY), + }) this.result = await this.AuthorizationManager.promises.getPrivilegeLevelForProject( this.user._id, @@ -321,8 +340,8 @@ describe('AuthorizationManager', function () { ) }) - it('should not call CollaboratorsGetter.getMemberIdPrivilegeLevel', function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel.called.should.equal( + it('should not call CollaboratorsGetter.getProjectAccess', function () { + this.CollaboratorsGetter.promises.getProjectAccess.called.should.equal( false ) }) @@ -336,13 +355,32 @@ describe('AuthorizationManager', function () { describe('with a public project', function () { beforeEach(function () { this.project.publicAccesLevel = 'readAndWrite' + this.CollaboratorsGetter.promises.getProjectAccess + .withArgs(this.project._id) + .resolves({ + publicAccessLevel: sinon + .stub() + .returns(this.project.publicAccesLevel), + privilegeLevelForUser: sinon + .stub() + .withArgs(this.user._id) + .returns(PrivilegeLevels.NONE), + }) }) describe('with a user id with a privilege level', function () { beforeEach(async function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel - .withArgs(this.user._id, this.project._id) - .resolves(PrivilegeLevels.READ_ONLY) + this.CollaboratorsGetter.promises.getProjectAccess + .withArgs(this.project._id) + .resolves({ + publicAccessLevel: sinon + .stub() + .returns(this.project.publicAccesLevel), + privilegeLevelForUser: sinon + .stub() + .withArgs(this.user._id) + .returns(PrivilegeLevels.READ_ONLY), + }) this.result = await this.AuthorizationManager.promises.getPrivilegeLevelForProject( this.user._id, @@ -397,8 +435,8 @@ describe('AuthorizationManager', function () { ) }) - it('should not call CollaboratorsGetter.getMemberIdPrivilegeLevel', function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel.called.should.equal( + it('should not call CollaboratorsGetter.getProjectAccess', function () { + this.CollaboratorsGetter.promises.getProjectAccess.called.should.equal( false ) }) @@ -410,6 +448,11 @@ describe('AuthorizationManager', function () { }) describe("when the project doesn't exist", function () { + beforeEach(function () { + this.CollaboratorsGetter.promises.getProjectAccess.rejects( + new Errors.NotFoundError() + ) + }) it('should return a NotFoundError', async function () { const someOtherId = new ObjectId() await expect( @@ -424,9 +467,15 @@ describe('AuthorizationManager', function () { describe('when the project id is not valid', function () { beforeEach(function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel - .withArgs(this.user._id, this.project._id) - .resolves(PrivilegeLevels.READ_ONLY) + this.CollaboratorsGetter.promises.getProjectAccess + .withArgs(this.project._id) + .resolves({ + publicAccessLevel: sinon.stub().returns(PublicAccessLevels.PRIVATE), + privilegeLevelForUser: sinon + .stub() + 
.withArgs(this.user._id) + .returns(PrivilegeLevels.READ_ONLY), + }) }) it('should return a error', async function () { @@ -529,9 +578,15 @@ describe('AuthorizationManager', function () { describe('canUserDeleteOrResolveThread', function () { it('should return true when user has write permissions', async function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel - .withArgs(this.user._id, this.project._id) - .resolves(PrivilegeLevels.READ_AND_WRITE) + this.CollaboratorsGetter.promises.getProjectAccess + .withArgs(this.project._id) + .resolves({ + publicAccessLevel: sinon.stub().returns(PublicAccessLevels.PRIVATE), + privilegeLevelForUser: sinon + .stub() + .withArgs(this.user._id) + .returns(PrivilegeLevels.READ_AND_WRITE), + }) const canResolve = await this.AuthorizationManager.promises.canUserDeleteOrResolveThread( @@ -546,9 +601,15 @@ describe('AuthorizationManager', function () { }) it('should return false when user has read permission', async function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel - .withArgs(this.user._id, this.project._id) - .resolves(PrivilegeLevels.READ_ONLY) + this.CollaboratorsGetter.promises.getProjectAccess + .withArgs(this.project._id) + .resolves({ + publicAccessLevel: sinon.stub().returns(PublicAccessLevels.PRIVATE), + privilegeLevelForUser: sinon + .stub() + .withArgs(this.user._id) + .returns(PrivilegeLevels.READ_ONLY), + }) const canResolve = await this.AuthorizationManager.promises.canUserDeleteOrResolveThread( @@ -564,9 +625,15 @@ describe('AuthorizationManager', function () { describe('when user has review permission', function () { beforeEach(function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel - .withArgs(this.user._id, this.project._id) - .resolves(PrivilegeLevels.REVIEW) + this.CollaboratorsGetter.promises.getProjectAccess + .withArgs(this.project._id) + .resolves({ + publicAccessLevel: sinon.stub().returns(PublicAccessLevels.PRIVATE), + privilegeLevelForUser: sinon + .stub() + .withArgs(this.user._id) + .returns(PrivilegeLevels.REVIEW), + }) }) it('should return false when user is not the comment author', async function () { @@ -691,15 +758,27 @@ function testPermission(permission, privilegeLevels) { function setupUserPrivilegeLevel(privilegeLevel) { beforeEach(`set user privilege level to ${privilegeLevel}`, function () { - this.CollaboratorsGetter.promises.getMemberIdPrivilegeLevel - .withArgs(this.user._id, this.project._id) - .resolves(privilegeLevel) + this.CollaboratorsGetter.promises.getProjectAccess + .withArgs(this.project._id) + .resolves({ + publicAccessLevel: sinon.stub().returns(PublicAccessLevels.PRIVATE), + privilegeLevelForUser: sinon + .stub() + .withArgs(this.user._id) + .returns(privilegeLevel), + }) }) } function setupPublicAccessLevel(level) { beforeEach(`set public access level to ${level}`, function () { this.project.publicAccesLevel = level + this.CollaboratorsGetter.promises.getProjectAccess + .withArgs(this.project._id) + .resolves({ + publicAccessLevel: sinon.stub().returns(this.project.publicAccesLevel), + privilegeLevelForUser: sinon.stub().returns(PrivilegeLevels.NONE), + }) }) } diff --git a/services/web/test/unit/src/BetaProgram/BetaProgramController.test.mjs b/services/web/test/unit/src/BetaProgram/BetaProgramController.test.mjs new file mode 100644 index 0000000000..23dd4dc1c8 --- /dev/null +++ b/services/web/test/unit/src/BetaProgram/BetaProgramController.test.mjs @@ -0,0 +1,241 @@ +import { expect, vi } from 'vitest' +import path from 'node:path' 
+import sinon from 'sinon' +import MockResponse from '../helpers/MockResponse.js' +import { fileURLToPath } from 'node:url' + +const __dirname = fileURLToPath(new URL('.', import.meta.url)) + +const modulePath = path.join( + __dirname, + '../../../../app/src/Features/BetaProgram/BetaProgramController' +) + +describe('BetaProgramController', function () { + beforeEach(async function (ctx) { + ctx.user = { + _id: (ctx.user_id = 'a_simple_id'), + email: 'user@example.com', + features: {}, + betaProgram: false, + } + ctx.req = { + query: {}, + session: { + user: ctx.user, + }, + } + ctx.SplitTestSessionHandler = { + promises: { + sessionMaintenance: sinon.stub(), + }, + } + + vi.doMock( + '../../../../app/src/Features/SplitTests/SplitTestSessionHandler', + () => ({ + default: ctx.SplitTestSessionHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/BetaProgram/BetaProgramHandler', + () => ({ + default: (ctx.BetaProgramHandler = { + promises: { + optIn: sinon.stub().resolves(), + optOut: sinon.stub().resolves(), + }, + }), + }) + ) + + vi.doMock('../../../../app/src/Features/User/UserGetter', () => ({ + default: (ctx.UserGetter = { + promises: { + getUser: sinon.stub().resolves(), + }, + }), + })) + + vi.doMock('@overleaf/settings', () => ({ + default: (ctx.settings = { + languages: {}, + }), + })) + + vi.doMock( + '../../../../app/src/Features/Authentication/AuthenticationController', + () => ({ + default: (ctx.AuthenticationController = { + getLoggedInUserId: sinon.stub().returns(ctx.user._id), + }), + }) + ) + + ctx.BetaProgramController = (await import(modulePath)).default + ctx.res = new MockResponse() + ctx.next = sinon.stub() + }) + + describe('optIn', function () { + it("should redirect to '/beta/participate'", function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + ctx.res.redirectedTo.should.equal('/beta/participate') + resolve() + } + ctx.BetaProgramController.optIn(ctx.req, ctx.res, resolve) + }) + }) + + it('should not call next with an error', function (ctx) { + ctx.BetaProgramController.optIn(ctx.req, ctx.res, ctx.next) + ctx.next.callCount.should.equal(0) + }) + + it('should call BetaProgramHandler.optIn', function (ctx) { + ctx.BetaProgramController.optIn(ctx.req, ctx.res, ctx.next) + ctx.BetaProgramHandler.promises.optIn.callCount.should.equal(1) + }) + + it('should invoke the session maintenance', function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + ctx.SplitTestSessionHandler.promises.sessionMaintenance.should.have.been.calledWith( + ctx.req + ) + resolve() + } + ctx.BetaProgramController.optIn(ctx.req, ctx.res, resolve) + }) + }) + + describe('when BetaProgramHandler.opIn produces an error', function () { + beforeEach(function (ctx) { + ctx.BetaProgramHandler.promises.optIn.throws(new Error('woops')) + }) + + it("should not redirect to '/beta/participate'", function (ctx) { + ctx.BetaProgramController.optIn(ctx.req, ctx.res, ctx.next) + ctx.res.redirect.callCount.should.equal(0) + }) + + it('should produce an error', function (ctx) { + return new Promise(resolve => { + ctx.BetaProgramController.optIn(ctx.req, ctx.res, err => { + expect(err).to.be.instanceof(Error) + resolve() + }) + }) + }) + }) + }) + + describe('optOut', function () { + it("should redirect to '/beta/participate'", function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + expect(ctx.res.redirectedTo).to.equal('/beta/participate') + resolve() + } + ctx.BetaProgramController.optOut(ctx.req, ctx.res, resolve) + }) + }) + 
+ it('should not call next with an error', function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + ctx.next.callCount.should.equal(0) + resolve() + } + ctx.BetaProgramController.optOut(ctx.req, ctx.res, resolve) + }) + }) + + it('should call BetaProgramHandler.optOut', function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + ctx.BetaProgramHandler.promises.optOut.callCount.should.equal(1) + resolve() + } + ctx.BetaProgramController.optOut(ctx.req, ctx.res, resolve) + }) + }) + + it('should invoke the session maintenance', function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + ctx.SplitTestSessionHandler.promises.sessionMaintenance.should.have.been.calledWith( + ctx.req, + null + ) + resolve() + } + ctx.BetaProgramController.optOut(ctx.req, ctx.res, resolve) + }) + }) + + describe('when BetaProgramHandler.optOut produces an error', function () { + beforeEach(function (ctx) { + ctx.BetaProgramHandler.promises.optOut.throws(new Error('woops')) + }) + + it("should not redirect to '/beta/participate'", function (ctx) { + return new Promise(resolve => { + ctx.BetaProgramController.optOut(ctx.req, ctx.res, error => { + expect(error).to.exist + expect(ctx.res.redirected).to.equal(false) + resolve() + }) + }) + }) + + it('should produce an error', function (ctx) { + return new Promise(resolve => { + ctx.BetaProgramController.optOut(ctx.req, ctx.res, error => { + expect(error).to.exist + resolve() + }) + }) + }) + }) + }) + + describe('optInPage', function () { + beforeEach(function (ctx) { + ctx.UserGetter.promises.getUser.resolves(ctx.user) + }) + + it('should render the opt-in page', function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + expect(ctx.res.renderedTemplate).to.equal('beta_program/opt_in') + resolve() + } + ctx.BetaProgramController.optInPage(ctx.req, ctx.res, resolve) + }) + }) + + describe('when UserGetter.getUser produces an error', function () { + beforeEach(function (ctx) { + ctx.UserGetter.promises.getUser.throws(new Error('woops')) + }) + + it('should not render the opt-in page', function (ctx) { + ctx.BetaProgramController.optInPage(ctx.req, ctx.res, ctx.next) + ctx.res.render.callCount.should.equal(0) + }) + + it('should produce an error', function (ctx) { + return new Promise(resolve => { + ctx.BetaProgramController.optInPage(ctx.req, ctx.res, error => { + expect(error).to.exist + expect(error).to.be.instanceof(Error) + resolve() + }) + }) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/BetaProgram/BetaProgramControllerTests.mjs b/services/web/test/unit/src/BetaProgram/BetaProgramControllerTests.mjs deleted file mode 100644 index 78747b8880..0000000000 --- a/services/web/test/unit/src/BetaProgram/BetaProgramControllerTests.mjs +++ /dev/null @@ -1,199 +0,0 @@ -import esmock from 'esmock' -import path from 'node:path' -import sinon from 'sinon' -import { expect } from 'chai' -import MockResponse from '../helpers/MockResponse.js' -import { fileURLToPath } from 'node:url' - -const __dirname = fileURLToPath(new URL('.', import.meta.url)) - -const modulePath = path.join( - __dirname, - '../../../../app/src/Features/BetaProgram/BetaProgramController' -) - -describe('BetaProgramController', function () { - beforeEach(async function () { - this.user = { - _id: (this.user_id = 'a_simple_id'), - email: 'user@example.com', - features: {}, - betaProgram: false, - } - this.req = { - query: {}, - session: { - user: this.user, - }, - } - this.SplitTestSessionHandler = { - 
promises: { - sessionMaintenance: sinon.stub(), - }, - } - this.BetaProgramController = await esmock.strict(modulePath, { - '../../../../app/src/Features/SplitTests/SplitTestSessionHandler': - this.SplitTestSessionHandler, - '../../../../app/src/Features/BetaProgram/BetaProgramHandler': - (this.BetaProgramHandler = { - promises: { - optIn: sinon.stub().resolves(), - optOut: sinon.stub().resolves(), - }, - }), - '../../../../app/src/Features/User/UserGetter': (this.UserGetter = { - promises: { - getUser: sinon.stub().resolves(), - }, - }), - '@overleaf/settings': (this.settings = { - languages: {}, - }), - '../../../../app/src/Features/Authentication/AuthenticationController': - (this.AuthenticationController = { - getLoggedInUserId: sinon.stub().returns(this.user._id), - }), - }) - this.res = new MockResponse() - this.next = sinon.stub() - }) - - describe('optIn', function () { - it("should redirect to '/beta/participate'", function (done) { - this.res.callback = () => { - this.res.redirectedTo.should.equal('/beta/participate') - done() - } - this.BetaProgramController.optIn(this.req, this.res, done) - }) - - it('should not call next with an error', function () { - this.BetaProgramController.optIn(this.req, this.res, this.next) - this.next.callCount.should.equal(0) - }) - - it('should call BetaProgramHandler.optIn', function () { - this.BetaProgramController.optIn(this.req, this.res, this.next) - this.BetaProgramHandler.promises.optIn.callCount.should.equal(1) - }) - - it('should invoke the session maintenance', function (done) { - this.res.callback = () => { - this.SplitTestSessionHandler.promises.sessionMaintenance.should.have.been.calledWith( - this.req - ) - done() - } - this.BetaProgramController.optIn(this.req, this.res, done) - }) - - describe('when BetaProgramHandler.opIn produces an error', function () { - beforeEach(function () { - this.BetaProgramHandler.promises.optIn.throws(new Error('woops')) - }) - - it("should not redirect to '/beta/participate'", function () { - this.BetaProgramController.optIn(this.req, this.res, this.next) - this.res.redirect.callCount.should.equal(0) - }) - - it('should produce an error', function (done) { - this.BetaProgramController.optIn(this.req, this.res, err => { - expect(err).to.be.instanceof(Error) - done() - }) - }) - }) - }) - - describe('optOut', function () { - it("should redirect to '/beta/participate'", function (done) { - this.res.callback = () => { - expect(this.res.redirectedTo).to.equal('/beta/participate') - done() - } - this.BetaProgramController.optOut(this.req, this.res, done) - }) - - it('should not call next with an error', function (done) { - this.res.callback = () => { - this.next.callCount.should.equal(0) - done() - } - this.BetaProgramController.optOut(this.req, this.res, done) - }) - - it('should call BetaProgramHandler.optOut', function (done) { - this.res.callback = () => { - this.BetaProgramHandler.promises.optOut.callCount.should.equal(1) - done() - } - this.BetaProgramController.optOut(this.req, this.res, done) - }) - - it('should invoke the session maintenance', function (done) { - this.res.callback = () => { - this.SplitTestSessionHandler.promises.sessionMaintenance.should.have.been.calledWith( - this.req, - null - ) - done() - } - this.BetaProgramController.optOut(this.req, this.res, done) - }) - - describe('when BetaProgramHandler.optOut produces an error', function () { - beforeEach(function () { - this.BetaProgramHandler.promises.optOut.throws(new Error('woops')) - }) - - it("should not redirect to 
'/beta/participate'", function (done) { - this.BetaProgramController.optOut(this.req, this.res, error => { - expect(error).to.exist - expect(this.res.redirected).to.equal(false) - done() - }) - }) - - it('should produce an error', function (done) { - this.BetaProgramController.optOut(this.req, this.res, error => { - expect(error).to.exist - done() - }) - }) - }) - }) - - describe('optInPage', function () { - beforeEach(function () { - this.UserGetter.promises.getUser.resolves(this.user) - }) - - it('should render the opt-in page', function (done) { - this.res.callback = () => { - expect(this.res.renderedTemplate).to.equal('beta_program/opt_in') - done() - } - this.BetaProgramController.optInPage(this.req, this.res, done) - }) - - describe('when UserGetter.getUser produces an error', function () { - beforeEach(function () { - this.UserGetter.promises.getUser.throws(new Error('woops')) - }) - - it('should not render the opt-in page', function () { - this.BetaProgramController.optInPage(this.req, this.res, this.next) - this.res.render.callCount.should.equal(0) - }) - - it('should produce an error', function (done) { - this.BetaProgramController.optInPage(this.req, this.res, error => { - expect(error).to.exist - expect(error).to.be.instanceof(Error) - done() - }) - }) - }) - }) -}) diff --git a/services/web/test/unit/src/BetaProgram/BetaProgramHandler.test.mjs b/services/web/test/unit/src/BetaProgram/BetaProgramHandler.test.mjs new file mode 100644 index 0000000000..4034835666 --- /dev/null +++ b/services/web/test/unit/src/BetaProgram/BetaProgramHandler.test.mjs @@ -0,0 +1,168 @@ +import { expect, vi } from 'vitest' +import path from 'node:path' + +import sinon from 'sinon' +import { fileURLToPath } from 'node:url' + +const __dirname = fileURLToPath(new URL('.', import.meta.url)) + +const modulePath = path.join( + __dirname, + '../../../../app/src/Features/BetaProgram/BetaProgramHandler' +) + +describe('BetaProgramHandler', function () { + beforeEach(async function (ctx) { + ctx.user_id = 'some_id' + ctx.user = { + _id: ctx.user_id, + email: 'user@example.com', + features: {}, + betaProgram: false, + save: sinon.stub().callsArgWith(0, null), + } + + vi.doMock('@overleaf/metrics', () => ({ + default: { + inc: sinon.stub(), + }, + })) + + vi.doMock('../../../../app/src/Features/User/UserUpdater', () => ({ + default: (ctx.UserUpdater = { + promises: { + updateUser: sinon.stub().resolves(), + }, + }), + })) + + vi.doMock( + '../../../../app/src/Features/Analytics/AnalyticsManager', + () => ({ + default: (ctx.AnalyticsManager = { + setUserPropertyForUserInBackground: sinon.stub(), + }), + }) + ) + + ctx.handler = (await import(modulePath)).default + }) + + describe('optIn', function () { + beforeEach(function (ctx) { + ctx.user.betaProgram = false + ctx.call = callback => { + ctx.handler.optIn(ctx.user_id, callback) + } + }) + + it('should call userUpdater', function (ctx) { + return new Promise(resolve => { + ctx.call(err => { + expect(err).to.not.exist + ctx.UserUpdater.promises.updateUser.callCount.should.equal(1) + resolve() + }) + }) + }) + + it('should set beta-program user property to true', function (ctx) { + return new Promise(resolve => { + ctx.call(err => { + expect(err).to.not.exist + sinon.assert.calledWith( + ctx.AnalyticsManager.setUserPropertyForUserInBackground, + ctx.user_id, + 'beta-program', + true + ) + resolve() + }) + }) + }) + + it('should not produce an error', function (ctx) { + return new Promise(resolve => { + ctx.call(err => { + expect(err).to.not.exist + resolve() + }) 
+ }) + }) + + describe('when userUpdater produces an error', function () { + beforeEach(function (ctx) { + ctx.UserUpdater.promises.updateUser.rejects() + }) + + it('should produce an error', function (ctx) { + return new Promise(resolve => { + ctx.call(err => { + expect(err).to.exist + expect(err).to.be.instanceof(Error) + resolve() + }) + }) + }) + }) + }) + + describe('optOut', function () { + beforeEach(function (ctx) { + ctx.user.betaProgram = true + ctx.call = callback => { + ctx.handler.optOut(ctx.user_id, callback) + } + }) + + it('should call userUpdater', function (ctx) { + return new Promise(resolve => { + ctx.call(err => { + expect(err).to.not.exist + ctx.UserUpdater.promises.updateUser.callCount.should.equal(1) + resolve() + }) + }) + }) + + it('should set beta-program user property to false', function (ctx) { + return new Promise(resolve => { + ctx.call(err => { + expect(err).to.not.exist + sinon.assert.calledWith( + ctx.AnalyticsManager.setUserPropertyForUserInBackground, + ctx.user_id, + 'beta-program', + false + ) + resolve() + }) + }) + }) + + it('should not produce an error', function (ctx) { + return new Promise(resolve => { + ctx.call(err => { + expect(err).to.not.exist + resolve() + }) + }) + }) + + describe('when userUpdater produces an error', function () { + beforeEach(function (ctx) { + ctx.UserUpdater.promises.updateUser.rejects() + }) + + it('should produce an error', function (ctx) { + return new Promise(resolve => { + ctx.call(err => { + expect(err).to.exist + expect(err).to.be.instanceof(Error) + resolve() + }) + }) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/BetaProgram/BetaProgramHandlerTests.mjs b/services/web/test/unit/src/BetaProgram/BetaProgramHandlerTests.mjs deleted file mode 100644 index 2b72271fd5..0000000000 --- a/services/web/test/unit/src/BetaProgram/BetaProgramHandlerTests.mjs +++ /dev/null @@ -1,142 +0,0 @@ -import esmock from 'esmock' -import path from 'node:path' - -import sinon from 'sinon' -import { expect } from 'chai' -import { fileURLToPath } from 'node:url' - -const __dirname = fileURLToPath(new URL('.', import.meta.url)) - -const modulePath = path.join( - __dirname, - '../../../../app/src/Features/BetaProgram/BetaProgramHandler' -) - -describe('BetaProgramHandler', function () { - beforeEach(async function () { - this.user_id = 'some_id' - this.user = { - _id: this.user_id, - email: 'user@example.com', - features: {}, - betaProgram: false, - save: sinon.stub().callsArgWith(0, null), - } - this.handler = await esmock.strict(modulePath, { - '@overleaf/metrics': { - inc: sinon.stub(), - }, - '../../../../app/src/Features/User/UserUpdater': (this.UserUpdater = { - promises: { - updateUser: sinon.stub().resolves(), - }, - }), - '../../../../app/src/Features/Analytics/AnalyticsManager': - (this.AnalyticsManager = { - setUserPropertyForUserInBackground: sinon.stub(), - }), - }) - }) - - describe('optIn', function () { - beforeEach(function () { - this.user.betaProgram = false - this.call = callback => { - this.handler.optIn(this.user_id, callback) - } - }) - - it('should call userUpdater', function (done) { - this.call(err => { - expect(err).to.not.exist - this.UserUpdater.promises.updateUser.callCount.should.equal(1) - done() - }) - }) - - it('should set beta-program user property to true', function (done) { - this.call(err => { - expect(err).to.not.exist - sinon.assert.calledWith( - this.AnalyticsManager.setUserPropertyForUserInBackground, - this.user_id, - 'beta-program', - true - ) - done() - }) - }) - - it('should not 
produce an error', function (done) { - this.call(err => { - expect(err).to.not.exist - done() - }) - }) - - describe('when userUpdater produces an error', function () { - beforeEach(function () { - this.UserUpdater.promises.updateUser.rejects() - }) - - it('should produce an error', function (done) { - this.call(err => { - expect(err).to.exist - expect(err).to.be.instanceof(Error) - done() - }) - }) - }) - }) - - describe('optOut', function () { - beforeEach(function () { - this.user.betaProgram = true - this.call = callback => { - this.handler.optOut(this.user_id, callback) - } - }) - - it('should call userUpdater', function (done) { - this.call(err => { - expect(err).to.not.exist - this.UserUpdater.promises.updateUser.callCount.should.equal(1) - done() - }) - }) - - it('should set beta-program user property to false', function (done) { - this.call(err => { - expect(err).to.not.exist - sinon.assert.calledWith( - this.AnalyticsManager.setUserPropertyForUserInBackground, - this.user_id, - 'beta-program', - false - ) - done() - }) - }) - - it('should not produce an error', function (done) { - this.call(err => { - expect(err).to.not.exist - done() - }) - }) - - describe('when userUpdater produces an error', function () { - beforeEach(function () { - this.UserUpdater.promises.updateUser.rejects() - }) - - it('should produce an error', function (done) { - this.call(err => { - expect(err).to.exist - expect(err).to.be.instanceof(Error) - done() - }) - }) - }) - }) -}) diff --git a/services/web/test/unit/src/Chat/ChatManagerTests.js b/services/web/test/unit/src/Chat/ChatManagerTests.js index bdd3042513..5578b8b167 100644 --- a/services/web/test/unit/src/Chat/ChatManagerTests.js +++ b/services/web/test/unit/src/Chat/ChatManagerTests.js @@ -12,7 +12,7 @@ describe('ChatManager', function () { this.user_id = 'mock-user-id' this.ChatManager = SandboxedModule.require(modulePath, { requires: { - '../User/UserInfoManager': (this.UserInfoManager = {}), + '../User/UserGetter': (this.UserGetter = { promises: {} }), '../User/UserInfoController': (this.UserInfoController = {}), }, }) @@ -32,18 +32,22 @@ describe('ChatManager', function () { beforeEach(function () { this.users = { user_id_1: { - mock: 'user_1', + _id: 'user_id_1', }, user_id_2: { - mock: 'user_2', + _id: 'user_id_2', }, } - this.UserInfoManager.getPersonalInfo = (userId, callback) => { - return callback(null, this.users[userId]) - } - sinon.spy(this.UserInfoManager, 'getPersonalInfo') + this.UserGetter.promises.getUsers = userIds => + Promise.resolve( + Array.from(userIds) + .map(id => this.users[id]) + .filter(u => !!u) + ) + + sinon.spy(this.UserGetter.promises, 'getUsers') return (this.UserInfoController.formatPersonalInfo = user => ({ - formatted: user.mock, + formatted: { id: user._id.toString() }, })) }) @@ -79,16 +83,16 @@ describe('ChatManager', function () { thread1: { resolved: true, resolved_by_user_id: 'user_id_1', - resolved_by_user: { formatted: 'user_1' }, + resolved_by_user: { formatted: { id: 'user_id_1' } }, messages: [ { user_id: 'user_id_1', - user: { formatted: 'user_1' }, + user: { formatted: { id: 'user_id_1' } }, content: 'foo', }, { user_id: 'user_id_2', - user: { formatted: 'user_2' }, + user: { formatted: { id: 'user_id_2' } }, content: 'bar', }, ], @@ -97,7 +101,7 @@ describe('ChatManager', function () { messages: [ { user_id: 'user_id_1', - user: { formatted: 'user_1' }, + user: { formatted: { id: 'user_id_1' } }, content: 'baz', }, ], @@ -105,7 +109,7 @@ describe('ChatManager', function () { }) }) - it('should only 
need to look up each user once', async function () { + it('should lookup all users in a single batch', async function () { await this.ChatManager.promises.injectUserInfoIntoThreads([ { messages: [ @@ -121,7 +125,7 @@ describe('ChatManager', function () { }, ]) - this.UserInfoManager.getPersonalInfo.calledOnce.should.equal(true) + this.UserGetter.promises.getUsers.should.have.been.calledOnce }) }) }) diff --git a/services/web/test/unit/src/Collaborators/CollaboratorsController.test.mjs b/services/web/test/unit/src/Collaborators/CollaboratorsController.test.mjs new file mode 100644 index 0000000000..1d8345a195 --- /dev/null +++ b/services/web/test/unit/src/Collaborators/CollaboratorsController.test.mjs @@ -0,0 +1,519 @@ +import { expect, vi } from 'vitest' +import sinon from 'sinon' +import mongodb from 'mongodb-legacy' +import Errors from '../../../../app/src/Features/Errors/Errors.js' +import MockRequest from '../helpers/MockRequest.js' +import MockResponse from '../helpers/MockResponse.js' + +const ObjectId = mongodb.ObjectId + +const MODULE_PATH = + '../../../../app/src/Features/Collaborators/CollaboratorsController.mjs' + +vi.mock('../../../../app/src/Features/Errors/Errors.js', () => + vi.importActual('../../../../app/src/Features/Errors/Errors.js') +) + +describe('CollaboratorsController', function () { + beforeEach(async function (ctx) { + ctx.res = new MockResponse() + ctx.req = new MockRequest() + + ctx.user = { _id: new ObjectId() } + ctx.projectId = new ObjectId() + ctx.callback = sinon.stub() + + ctx.CollaboratorsHandler = { + promises: { + removeUserFromProject: sinon.stub().resolves(), + setCollaboratorPrivilegeLevel: sinon.stub().resolves(), + }, + createTokenHashPrefix: sinon.stub().returns('abc123'), + } + ctx.CollaboratorsGetter = { + promises: { + getAllInvitedMembers: sinon.stub(), + }, + } + ctx.EditorRealTimeController = { + emitToRoom: sinon.stub(), + } + ctx.HttpErrorHandler = { + forbidden: sinon.stub(), + notFound: sinon.stub(), + } + ctx.TagsHandler = { + promises: { + removeProjectFromAllTags: sinon.stub().resolves(), + }, + } + ctx.SessionManager = { + getSessionUser: sinon.stub().returns(ctx.user), + getLoggedInUserId: sinon.stub().returns(ctx.user._id), + } + ctx.OwnershipTransferHandler = { + promises: { + transferOwnership: sinon.stub().resolves(), + }, + } + ctx.TokenAccessHandler = { + getRequestToken: sinon.stub().returns('access-token'), + } + + ctx.ProjectAuditLogHandler = { + addEntryInBackground: sinon.stub(), + } + + ctx.ProjectGetter = { + promises: { + getProject: sinon.stub().resolves({ owner_ref: ctx.user._id }), + }, + } + + ctx.SplitTestHandler = { + promises: { + getAssignmentForUser: sinon.stub().resolves({ variant: 'default' }), + }, + } + + ctx.LimitationsManager = { + promises: { + canAddXEditCollaborators: sinon.stub().resolves(), + canChangeCollaboratorPrivilegeLevel: sinon.stub().resolves(true), + }, + } + + vi.doMock('mongodb-legacy', () => ({ + default: { ObjectId }, + })) + + vi.doMock( + '../../../../app/src/Features/Collaborators/CollaboratorsHandler.js', + () => ({ + default: ctx.CollaboratorsHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Collaborators/CollaboratorsGetter.js', + () => ({ + default: ctx.CollaboratorsGetter, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Collaborators/OwnershipTransferHandler.js', + () => ({ + default: ctx.OwnershipTransferHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Editor/EditorRealTimeController', + () => ({ + default: ctx.EditorRealTimeController, + 
}) + ) + + vi.doMock( + '../../../../app/src/Features/Errors/HttpErrorHandler.js', + () => ({ + default: ctx.HttpErrorHandler, + }) + ) + + vi.doMock('../../../../app/src/Features/Tags/TagsHandler.js', () => ({ + default: ctx.TagsHandler, + })) + + vi.doMock( + '../../../../app/src/Features/Authentication/SessionManager.js', + () => ({ + default: ctx.SessionManager, + }) + ) + + vi.doMock( + '../../../../app/src/Features/TokenAccess/TokenAccessHandler.js', + () => ({ + default: ctx.TokenAccessHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Project/ProjectAuditLogHandler.js', + () => ({ + default: ctx.ProjectAuditLogHandler, + }) + ) + + vi.doMock('../../../../app/src/Features/Project/ProjectGetter.js', () => ({ + default: ctx.ProjectGetter, + })) + + vi.doMock( + '../../../../app/src/Features/SplitTests/SplitTestHandler.js', + () => ({ + default: ctx.SplitTestHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Subscription/LimitationsManager.js', + () => ({ + default: ctx.LimitationsManager, + }) + ) + + ctx.CollaboratorsController = (await import(MODULE_PATH)).default + }) + + describe('removeUserFromProject', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.req.params = { + Project_id: ctx.projectId, + user_id: ctx.user._id, + } + ctx.res.sendStatus = sinon.spy(() => { + resolve() + }) + ctx.CollaboratorsController.removeUserFromProject(ctx.req, ctx.res) + }) + }) + + it('should remove the user from the project', function (ctx) { + expect( + ctx.CollaboratorsHandler.promises.removeUserFromProject + ).to.have.been.calledWith(ctx.projectId, ctx.user._id) + }) + + it('should emit a userRemovedFromProject event to the project', function (ctx) { + expect(ctx.EditorRealTimeController.emitToRoom).to.have.been.calledWith( + ctx.projectId, + 'userRemovedFromProject', + ctx.user._id + ) + }) + + it('should send back a success response', function (ctx) { + ctx.res.sendStatus.calledWith(204).should.equal(true) + }) + + it('should have called emitToRoom', function (ctx) { + expect(ctx.EditorRealTimeController.emitToRoom).to.have.been.calledWith( + ctx.projectId, + 'project:membership:changed' + ) + }) + + it('should write a project audit log', function (ctx) { + ctx.ProjectAuditLogHandler.addEntryInBackground.should.have.been.calledWith( + ctx.projectId, + 'remove-collaborator', + ctx.user._id, + ctx.req.ip, + { userId: ctx.user._id } + ) + }) + }) + + describe('removeSelfFromProject', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.req.params = { Project_id: ctx.projectId } + ctx.res.sendStatus = sinon.spy(() => { + resolve() + }) + ctx.CollaboratorsController.removeSelfFromProject(ctx.req, ctx.res) + }) + }) + + it('should remove the logged in user from the project', function (ctx) { + expect( + ctx.CollaboratorsHandler.promises.removeUserFromProject + ).to.have.been.calledWith(ctx.projectId, ctx.user._id) + }) + + it('should emit a userRemovedFromProject event to the project', function (ctx) { + expect(ctx.EditorRealTimeController.emitToRoom).to.have.been.calledWith( + ctx.projectId, + 'userRemovedFromProject', + ctx.user._id + ) + }) + + it('should remove the project from all tags', function (ctx) { + expect( + ctx.TagsHandler.promises.removeProjectFromAllTags + ).to.have.been.calledWith(ctx.user._id, ctx.projectId) + }) + + it('should return a success code', function (ctx) { + ctx.res.sendStatus.calledWith(204).should.equal(true) + }) + + it('should write a project audit log', function (ctx) {
+ ctx.ProjectAuditLogHandler.addEntryInBackground.should.have.been.calledWith( + ctx.projectId, + 'leave-project', + ctx.user._id, + ctx.req.ip + ) + }) + }) + + describe('getAllMembers', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.req.params = { Project_id: ctx.projectId } + ctx.res.json = sinon.spy(() => { + resolve() + }) + ctx.next = sinon.stub() + ctx.members = [{ a: 1 }] + ctx.CollaboratorsGetter.promises.getAllInvitedMembers.resolves( + ctx.members + ) + ctx.CollaboratorsController.getAllMembers(ctx.req, ctx.res, ctx.next) + }) + }) + + it('should not produce an error', function (ctx) { + ctx.next.callCount.should.equal(0) + }) + + it('should produce a json response', function (ctx) { + ctx.res.json.callCount.should.equal(1) + ctx.res.json.calledWith({ members: ctx.members }).should.equal(true) + }) + + it('should call CollaboratorsGetter.getAllInvitedMembers', function (ctx) { + expect(ctx.CollaboratorsGetter.promises.getAllInvitedMembers).to.have.been + .calledOnce + }) + + describe('when CollaboratorsGetter.getAllInvitedMembers produces an error', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.res.json = sinon.stub() + ctx.next = sinon.spy(() => { + resolve() + }) + ctx.CollaboratorsGetter.promises.getAllInvitedMembers.rejects( + new Error('woops') + ) + ctx.CollaboratorsController.getAllMembers(ctx.req, ctx.res, ctx.next) + }) + }) + + it('should produce an error', function (ctx) { + expect(ctx.next).to.have.been.calledOnce + expect(ctx.next).to.have.been.calledWithMatch( + sinon.match.instanceOf(Error) + ) + }) + + it('should not produce a json response', function (ctx) { + ctx.res.json.callCount.should.equal(0) + }) + }) + }) + + describe('setCollaboratorInfo', function () { + beforeEach(function (ctx) { + ctx.req.params = { + Project_id: ctx.projectId, + user_id: ctx.user._id, + } + ctx.req.body = { privilegeLevel: 'readOnly' } + }) + + it('should set the collaborator privilege level', function (ctx) { + return new Promise(resolve => { + ctx.res.sendStatus = status => { + expect(status).to.equal(204) + expect( + ctx.CollaboratorsHandler.promises.setCollaboratorPrivilegeLevel + ).to.have.been.calledWith(ctx.projectId, ctx.user._id, 'readOnly') + resolve() + } + ctx.CollaboratorsController.setCollaboratorInfo(ctx.req, ctx.res) + }) + }) + + it('should return a 404 when the project or collaborator is not found', function (ctx) { + return new Promise(resolve => { + ctx.HttpErrorHandler.notFound = sinon.spy((req, res) => { + expect(req).to.equal(ctx.req) + expect(res).to.equal(ctx.res) + resolve() + }) + + ctx.CollaboratorsHandler.promises.setCollaboratorPrivilegeLevel.rejects( + new Errors.NotFoundError() + ) + ctx.CollaboratorsController.setCollaboratorInfo(ctx.req, ctx.res) + }) + }) + + it('should pass the error to the next handler when setting the privilege level fails', function (ctx) { + return new Promise(resolve => { + ctx.next = sinon.spy(err => { + expect(err).instanceOf(Error) + resolve() + }) + + ctx.CollaboratorsHandler.promises.setCollaboratorPrivilegeLevel.rejects( + new Error() + ) + ctx.CollaboratorsController.setCollaboratorInfo( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + describe('when setting privilege level to readAndWrite', function () { + beforeEach(function (ctx) { + ctx.req.body = { privilegeLevel: 'readAndWrite' } + }) + + describe('when owner can add new edit collaborators', function () { + it('should set privilege level after checking collaborators can be added', 
function (ctx) { + return new Promise(resolve => { + ctx.res.sendStatus = status => { + expect(status).to.equal(204) + expect( + ctx.LimitationsManager.promises + .canChangeCollaboratorPrivilegeLevel + ).to.have.been.calledWith( + ctx.projectId, + ctx.user._id, + 'readAndWrite' + ) + resolve() + } + ctx.CollaboratorsController.setCollaboratorInfo(ctx.req, ctx.res) + }) + }) + }) + + describe('when owner cannot add edit collaborators', function () { + beforeEach(function (ctx) { + ctx.LimitationsManager.promises.canChangeCollaboratorPrivilegeLevel.resolves( + false + ) + }) + + it('should return a 403 if trying to set a new edit collaborator', function (ctx) { + return new Promise(resolve => { + ctx.HttpErrorHandler.forbidden = sinon.spy((req, res) => { + expect(req).to.equal(ctx.req) + expect(res).to.equal(ctx.res) + expect( + ctx.LimitationsManager.promises + .canChangeCollaboratorPrivilegeLevel + ).to.have.been.calledWith( + ctx.projectId, + ctx.user._id, + 'readAndWrite' + ) + expect( + ctx.CollaboratorsHandler.promises.setCollaboratorPrivilegeLevel + ).to.not.have.been.called + resolve() + }) + ctx.CollaboratorsController.setCollaboratorInfo(ctx.req, ctx.res) + }) + }) + }) + }) + + describe('when setting privilege level to readOnly', function () { + beforeEach(function (ctx) { + ctx.req.body = { privilegeLevel: 'readOnly' } + }) + + describe('when owner cannot add edit collaborators', function () { + beforeEach(function (ctx) { + ctx.LimitationsManager.promises.canAddXEditCollaborators.resolves( + false + ) + }) + + it('should always allow setting a collaborator to viewer even if user cant add edit collaborators', function (ctx) { + return new Promise(resolve => { + ctx.res.sendStatus = status => { + expect(status).to.equal(204) + expect(ctx.LimitationsManager.promises.canAddXEditCollaborators) + .to.not.have.been.called + expect( + ctx.CollaboratorsHandler.promises.setCollaboratorPrivilegeLevel + ).to.have.been.calledWith(ctx.projectId, ctx.user._id, 'readOnly') + resolve() + } + ctx.CollaboratorsController.setCollaboratorInfo(ctx.req, ctx.res) + }) + }) + }) + }) + }) + + describe('transferOwnership', function () { + beforeEach(function (ctx) { + ctx.req.body = { user_id: ctx.user._id.toString() } + }) + + it('returns 204 on success', function (ctx) { + return new Promise(resolve => { + ctx.res.sendStatus = status => { + expect(status).to.equal(204) + resolve() + } + ctx.CollaboratorsController.transferOwnership(ctx.req, ctx.res) + }) + }) + + it('returns 404 if the project does not exist', function (ctx) { + return new Promise(resolve => { + ctx.HttpErrorHandler.notFound = sinon.spy((req, res, message) => { + expect(req).to.equal(ctx.req) + expect(res).to.equal(ctx.res) + expect(message).to.match(/project not found/) + resolve() + }) + ctx.OwnershipTransferHandler.promises.transferOwnership.rejects( + new Errors.ProjectNotFoundError() + ) + ctx.CollaboratorsController.transferOwnership(ctx.req, ctx.res) + }) + }) + + it('returns 404 if the user does not exist', function (ctx) { + return new Promise(resolve => { + ctx.HttpErrorHandler.notFound = sinon.spy((req, res, message) => { + expect(req).to.equal(ctx.req) + expect(res).to.equal(ctx.res) + expect(message).to.match(/user not found/) + resolve() + }) + ctx.OwnershipTransferHandler.promises.transferOwnership.rejects( + new Errors.UserNotFoundError() + ) + ctx.CollaboratorsController.transferOwnership(ctx.req, ctx.res) + }) + }) + + it('invokes HTTP forbidden error handler if the user is not a collaborator', function (ctx) { + 
return new Promise(resolve => { + ctx.HttpErrorHandler.forbidden = sinon.spy(() => resolve()) + ctx.OwnershipTransferHandler.promises.transferOwnership.rejects( + new Errors.UserNotCollaboratorError() + ) + ctx.CollaboratorsController.transferOwnership(ctx.req, ctx.res) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/Collaborators/CollaboratorsControllerTests.mjs b/services/web/test/unit/src/Collaborators/CollaboratorsControllerTests.mjs deleted file mode 100644 index 27460da148..0000000000 --- a/services/web/test/unit/src/Collaborators/CollaboratorsControllerTests.mjs +++ /dev/null @@ -1,435 +0,0 @@ -import sinon from 'sinon' -import { expect } from 'chai' -import esmock from 'esmock' -import mongodb from 'mongodb-legacy' -import Errors from '../../../../app/src/Features/Errors/Errors.js' -import MockRequest from '../helpers/MockRequest.js' -import MockResponse from '../helpers/MockResponse.js' - -const ObjectId = mongodb.ObjectId - -const MODULE_PATH = - '../../../../app/src/Features/Collaborators/CollaboratorsController.mjs' - -describe('CollaboratorsController', function () { - beforeEach(async function () { - this.res = new MockResponse() - this.req = new MockRequest() - - this.user = { _id: new ObjectId() } - this.projectId = new ObjectId() - this.callback = sinon.stub() - - this.CollaboratorsHandler = { - promises: { - removeUserFromProject: sinon.stub().resolves(), - setCollaboratorPrivilegeLevel: sinon.stub().resolves(), - }, - createTokenHashPrefix: sinon.stub().returns('abc123'), - } - this.CollaboratorsGetter = { - promises: { - getAllInvitedMembers: sinon.stub(), - }, - } - this.EditorRealTimeController = { - emitToRoom: sinon.stub(), - } - this.HttpErrorHandler = { - forbidden: sinon.stub(), - notFound: sinon.stub(), - } - this.TagsHandler = { - promises: { - removeProjectFromAllTags: sinon.stub().resolves(), - }, - } - this.SessionManager = { - getSessionUser: sinon.stub().returns(this.user), - getLoggedInUserId: sinon.stub().returns(this.user._id), - } - this.OwnershipTransferHandler = { - promises: { - transferOwnership: sinon.stub().resolves(), - }, - } - this.TokenAccessHandler = { - getRequestToken: sinon.stub().returns('access-token'), - } - - this.ProjectAuditLogHandler = { - addEntryInBackground: sinon.stub(), - } - - this.ProjectGetter = { - promises: { - getProject: sinon.stub().resolves({ owner_ref: this.user._id }), - }, - } - - this.SplitTestHandler = { - promises: { - getAssignmentForUser: sinon.stub().resolves({ variant: 'default' }), - }, - } - - this.LimitationsManager = { - promises: { - canAddXEditCollaborators: sinon.stub().resolves(), - canChangeCollaboratorPrivilegeLevel: sinon.stub().resolves(true), - }, - } - - this.CollaboratorsController = await esmock.strict(MODULE_PATH, { - 'mongodb-legacy': { ObjectId }, - '../../../../app/src/Features/Collaborators/CollaboratorsHandler.js': - this.CollaboratorsHandler, - '../../../../app/src/Features/Collaborators/CollaboratorsGetter.js': - this.CollaboratorsGetter, - '../../../../app/src/Features/Collaborators/OwnershipTransferHandler.js': - this.OwnershipTransferHandler, - '../../../../app/src/Features/Editor/EditorRealTimeController': - this.EditorRealTimeController, - '../../../../app/src/Features/Errors/HttpErrorHandler.js': - this.HttpErrorHandler, - '../../../../app/src/Features/Tags/TagsHandler.js': this.TagsHandler, - '../../../../app/src/Features/Authentication/SessionManager.js': - this.SessionManager, - '../../../../app/src/Features/TokenAccess/TokenAccessHandler.js': - 
this.TokenAccessHandler, - '../../../../app/src/Features/Project/ProjectAuditLogHandler.js': - this.ProjectAuditLogHandler, - '../../../../app/src/Features/Project/ProjectGetter.js': - this.ProjectGetter, - '../../../../app/src/Features/SplitTests/SplitTestHandler.js': - this.SplitTestHandler, - '../../../../app/src/Features/Subscription/LimitationsManager.js': - this.LimitationsManager, - }) - }) - - describe('removeUserFromProject', function () { - beforeEach(function (done) { - this.req.params = { - Project_id: this.projectId, - user_id: this.user._id, - } - this.res.sendStatus = sinon.spy(() => { - done() - }) - this.CollaboratorsController.removeUserFromProject(this.req, this.res) - }) - - it('should from the user from the project', function () { - expect( - this.CollaboratorsHandler.promises.removeUserFromProject - ).to.have.been.calledWith(this.projectId, this.user._id) - }) - - it('should emit a userRemovedFromProject event to the proejct', function () { - expect(this.EditorRealTimeController.emitToRoom).to.have.been.calledWith( - this.projectId, - 'userRemovedFromProject', - this.user._id - ) - }) - - it('should send the back a success response', function () { - this.res.sendStatus.calledWith(204).should.equal(true) - }) - - it('should have called emitToRoom', function () { - expect(this.EditorRealTimeController.emitToRoom).to.have.been.calledWith( - this.projectId, - 'project:membership:changed' - ) - }) - - it('should write a project audit log', function () { - this.ProjectAuditLogHandler.addEntryInBackground.should.have.been.calledWith( - this.projectId, - 'remove-collaborator', - this.user._id, - this.req.ip, - { userId: this.user._id } - ) - }) - }) - - describe('removeSelfFromProject', function () { - beforeEach(function (done) { - this.req.params = { Project_id: this.projectId } - this.res.sendStatus = sinon.spy(() => { - done() - }) - this.CollaboratorsController.removeSelfFromProject(this.req, this.res) - }) - - it('should remove the logged in user from the project', function () { - expect( - this.CollaboratorsHandler.promises.removeUserFromProject - ).to.have.been.calledWith(this.projectId, this.user._id) - }) - - it('should emit a userRemovedFromProject event to the proejct', function () { - expect(this.EditorRealTimeController.emitToRoom).to.have.been.calledWith( - this.projectId, - 'userRemovedFromProject', - this.user._id - ) - }) - - it('should remove the project from all tags', function () { - expect( - this.TagsHandler.promises.removeProjectFromAllTags - ).to.have.been.calledWith(this.user._id, this.projectId) - }) - - it('should return a success code', function () { - this.res.sendStatus.calledWith(204).should.equal(true) - }) - - it('should write a project audit log', function () { - this.ProjectAuditLogHandler.addEntryInBackground.should.have.been.calledWith( - this.projectId, - 'leave-project', - this.user._id, - this.req.ip - ) - }) - }) - - describe('getAllMembers', function () { - beforeEach(function (done) { - this.req.params = { Project_id: this.projectId } - this.res.json = sinon.spy(() => { - done() - }) - this.next = sinon.stub() - this.members = [{ a: 1 }] - this.CollaboratorsGetter.promises.getAllInvitedMembers.resolves( - this.members - ) - this.CollaboratorsController.getAllMembers(this.req, this.res, this.next) - }) - - it('should not produce an error', function () { - this.next.callCount.should.equal(0) - }) - - it('should produce a json response', function () { - this.res.json.callCount.should.equal(1) - this.res.json.calledWith({ members: 
this.members }).should.equal(true) - }) - - it('should call CollaboratorsGetter.getAllInvitedMembers', function () { - expect(this.CollaboratorsGetter.promises.getAllInvitedMembers).to.have - .been.calledOnce - }) - - describe('when CollaboratorsGetter.getAllInvitedMembers produces an error', function () { - beforeEach(function (done) { - this.res.json = sinon.stub() - this.next = sinon.spy(() => { - done() - }) - this.CollaboratorsGetter.promises.getAllInvitedMembers.rejects( - new Error('woops') - ) - this.CollaboratorsController.getAllMembers( - this.req, - this.res, - this.next - ) - }) - - it('should produce an error', function () { - expect(this.next).to.have.been.calledOnce - expect(this.next).to.have.been.calledWithMatch( - sinon.match.instanceOf(Error) - ) - }) - - it('should not produce a json response', function () { - this.res.json.callCount.should.equal(0) - }) - }) - }) - - describe('setCollaboratorInfo', function () { - beforeEach(function () { - this.req.params = { - Project_id: this.projectId, - user_id: this.user._id, - } - this.req.body = { privilegeLevel: 'readOnly' } - }) - - it('should set the collaborator privilege level', function (done) { - this.res.sendStatus = status => { - expect(status).to.equal(204) - expect( - this.CollaboratorsHandler.promises.setCollaboratorPrivilegeLevel - ).to.have.been.calledWith(this.projectId, this.user._id, 'readOnly') - done() - } - this.CollaboratorsController.setCollaboratorInfo(this.req, this.res) - }) - - it('should return a 404 when the project or collaborator is not found', function (done) { - this.HttpErrorHandler.notFound = sinon.spy((req, res) => { - expect(req).to.equal(this.req) - expect(res).to.equal(this.res) - done() - }) - - this.CollaboratorsHandler.promises.setCollaboratorPrivilegeLevel.rejects( - new Errors.NotFoundError() - ) - this.CollaboratorsController.setCollaboratorInfo(this.req, this.res) - }) - - it('should pass the error to the next handler when setting the privilege level fails', function (done) { - this.next = sinon.spy(err => { - expect(err).instanceOf(Error) - done() - }) - - this.CollaboratorsHandler.promises.setCollaboratorPrivilegeLevel.rejects( - new Error() - ) - this.CollaboratorsController.setCollaboratorInfo( - this.req, - this.res, - this.next - ) - }) - - describe('when setting privilege level to readAndWrite', function () { - beforeEach(function () { - this.req.body = { privilegeLevel: 'readAndWrite' } - }) - - describe('when owner can add new edit collaborators', function () { - it('should set privilege level after checking collaborators can be added', function (done) { - this.res.sendStatus = status => { - expect(status).to.equal(204) - expect( - this.LimitationsManager.promises - .canChangeCollaboratorPrivilegeLevel - ).to.have.been.calledWith( - this.projectId, - this.user._id, - 'readAndWrite' - ) - done() - } - this.CollaboratorsController.setCollaboratorInfo(this.req, this.res) - }) - }) - - describe('when owner cannot add edit collaborators', function () { - beforeEach(function () { - this.LimitationsManager.promises.canChangeCollaboratorPrivilegeLevel.resolves( - false - ) - }) - - it('should return a 403 if trying to set a new edit collaborator', function (done) { - this.HttpErrorHandler.forbidden = sinon.spy((req, res) => { - expect(req).to.equal(this.req) - expect(res).to.equal(this.res) - expect( - this.LimitationsManager.promises - .canChangeCollaboratorPrivilegeLevel - ).to.have.been.calledWith( - this.projectId, - this.user._id, - 'readAndWrite' - ) - expect( - 
this.CollaboratorsHandler.promises.setCollaboratorPrivilegeLevel - ).to.not.have.been.called - done() - }) - this.CollaboratorsController.setCollaboratorInfo(this.req, this.res) - }) - }) - }) - - describe('when setting privilege level to readOnly', function () { - beforeEach(function () { - this.req.body = { privilegeLevel: 'readOnly' } - }) - - describe('when owner cannot add edit collaborators', function () { - beforeEach(function () { - this.LimitationsManager.promises.canAddXEditCollaborators.resolves( - false - ) - }) - - it('should always allow setting a collaborator to viewer even if user cant add edit collaborators', function (done) { - this.res.sendStatus = status => { - expect(status).to.equal(204) - expect(this.LimitationsManager.promises.canAddXEditCollaborators).to - .not.have.been.called - expect( - this.CollaboratorsHandler.promises.setCollaboratorPrivilegeLevel - ).to.have.been.calledWith(this.projectId, this.user._id, 'readOnly') - done() - } - this.CollaboratorsController.setCollaboratorInfo(this.req, this.res) - }) - }) - }) - }) - - describe('transferOwnership', function () { - beforeEach(function () { - this.req.body = { user_id: this.user._id.toString() } - }) - - it('returns 204 on success', function (done) { - this.res.sendStatus = status => { - expect(status).to.equal(204) - done() - } - this.CollaboratorsController.transferOwnership(this.req, this.res) - }) - - it('returns 404 if the project does not exist', function (done) { - this.HttpErrorHandler.notFound = sinon.spy((req, res, message) => { - expect(req).to.equal(this.req) - expect(res).to.equal(this.res) - expect(message).to.match(/project not found/) - done() - }) - this.OwnershipTransferHandler.promises.transferOwnership.rejects( - new Errors.ProjectNotFoundError() - ) - this.CollaboratorsController.transferOwnership(this.req, this.res) - }) - - it('returns 404 if the user does not exist', function (done) { - this.HttpErrorHandler.notFound = sinon.spy((req, res, message) => { - expect(req).to.equal(this.req) - expect(res).to.equal(this.res) - expect(message).to.match(/user not found/) - done() - }) - this.OwnershipTransferHandler.promises.transferOwnership.rejects( - new Errors.UserNotFoundError() - ) - this.CollaboratorsController.transferOwnership(this.req, this.res) - }) - - it('invokes HTTP forbidden error handler if the user is not a collaborator', function (done) { - this.HttpErrorHandler.forbidden = sinon.spy(() => done()) - this.OwnershipTransferHandler.promises.transferOwnership.rejects( - new Errors.UserNotCollaboratorError() - ) - this.CollaboratorsController.transferOwnership(this.req, this.res) - }) - }) -}) diff --git a/services/web/test/unit/src/Collaborators/CollaboratorsGetterTests.js b/services/web/test/unit/src/Collaborators/CollaboratorsGetterTests.js index dda99e04f3..10542c4564 100644 --- a/services/web/test/unit/src/Collaborators/CollaboratorsGetterTests.js +++ b/services/web/test/unit/src/Collaborators/CollaboratorsGetterTests.js @@ -62,7 +62,7 @@ describe('CollaboratorsGetter', function () { }, } this.ProjectEditorHandler = { - buildOwnerAndMembersViews: sinon.stub(), + buildUserModelView: sinon.stub(), } this.CollaboratorsGetter = SandboxedModule.require(MODULE_PATH, { requires: { @@ -204,30 +204,6 @@ describe('CollaboratorsGetter', function () { }) }) - describe('getInvitedMembersWithPrivilegeLevels', function () { - beforeEach(function () { - this.UserGetter.promises.getUsers.resolves([ - { _id: this.readOnlyRef1 }, - { _id: this.readOnlyTokenRef }, - { _id: this.readWriteRef2 
}, - { _id: this.readWriteTokenRef }, - { _id: this.reviewer1Ref }, - ]) - }) - - it('should return an array of invited members with their privilege levels', async function () { - const result = - await this.CollaboratorsGetter.promises.getInvitedMembersWithPrivilegeLevels( - this.project._id - ) - expect(result).to.have.deep.members([ - { user: { _id: this.readOnlyRef1 }, privilegeLevel: 'readOnly' }, - { user: { _id: this.readWriteRef2 }, privilegeLevel: 'readAndWrite' }, - { user: { _id: this.reviewer1Ref }, privilegeLevel: 'review' }, - ]) - }) - }) - describe('getMemberIdPrivilegeLevel', function () { it('should return the privilege level if it exists', async function () { const level = @@ -401,20 +377,21 @@ describe('CollaboratorsGetter', function () { { user: this.readWriteUser, privilegeLevel: 'readAndWrite' }, { user: this.reviewUser, privilegeLevel: 'review' }, ] - this.views = { - owner: this.owningUser, - ownerFeatures: this.owningUser.features, - members: [ - { _id: this.readWriteUser._id, email: this.readWriteUser.email }, - { _id: this.reviewUser._id, email: this.reviewUser.email }, - ], - } + this.memberViews = [ + { _id: this.readWriteUser._id, email: this.readWriteUser.email }, + { _id: this.reviewUser._id, email: this.reviewUser.email }, + ] this.UserGetter.promises.getUsers.resolves([ this.owningUser, this.readWriteUser, this.reviewUser, ]) - this.ProjectEditorHandler.buildOwnerAndMembersViews.returns(this.views) + this.ProjectEditorHandler.buildUserModelView + .withArgs(this.members[1]) + .returns(this.memberViews[0]) + this.ProjectEditorHandler.buildUserModelView + .withArgs(this.members[2]) + .returns(this.memberViews[1]) this.result = await this.CollaboratorsGetter.promises.getAllInvitedMembers( this.project._id @@ -422,15 +399,18 @@ describe('CollaboratorsGetter', function () { }) it('should produce a list of members', function () { - expect(this.result).to.deep.equal(this.views.members) + expect(this.result).to.deep.equal(this.memberViews) }) - it('should call ProjectEditorHandler.buildOwnerAndMembersViews', function () { - expect(this.ProjectEditorHandler.buildOwnerAndMembersViews).to.have.been - .calledOnce + it('should call ProjectEditorHandler.buildUserModelView', function () { + expect(this.ProjectEditorHandler.buildUserModelView).to.have.been + .calledTwice expect( - this.ProjectEditorHandler.buildOwnerAndMembersViews - ).to.have.been.calledWith(this.members) + this.ProjectEditorHandler.buildUserModelView + ).to.have.been.calledWith(this.members[1]) + expect( + this.ProjectEditorHandler.buildUserModelView + ).to.have.been.calledWith(this.members[2]) }) }) diff --git a/services/web/test/unit/src/Collaborators/CollaboratorsHandlerTests.js b/services/web/test/unit/src/Collaborators/CollaboratorsHandlerTests.js index 8542bd8355..73fb699772 100644 --- a/services/web/test/unit/src/Collaborators/CollaboratorsHandlerTests.js +++ b/services/web/test/unit/src/Collaborators/CollaboratorsHandlerTests.js @@ -447,6 +447,40 @@ describe('CollaboratorsHandler', function () { }) }) + describe('when user already exists as a reviewer', function () { + beforeEach(function () { + this.project.collaberator_refs = [] + this.project.reviewer_refs = [this.userId] + this.project.readOnly_refs = [] + }) + + it('should not add the user again', async function () { + await this.CollaboratorsHandler.promises.addUserIdToProject( + this.project._id, + this.addingUserId, + this.userId, + 'readAndWrite' + ) + }) + }) + + describe('when user already exists as a read-only user', function () { + 
beforeEach(function () { + this.project.collaberator_refs = [] + this.project.reviewer_refs = [] + this.project.readOnly_refs = [this.userId] + }) + + it('should not add the user again', async function () { + await this.CollaboratorsHandler.promises.addUserIdToProject( + this.project._id, + this.addingUserId, + this.userId, + 'readAndWrite' + ) + }) + }) + describe('with null addingUserId', function () { beforeEach(async function () { this.project.collaberator_refs = [] diff --git a/services/web/test/unit/src/Collaborators/CollaboratorsInviteController.test.mjs b/services/web/test/unit/src/Collaborators/CollaboratorsInviteController.test.mjs new file mode 100644 index 0000000000..edac9c6c92 --- /dev/null +++ b/services/web/test/unit/src/Collaborators/CollaboratorsInviteController.test.mjs @@ -0,0 +1,1674 @@ +import { expect, vi } from 'vitest' +import sinon from 'sinon' +import MockRequest from '../helpers/MockRequest.js' +import MockResponse from '../helpers/MockResponse.js' +import mongodb from 'mongodb-legacy' +import Errors from '../../../../app/src/Features/Errors/Errors.js' +import _ from 'lodash' + +const ObjectId = mongodb.ObjectId + +const MODULE_PATH = + '../../../../app/src/Features/Collaborators/CollaboratorsInviteController.mjs' + +vi.mock('../../../../app/src/Features/Errors/Errors.js', () => + vi.importActual('../../../../app/src/Features/Errors/Errors.js') +) + +describe('CollaboratorsInviteController', function () { + beforeEach(async function (ctx) { + ctx.projectId = 'project-id-123' + ctx.token = 'some-opaque-token' + ctx.tokenHmac = 'some-hmac-token' + ctx.targetEmail = 'user@example.com' + ctx.privileges = 'readAndWrite' + ctx.projectOwner = { + _id: 'project-owner-id', + email: 'project-owner@example.com', + } + ctx.currentUser = { + _id: 'current-user-id', + email: 'current-user@example.com', + } + ctx.invite = { + _id: new ObjectId(), + token: ctx.token, + tokenHmac: ctx.tokenHmac, + sendingUserId: ctx.currentUser._id, + projectId: ctx.projectId, + email: ctx.targetEmail, + privileges: ctx.privileges, + createdAt: new Date(), + } + ctx.inviteReducedData = _.pick(ctx.invite, ['_id', 'email', 'privileges']) + ctx.project = { + _id: ctx.projectId, + owner_ref: ctx.projectOwner._id, + } + + ctx.SessionManager = { + getSessionUser: sinon.stub().returns(ctx.currentUser), + } + + ctx.AnalyticsManger = { recordEventForUserInBackground: sinon.stub() } + + ctx.rateLimiter = { + consume: sinon.stub().resolves(), + } + ctx.RateLimiter = { + RateLimiter: sinon.stub().returns(ctx.rateLimiter), + } + + ctx.LimitationsManager = { + promises: { + allowedNumberOfCollaboratorsForUser: sinon.stub(), + canAddXEditCollaborators: sinon.stub().resolves(true), + }, + } + + ctx.UserGetter = { + promises: { + getUserByAnyEmail: sinon.stub(), + getUser: sinon.stub(), + }, + } + + ctx.ProjectGetter = { + promises: { + getProject: sinon.stub(), + }, + } + + ctx.CollaboratorsGetter = { + promises: { + isUserInvitedMemberOfProject: sinon.stub(), + }, + } + + ctx.CollaboratorsInviteHandler = { + promises: { + inviteToProject: sinon.stub().resolves(ctx.inviteReducedData), + generateNewInvite: sinon.stub().resolves(ctx.invite), + revokeInvite: sinon.stub().resolves(ctx.invite), + acceptInvite: sinon.stub(), + }, + } + + ctx.CollaboratorsInviteGetter = { + promises: { + getAllInvites: sinon.stub(), + getInviteByToken: sinon.stub().resolves(ctx.invite), + }, + } + + ctx.EditorRealTimeController = { + emitToRoom: sinon.stub(), + } + + ctx.settings = {} + + ctx.ProjectAuditLogHandler = { + promises: { + 
addEntry: sinon.stub().resolves(), + }, + addEntryInBackground: sinon.stub(), + } + + ctx.AuthenticationController = { + setRedirectInSession: sinon.stub(), + } + + ctx.SplitTestHandler = { + promises: { + getAssignment: sinon.stub().resolves({ variant: 'default' }), + getAssignmentForUser: sinon.stub().resolves({ variant: 'default' }), + }, + } + + vi.doMock('../../../../app/src/Features/Project/ProjectGetter.js', () => ({ + default: ctx.ProjectGetter, + })) + + vi.doMock( + '../../../../app/src/Features/Project/ProjectAuditLogHandler.js', + () => ({ + default: ctx.ProjectAuditLogHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Subscription/LimitationsManager.js', + () => ({ + default: ctx.LimitationsManager, + }) + ) + + vi.doMock('../../../../app/src/Features/User/UserGetter.js', () => ({ + default: ctx.UserGetter, + })) + + vi.doMock( + '../../../../app/src/Features/Collaborators/CollaboratorsGetter.js', + () => ({ + default: ctx.CollaboratorsGetter, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Collaborators/CollaboratorsInviteHandler.mjs', + () => ({ + default: ctx.CollaboratorsInviteHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Collaborators/CollaboratorsInviteGetter.js', + () => ({ + default: ctx.CollaboratorsInviteGetter, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Editor/EditorRealTimeController.js', + () => ({ + default: ctx.EditorRealTimeController, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Analytics/AnalyticsManager.js', + () => ({ + default: ctx.AnalyticsManger, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Authentication/SessionManager.js', + () => ({ + default: ctx.SessionManager, + }) + ) + + vi.doMock('@overleaf/settings', () => ({ + default: ctx.settings, + })) + + vi.doMock( + '../../../../app/src/infrastructure/RateLimiter', + () => ctx.RateLimiter + ) + + vi.doMock( + '../../../../app/src/Features/Authentication/AuthenticationController', + () => ({ + default: ctx.AuthenticationController, + }) + ) + + vi.doMock( + '../../../../app/src/Features/SplitTests/SplitTestHandler', + () => ({ + default: ctx.SplitTestHandler, + }) + ) + + ctx.CollaboratorsInviteController = (await import(MODULE_PATH)).default + + ctx.res = new MockResponse() + ctx.req = new MockRequest() + ctx.next = sinon.stub() + }) + + describe('getAllInvites', function () { + beforeEach(function (ctx) { + ctx.fakeInvites = [ + { _id: new ObjectId(), one: 1 }, + { _id: new ObjectId(), two: 2 }, + ] + ctx.req.params = { Project_id: ctx.projectId } + }) + + describe('when all goes well', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.CollaboratorsInviteGetter.promises.getAllInvites.resolves( + ctx.fakeInvites + ) + ctx.res.callback = () => resolve() + ctx.CollaboratorsInviteController.getAllInvites( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should not produce an error', function (ctx) { + ctx.next.callCount.should.equal(0) + }) + + it('should produce a list of invite objects', function (ctx) { + ctx.res.json.callCount.should.equal(1) + ctx.res.json.calledWith({ invites: ctx.fakeInvites }).should.equal(true) + }) + + it('should have called CollaboratorsInviteHandler.getAllInvites', function (ctx) { + ctx.CollaboratorsInviteGetter.promises.getAllInvites.callCount.should.equal( + 1 + ) + ctx.CollaboratorsInviteGetter.promises.getAllInvites + .calledWith(ctx.projectId) + .should.equal(true) + }) + }) + + describe('when CollaboratorsInviteHandler.getAllInvites produces an error', 
function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.CollaboratorsInviteGetter.promises.getAllInvites.rejects( + new Error('woops') + ) + ctx.next.callsFake(() => resolve()) + ctx.CollaboratorsInviteController.getAllInvites( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should produce an error', function (ctx) { + ctx.next.callCount.should.equal(1) + ctx.next.firstCall.args[0].should.be.instanceof(Error) + }) + }) + }) + + describe('inviteToProject', function () { + beforeEach(function (ctx) { + ctx.req.params = { Project_id: ctx.projectId } + ctx.req.body = { + email: ctx.targetEmail, + privileges: ctx.privileges, + } + ctx.ProjectGetter.promises.getProject.resolves({ + owner_ref: ctx.project.owner_ref, + }) + }) + + describe('when all goes well', function (done) { + beforeEach(async function (ctx) { + ctx.CollaboratorsInviteController._checkShouldInviteEmail = sinon + .stub() + .resolves(true) + ctx.CollaboratorsInviteController._checkRateLimit = sinon + .stub() + .resolves(true) + + await ctx.CollaboratorsInviteController.inviteToProject( + ctx.req, + ctx.res + ) + }) + + it('should produce json response', function (ctx) { + ctx.res.json.callCount.should.equal(1) + expect(ctx.res.json.firstCall.args[0]).to.deep.equal({ + invite: ctx.inviteReducedData, + }) + }) + + it('should have called canAddXEditCollaborators', function (ctx) { + ctx.LimitationsManager.promises.canAddXEditCollaborators.callCount.should.equal( + 1 + ) + ctx.LimitationsManager.promises.canAddXEditCollaborators + .calledWith(ctx.projectId) + .should.equal(true) + }) + + it('should have called _checkShouldInviteEmail', function (ctx) { + ctx.CollaboratorsInviteController._checkShouldInviteEmail.callCount.should.equal( + 1 + ) + + ctx.CollaboratorsInviteController._checkShouldInviteEmail + .calledWith(ctx.targetEmail) + .should.equal(true) + }) + + it('should have called inviteToProject', function (ctx) { + ctx.CollaboratorsInviteHandler.promises.inviteToProject.callCount.should.equal( + 1 + ) + ctx.CollaboratorsInviteHandler.promises.inviteToProject + .calledWith( + ctx.projectId, + ctx.currentUser, + ctx.targetEmail, + ctx.privileges + ) + .should.equal(true) + }) + + it('should have called emitToRoom', function (ctx) { + ctx.EditorRealTimeController.emitToRoom.callCount.should.equal(1) + ctx.EditorRealTimeController.emitToRoom + .calledWith(ctx.projectId, 'project:membership:changed') + .should.equal(true) + }) + + it('adds a project audit log entry', function (ctx) { + ctx.ProjectAuditLogHandler.addEntryInBackground.should.have.been.calledWith( + ctx.projectId, + 'send-invite', + ctx.currentUser._id, + ctx.req.ip, + { + inviteId: ctx.invite._id, + privileges: ctx.privileges, + } + ) + }) + }) + + describe('when the user is not allowed to add more edit collaborators', function () { + beforeEach(function (ctx) { + ctx.LimitationsManager.promises.canAddXEditCollaborators.resolves(false) + }) + + describe('readAndWrite collaborator', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.privileges = 'readAndWrite' + ctx.CollaboratorsInviteController._checkShouldInviteEmail = sinon + .stub() + .resolves(true) + ctx.CollaboratorsInviteController._checkRateLimit = sinon + .stub() + .resolves(true) + ctx.res.callback = () => resolve() + ctx.CollaboratorsInviteController.inviteToProject( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should produce json response without an invite', function (ctx) { + ctx.res.json.callCount.should.equal(1) + 
expect(ctx.res.json.firstCall.args[0]).to.deep.equal({ + invite: null, + }) + }) + + it('should not have called _checkShouldInviteEmail', function (ctx) { + ctx.CollaboratorsInviteController._checkShouldInviteEmail.callCount.should.equal( + 0 + ) + ctx.CollaboratorsInviteController._checkShouldInviteEmail + .calledWith(ctx.currentUser, ctx.targetEmail) + .should.equal(false) + }) + + it('should not have called inviteToProject', function (ctx) { + ctx.CollaboratorsInviteHandler.promises.inviteToProject.callCount.should.equal( + 0 + ) + }) + }) + + describe('readOnly collaborator (always allowed)', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.req.body = { + email: ctx.targetEmail, + privileges: (ctx.privileges = 'readOnly'), + } + ctx.CollaboratorsInviteController._checkShouldInviteEmail = sinon + .stub() + .resolves(true) + ctx.CollaboratorsInviteController._checkRateLimit = sinon + .stub() + .resolves(true) + ctx.res.callback = () => resolve() + ctx.CollaboratorsInviteController.inviteToProject( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should produce json response', function (ctx) { + ctx.res.json.callCount.should.equal(1) + expect(ctx.res.json.firstCall.args[0]).to.deep.equal({ + invite: ctx.inviteReducedData, + }) + }) + + it('should not have called canAddXEditCollaborators', function (ctx) { + ctx.LimitationsManager.promises.canAddXEditCollaborators.callCount.should.equal( + 0 + ) + }) + + it('should have called _checkShouldInviteEmail', function (ctx) { + ctx.CollaboratorsInviteController._checkShouldInviteEmail.callCount.should.equal( + 1 + ) + ctx.CollaboratorsInviteController._checkShouldInviteEmail + .calledWith(ctx.targetEmail) + .should.equal(true) + }) + + it('should have called inviteToProject', function (ctx) { + ctx.CollaboratorsInviteHandler.promises.inviteToProject.callCount.should.equal( + 1 + ) + ctx.CollaboratorsInviteHandler.promises.inviteToProject + .calledWith( + ctx.projectId, + ctx.currentUser, + ctx.targetEmail, + ctx.privileges + ) + .should.equal(true) + }) + + it('should have called emitToRoom', function (ctx) { + ctx.EditorRealTimeController.emitToRoom.callCount.should.equal(1) + ctx.EditorRealTimeController.emitToRoom + .calledWith(ctx.projectId, 'project:membership:changed') + .should.equal(true) + }) + + it('adds a project audit log entry', function (ctx) { + ctx.ProjectAuditLogHandler.addEntryInBackground.should.have.been.calledWith( + ctx.projectId, + 'send-invite', + ctx.currentUser._id, + ctx.req.ip, + { + inviteId: ctx.invite._id, + privileges: ctx.privileges, + } + ) + }) + }) + }) + + describe('when inviteToProject produces an error', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.CollaboratorsInviteController._checkShouldInviteEmail = sinon + .stub() + .resolves(true) + ctx.CollaboratorsInviteController._checkRateLimit = sinon + .stub() + .resolves(true) + ctx.CollaboratorsInviteHandler.promises.inviteToProject.rejects( + new Error('woops') + ) + ctx.next.callsFake(() => resolve()) + ctx.CollaboratorsInviteController.inviteToProject( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should call next with an error', function (ctx) { + ctx.next.callCount.should.equal(1) + expect(ctx.next).to.have.been.calledWith(sinon.match.instanceOf(Error)) + }) + + it('should have called canAddXEditCollaborators', function (ctx) { + ctx.LimitationsManager.promises.canAddXEditCollaborators.callCount.should.equal( + 1 + ) + 
ctx.LimitationsManager.promises.canAddXEditCollaborators + .calledWith(ctx.projectId) + .should.equal(true) + }) + + it('should have called _checkShouldInviteEmail', function (ctx) { + ctx.CollaboratorsInviteController._checkShouldInviteEmail.callCount.should.equal( + 1 + ) + ctx.CollaboratorsInviteController._checkShouldInviteEmail + .calledWith(ctx.targetEmail) + .should.equal(true) + }) + + it('should have called inviteToProject', function (ctx) { + ctx.CollaboratorsInviteHandler.promises.inviteToProject.callCount.should.equal( + 1 + ) + ctx.CollaboratorsInviteHandler.promises.inviteToProject + .calledWith( + ctx.projectId, + ctx.currentUser, + ctx.targetEmail, + ctx.privileges + ) + .should.equal(true) + }) + }) + + describe('when _checkShouldInviteEmail disallows the invite', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.CollaboratorsInviteController._checkShouldInviteEmail = sinon + .stub() + .resolves(false) + ctx.CollaboratorsInviteController._checkRateLimit = sinon + .stub() + .resolves(true) + ctx.res.callback = () => resolve() + ctx.CollaboratorsInviteController.inviteToProject( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should produce json response with no invite, and an error property', function (ctx) { + ctx.res.json.callCount.should.equal(1) + expect(ctx.res.json.firstCall.args[0]).to.deep.equal({ + invite: null, + error: 'cannot_invite_non_user', + }) + }) + + it('should have called _checkShouldInviteEmail', function (ctx) { + ctx.CollaboratorsInviteController._checkShouldInviteEmail.callCount.should.equal( + 1 + ) + ctx.CollaboratorsInviteController._checkShouldInviteEmail + .calledWith(ctx.targetEmail) + .should.equal(true) + }) + + it('should not have called inviteToProject', function (ctx) { + ctx.CollaboratorsInviteHandler.promises.inviteToProject.callCount.should.equal( + 0 + ) + }) + }) + + describe('when _checkShouldInviteEmail produces an error', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.CollaboratorsInviteController._checkShouldInviteEmail = sinon + .stub() + .rejects(new Error('woops')) + ctx.CollaboratorsInviteController._checkRateLimit = sinon + .stub() + .resolves(true) + ctx.next.callsFake(() => resolve()) + ctx.CollaboratorsInviteController.inviteToProject( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should call next with an error', function (ctx) { + ctx.next.callCount.should.equal(1) + ctx.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) + }) + + it('should have called _checkShouldInviteEmail', function (ctx) { + ctx.CollaboratorsInviteController._checkShouldInviteEmail.callCount.should.equal( + 1 + ) + ctx.CollaboratorsInviteController._checkShouldInviteEmail + .calledWith(ctx.targetEmail) + .should.equal(true) + }) + + it('should not have called inviteToProject', function (ctx) { + ctx.CollaboratorsInviteHandler.promises.inviteToProject.callCount.should.equal( + 0 + ) + }) + }) + + describe('when the user invites themselves to the project', function () { + beforeEach(function (ctx) { + ctx.req.body.email = ctx.currentUser.email + ctx.CollaboratorsInviteController._checkShouldInviteEmail = sinon + .stub() + .resolves(true) + ctx.CollaboratorsInviteController._checkRateLimit = sinon + .stub() + .resolves(true) + ctx.CollaboratorsInviteController.inviteToProject( + ctx.req, + ctx.res, + ctx.next + ) + }) + + it('should reject action, return json response with error code', function (ctx) { + ctx.res.json.callCount.should.equal(1) + 
expect(ctx.res.json.firstCall.args[0]).to.deep.equal({ + invite: null, + error: 'cannot_invite_self', + }) + }) + + it('should not have called canAddXEditCollaborators', function (ctx) { + ctx.LimitationsManager.promises.canAddXEditCollaborators.callCount.should.equal( + 0 + ) + }) + + it('should not have called _checkShouldInviteEmail', function (ctx) { + ctx.CollaboratorsInviteController._checkShouldInviteEmail.callCount.should.equal( + 0 + ) + }) + + it('should not have called inviteToProject', function (ctx) { + ctx.CollaboratorsInviteHandler.promises.inviteToProject.callCount.should.equal( + 0 + ) + }) + + it('should not have called emitToRoom', function (ctx) { + ctx.EditorRealTimeController.emitToRoom.callCount.should.equal(0) + }) + }) + + describe('when _checkRateLimit returns false', function () { + beforeEach(async function (ctx) { + ctx.CollaboratorsInviteController._checkShouldInviteEmail = sinon + .stub() + .resolves(true) + ctx.CollaboratorsInviteController._checkRateLimit = sinon + .stub() + .resolves(false) + await ctx.CollaboratorsInviteController.inviteToProject( + ctx.req, + ctx.res, + ctx.next + ) + }) + + it('should send a 429 response', function (ctx) { + ctx.res.sendStatus.calledWith(429).should.equal(true) + }) + + it('should not call inviteToProject', function (ctx) { + ctx.CollaboratorsInviteHandler.promises.inviteToProject.called.should.equal( + false + ) + }) + + it('should not call emitToRoom', function (ctx) { + ctx.EditorRealTimeController.emitToRoom.called.should.equal(false) + }) + }) + }) + + describe('viewInvite', function () { + beforeEach(function (ctx) { + ctx.req.params = { + Project_id: ctx.projectId, + token: ctx.token, + } + ctx.fakeProject = { + _id: ctx.projectId, + name: 'some project', + owner_ref: ctx.invite.sendingUserId, + collaberator_refs: [], + readOnly_refs: [], + } + ctx.owner = { + _id: ctx.fakeProject.owner_ref, + first_name: 'John', + last_name: 'Doe', + email: 'john@example.com', + } + + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.resolves( + false + ) + ctx.CollaboratorsInviteGetter.promises.getInviteByToken.resolves( + ctx.invite + ) + ctx.ProjectGetter.promises.getProject.resolves(ctx.fakeProject) + ctx.UserGetter.promises.getUser.resolves(ctx.owner) + }) + + describe('when the token is valid', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => resolve() + ctx.CollaboratorsInviteController.viewInvite( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should render the view template', function (ctx) { + ctx.res.render.callCount.should.equal(1) + ctx.res.render.calledWith('project/invite/show').should.equal(true) + }) + + it('should not call next', function (ctx) { + ctx.next.callCount.should.equal(0) + }) + + it('should call CollaboratorsGetter.isUserInvitedMemberOfProject', function (ctx) { + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.callCount.should.equal( + 1 + ) + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject + .calledWith(ctx.currentUser._id, ctx.projectId) + .should.equal(true) + }) + + it('should call getInviteByToken', function (ctx) { + ctx.CollaboratorsInviteGetter.promises.getInviteByToken.callCount.should.equal( + 1 + ) + ctx.CollaboratorsInviteGetter.promises.getInviteByToken + .calledWith(ctx.fakeProject._id, ctx.invite.token) + .should.equal(true) + }) + + it('should call User.getUser', function (ctx) { + ctx.UserGetter.promises.getUser.callCount.should.equal(1) + ctx.UserGetter.promises.getUser + 
.calledWith({ _id: ctx.fakeProject.owner_ref }) + .should.equal(true) + }) + + it('should call ProjectGetter.getProject', function (ctx) { + ctx.ProjectGetter.promises.getProject.callCount.should.equal(1) + ctx.ProjectGetter.promises.getProject + .calledWith(ctx.projectId) + .should.equal(true) + }) + }) + + describe('when not logged in', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.SessionManager.getSessionUser.returns(null) + + ctx.res.callback = () => resolve() + ctx.CollaboratorsInviteController.viewInvite( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + it('should not check member status', function (ctx) { + expect(ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject).to + .not.have.been.called + }) + + it('should set redirect back to invite', function (ctx) { + expect( + ctx.AuthenticationController.setRedirectInSession + ).to.have.been.calledWith(ctx.req) + }) + + it('should redirect to the register page', function (ctx) { + expect(ctx.res.render).to.not.have.been.called + expect(ctx.res.redirect).to.have.been.calledOnce + expect(ctx.res.redirect).to.have.been.calledWith('/register') + }) + }) + + describe('when user is already a member of the project', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.resolves( + true + ) + ctx.res.callback = () => resolve() + ctx.CollaboratorsInviteController.viewInvite( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should redirect to the project page', function (ctx) { + ctx.res.redirect.callCount.should.equal(1) + ctx.res.redirect + .calledWith(`/project/${ctx.projectId}`) + .should.equal(true) + }) + + it('should not call next with an error', function (ctx) { + ctx.next.callCount.should.equal(0) + }) + + it('should call CollaboratorsGetter.isUserInvitedMemberOfProject', function (ctx) { + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.callCount.should.equal( + 1 + ) + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject + .calledWith(ctx.currentUser._id, ctx.projectId) + .should.equal(true) + }) + + it('should not call getInviteByToken', function (ctx) { + ctx.CollaboratorsInviteGetter.promises.getInviteByToken.callCount.should.equal( + 0 + ) + }) + + it('should not call User.getUser', function (ctx) { + ctx.UserGetter.promises.getUser.callCount.should.equal(0) + }) + + it('should not call ProjectGetter.getProject', function (ctx) { + ctx.ProjectGetter.promises.getProject.callCount.should.equal(0) + }) + }) + + describe('when isUserInvitedMemberOfProject produces an error', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.rejects( + new Error('woops') + ) + ctx.next.callsFake(() => resolve()) + ctx.CollaboratorsInviteController.viewInvite( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should call next with an error', function (ctx) { + ctx.next.callCount.should.equal(1) + expect(ctx.next.firstCall.args[0]).to.be.instanceof(Error) + }) + + it('should call CollaboratorsGetter.isUserInvitedMemberOfProject', function (ctx) { + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.callCount.should.equal( + 1 + ) + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject + .calledWith(ctx.currentUser._id, ctx.projectId) + .should.equal(true) + }) + + it('should not call getInviteByToken', function (ctx) { + 
ctx.CollaboratorsInviteGetter.promises.getInviteByToken.callCount.should.equal( + 0 + ) + }) + + it('should not call User.getUser', function (ctx) { + ctx.UserGetter.promises.getUser.callCount.should.equal(0) + }) + + it('should not call ProjectGetter.getProject', function (ctx) { + ctx.ProjectGetter.promises.getProject.callCount.should.equal(0) + }) + }) + + describe('when the getInviteByToken produces an error', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.CollaboratorsInviteGetter.promises.getInviteByToken.rejects( + new Error('woops') + ) + ctx.next.callsFake(() => resolve()) + ctx.CollaboratorsInviteController.viewInvite( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should call next with the error', function (ctx) { + ctx.next.callCount.should.equal(1) + ctx.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) + }) + + it('should call CollaboratorsGetter.isUserInvitedMemberOfProject', function (ctx) { + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.callCount.should.equal( + 1 + ) + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject + .calledWith(ctx.currentUser._id, ctx.projectId) + .should.equal(true) + }) + + it('should call getInviteByToken', function (ctx) { + ctx.CollaboratorsInviteGetter.promises.getInviteByToken.callCount.should.equal( + 1 + ) + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject + .calledWith(ctx.currentUser._id, ctx.projectId) + .should.equal(true) + }) + + it('should not call User.getUser', function (ctx) { + ctx.UserGetter.promises.getUser.callCount.should.equal(0) + }) + + it('should not call ProjectGetter.getProject', function (ctx) { + ctx.ProjectGetter.promises.getProject.callCount.should.equal(0) + }) + }) + + describe('when the getInviteByToken does not produce an invite', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.CollaboratorsInviteGetter.promises.getInviteByToken.resolves(null) + ctx.res.callback = () => resolve() + ctx.CollaboratorsInviteController.viewInvite( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should render the not-valid view template', function (ctx) { + ctx.res.render.callCount.should.equal(1) + ctx.res.render.calledWith('project/invite/not-valid').should.equal(true) + }) + + it('should not call next', function (ctx) { + ctx.next.callCount.should.equal(0) + }) + + it('should call CollaboratorsGetter.isUserInvitedMemberOfProject', function (ctx) { + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.callCount.should.equal( + 1 + ) + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject + .calledWith(ctx.currentUser._id, ctx.projectId) + .should.equal(true) + }) + + it('should call getInviteByToken', function (ctx) { + ctx.CollaboratorsInviteGetter.promises.getInviteByToken.callCount.should.equal( + 1 + ) + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject + .calledWith(ctx.currentUser._id, ctx.projectId) + .should.equal(true) + }) + + it('should not call User.getUser', function (ctx) { + ctx.UserGetter.promises.getUser.callCount.should.equal(0) + }) + + it('should not call ProjectGetter.getProject', function (ctx) { + ctx.ProjectGetter.promises.getProject.callCount.should.equal(0) + }) + }) + + describe('when User.getUser produces an error', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.UserGetter.promises.getUser.rejects(new Error('woops')) + ctx.next.callsFake(() => resolve()) + 
ctx.CollaboratorsInviteController.viewInvite( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should produce an error', function (ctx) { + ctx.next.callCount.should.equal(1) + expect(ctx.next.firstCall.args[0]).to.be.instanceof(Error) + }) + + it('should call CollaboratorsGetter.isUserInvitedMemberOfProject', function (ctx) { + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.callCount.should.equal( + 1 + ) + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject + .calledWith(ctx.currentUser._id, ctx.projectId) + .should.equal(true) + }) + + it('should call getInviteByToken', function (ctx) { + ctx.CollaboratorsInviteGetter.promises.getInviteByToken.callCount.should.equal( + 1 + ) + }) + + it('should call User.getUser', function (ctx) { + ctx.UserGetter.promises.getUser.callCount.should.equal(1) + ctx.UserGetter.promises.getUser + .calledWith({ _id: ctx.fakeProject.owner_ref }) + .should.equal(true) + }) + + it('should not call ProjectGetter.getProject', function (ctx) { + ctx.ProjectGetter.promises.getProject.callCount.should.equal(0) + }) + }) + + describe('when User.getUser does not find a user', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.UserGetter.promises.getUser.resolves(null) + ctx.res.callback = () => resolve() + ctx.CollaboratorsInviteController.viewInvite( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should render the not-valid view template', function (ctx) { + ctx.res.render.callCount.should.equal(1) + ctx.res.render.calledWith('project/invite/not-valid').should.equal(true) + }) + + it('should not call next', function (ctx) { + ctx.next.callCount.should.equal(0) + }) + + it('should call CollaboratorsGetter.isUserInvitedMemberOfProject', function (ctx) { + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.callCount.should.equal( + 1 + ) + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject + .calledWith(ctx.currentUser._id, ctx.projectId) + .should.equal(true) + }) + + it('should call getInviteByToken', function (ctx) { + ctx.CollaboratorsInviteGetter.promises.getInviteByToken.callCount.should.equal( + 1 + ) + }) + + it('should call User.getUser', function (ctx) { + ctx.UserGetter.promises.getUser.callCount.should.equal(1) + ctx.UserGetter.promises.getUser + .calledWith({ _id: ctx.fakeProject.owner_ref }) + .should.equal(true) + }) + + it('should not call ProjectGetter.getProject', function (ctx) { + ctx.ProjectGetter.promises.getProject.callCount.should.equal(0) + }) + }) + + describe('when getProject produces an error', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.ProjectGetter.promises.getProject.rejects(new Error('woops')) + ctx.next.callsFake(() => resolve()) + ctx.CollaboratorsInviteController.viewInvite( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should produce an error', function (ctx) { + ctx.next.callCount.should.equal(1) + expect(ctx.next.firstCall.args[0]).to.be.instanceof(Error) + }) + + it('should call CollaboratorsGetter.isUserInvitedMemberOfProject', function (ctx) { + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.callCount.should.equal( + 1 + ) + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject + .calledWith(ctx.currentUser._id, ctx.projectId) + .should.equal(true) + }) + + it('should call getInviteByToken', function (ctx) { + ctx.CollaboratorsInviteGetter.promises.getInviteByToken.callCount.should.equal( + 1 + ) + }) + + it('should call User.getUser', function (ctx) { + 
ctx.UserGetter.promises.getUser.callCount.should.equal(1) + ctx.UserGetter.promises.getUser + .calledWith({ _id: ctx.fakeProject.owner_ref }) + .should.equal(true) + }) + + it('should call ProjectGetter.getProject', function (ctx) { + ctx.ProjectGetter.promises.getProject.callCount.should.equal(1) + }) + }) + + describe('when Project.getUser does not find a user', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.ProjectGetter.promises.getProject.resolves(null) + ctx.res.callback = () => resolve() + ctx.CollaboratorsInviteController.viewInvite( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should render the not-valid view template', function (ctx) { + ctx.res.render.callCount.should.equal(1) + ctx.res.render.calledWith('project/invite/not-valid').should.equal(true) + }) + + it('should not call next', function (ctx) { + ctx.next.callCount.should.equal(0) + }) + + it('should call CollaboratorsGetter.isUserInvitedMemberOfProject', function (ctx) { + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.callCount.should.equal( + 1 + ) + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject + .calledWith(ctx.currentUser._id, ctx.projectId) + .should.equal(true) + }) + + it('should call getInviteByToken', function (ctx) { + ctx.CollaboratorsInviteGetter.promises.getInviteByToken.callCount.should.equal( + 1 + ) + }) + + it('should call getUser', function (ctx) { + ctx.UserGetter.promises.getUser.callCount.should.equal(1) + ctx.UserGetter.promises.getUser + .calledWith({ _id: ctx.fakeProject.owner_ref }) + .should.equal(true) + }) + + it('should call ProjectGetter.getProject', function (ctx) { + ctx.ProjectGetter.promises.getProject.callCount.should.equal(1) + }) + }) + }) + + describe('generateNewInvite', function () { + beforeEach(function (ctx) { + ctx.req.params = { + Project_id: ctx.projectId, + invite_id: ctx.invite._id.toString(), + } + ctx.CollaboratorsInviteController._checkRateLimit = sinon + .stub() + .resolves(true) + }) + + describe('when generateNewInvite does not produce an error', function () { + describe('and returns an invite object', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => resolve() + ctx.CollaboratorsInviteController.generateNewInvite( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should produce a 201 response', function (ctx) { + ctx.res.sendStatus.callCount.should.equal(1) + ctx.res.sendStatus.calledWith(201).should.equal(true) + }) + + it('should have called generateNewInvite', function (ctx) { + ctx.CollaboratorsInviteHandler.promises.generateNewInvite.callCount.should.equal( + 1 + ) + }) + + it('should have called emitToRoom', function (ctx) { + ctx.EditorRealTimeController.emitToRoom.callCount.should.equal(1) + ctx.EditorRealTimeController.emitToRoom + .calledWith(ctx.projectId, 'project:membership:changed') + .should.equal(true) + }) + + it('should check the rate limit', function (ctx) { + ctx.CollaboratorsInviteController._checkRateLimit.callCount.should.equal( + 1 + ) + }) + + it('should add a project audit log entry', function (ctx) { + ctx.ProjectAuditLogHandler.addEntryInBackground.should.have.been.calledWith( + ctx.projectId, + 'resend-invite', + ctx.currentUser._id, + ctx.req.ip, + { + inviteId: ctx.invite._id, + privileges: ctx.privileges, + } + ) + }) + }) + + describe('and returns a null invite', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + 
ctx.CollaboratorsInviteHandler.promises.generateNewInvite.resolves( + null + ) + ctx.res.callback = () => resolve() + ctx.CollaboratorsInviteController.generateNewInvite( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should have called emitToRoom', function (ctx) { + ctx.EditorRealTimeController.emitToRoom.callCount.should.equal(1) + ctx.EditorRealTimeController.emitToRoom + .calledWith(ctx.projectId, 'project:membership:changed') + .should.equal(true) + }) + + it('should produce a 404 response when invite is null', function (ctx) { + ctx.res.sendStatus.callCount.should.equal(1) + ctx.res.sendStatus.should.have.been.calledWith(404) + }) + }) + }) + + describe('when generateNewInvite produces an error', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.CollaboratorsInviteHandler.promises.generateNewInvite.rejects( + new Error('woops') + ) + ctx.next.callsFake(() => resolve()) + ctx.CollaboratorsInviteController.generateNewInvite( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should not produce a 201 response', function (ctx) { + ctx.res.sendStatus.callCount.should.equal(0) + }) + + it('should call next with the error', function (ctx) { + ctx.next.callCount.should.equal(1) + ctx.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) + }) + + it('should have called generateNewInvite', function (ctx) { + ctx.CollaboratorsInviteHandler.promises.generateNewInvite.callCount.should.equal( + 1 + ) + }) + }) + }) + + describe('revokeInvite', function () { + beforeEach(function (ctx) { + ctx.req.params = { + Project_id: ctx.projectId, + invite_id: ctx.invite._id.toString(), + } + }) + + describe('when revokeInvite does not produce an error', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => resolve() + ctx.CollaboratorsInviteController.revokeInvite( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should produce a 204 response', function (ctx) { + ctx.res.sendStatus.callCount.should.equal(1) + ctx.res.sendStatus.should.have.been.calledWith(204) + }) + + it('should have called revokeInvite', function (ctx) { + ctx.CollaboratorsInviteHandler.promises.revokeInvite.callCount.should.equal( + 1 + ) + }) + + it('should have called emitToRoom', function (ctx) { + ctx.EditorRealTimeController.emitToRoom.callCount.should.equal(1) + ctx.EditorRealTimeController.emitToRoom + .calledWith(ctx.projectId, 'project:membership:changed') + .should.equal(true) + }) + + it('should add a project audit log entry', function (ctx) { + ctx.ProjectAuditLogHandler.addEntryInBackground.should.have.been.calledWith( + ctx.projectId, + 'revoke-invite', + ctx.currentUser._id, + ctx.req.ip, + { + inviteId: ctx.invite._id, + privileges: ctx.privileges, + } + ) + }) + }) + + describe('when revokeInvite produces an error', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.CollaboratorsInviteHandler.promises.revokeInvite.rejects( + new Error('woops') + ) + ctx.next.callsFake(() => resolve()) + ctx.CollaboratorsInviteController.revokeInvite( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should not produce a 201 response', function (ctx) { + ctx.res.sendStatus.callCount.should.equal(0) + }) + + it('should call next with the error', function (ctx) { + ctx.next.callCount.should.equal(1) + ctx.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) + }) + + it('should have called revokeInvite', function (ctx) { + 
ctx.CollaboratorsInviteHandler.promises.revokeInvite.callCount.should.equal( + 1 + ) + }) + }) + }) + + describe('acceptInvite', function () { + beforeEach(function (ctx) { + ctx.req.params = { + Project_id: ctx.projectId, + token: ctx.token, + } + }) + + describe('when acceptInvite does not produce an error', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => resolve() + ctx.CollaboratorsInviteController.acceptInvite( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should redirect to project page', function (ctx) { + ctx.res.redirect.should.have.been.calledOnce + ctx.res.redirect.should.have.been.calledWith( + `/project/${ctx.projectId}` + ) + }) + + it('should have called acceptInvite', function (ctx) { + ctx.CollaboratorsInviteHandler.promises.acceptInvite.should.have.been.calledWith( + ctx.invite, + ctx.projectId, + ctx.currentUser + ) + }) + + it('should have called emitToRoom', function (ctx) { + ctx.EditorRealTimeController.emitToRoom.should.have.been.calledOnce + ctx.EditorRealTimeController.emitToRoom.should.have.been.calledWith( + ctx.projectId, + 'project:membership:changed' + ) + }) + + it('should add a project audit log entry', function (ctx) { + ctx.ProjectAuditLogHandler.promises.addEntry.should.have.been.calledWith( + ctx.projectId, + 'accept-invite', + ctx.currentUser._id, + ctx.req.ip, + { + inviteId: ctx.invite._id, + privileges: ctx.privileges, + } + ) + }) + }) + + describe('when the invite is not found', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.CollaboratorsInviteGetter.promises.getInviteByToken.resolves(null) + ctx.next.callsFake(() => resolve()) + ctx.CollaboratorsInviteController.acceptInvite( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('throws a NotFoundError', function (ctx) { + expect(ctx.next).to.have.been.calledWith( + sinon.match.instanceOf(Errors.NotFoundError) + ) + }) + }) + + describe('when acceptInvite produces an error', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.CollaboratorsInviteHandler.promises.acceptInvite.rejects( + new Error('woops') + ) + ctx.next.callsFake(() => resolve()) + ctx.CollaboratorsInviteController.acceptInvite( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should not redirect to project page', function (ctx) { + ctx.res.redirect.callCount.should.equal(0) + }) + + it('should call next with the error', function (ctx) { + ctx.next.callCount.should.equal(1) + ctx.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) + }) + + it('should have called acceptInvite', function (ctx) { + ctx.CollaboratorsInviteHandler.promises.acceptInvite.callCount.should.equal( + 1 + ) + }) + }) + + describe('when the project audit log entry fails', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.ProjectAuditLogHandler.promises.addEntry.rejects( + new Error('oops') + ) + ctx.next.callsFake(() => resolve()) + ctx.CollaboratorsInviteController.acceptInvite( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('should not accept the invite', function (ctx) { + ctx.CollaboratorsInviteHandler.promises.acceptInvite.should.not.have + .been.called + }) + }) + }) + + describe('_checkShouldInviteEmail', function () { + beforeEach(function (ctx) { + ctx.email = 'user@example.com' + }) + + describe('when we should be restricting to existing accounts', function () { + beforeEach(function (ctx) { + ctx.settings.restrictInvitesToExistingAccounts = true + ctx.call = 
() => + ctx.CollaboratorsInviteController._checkShouldInviteEmail(ctx.email) + }) + + describe('when user account is present', function () { + beforeEach(function (ctx) { + ctx.user = { _id: new ObjectId().toString() } + ctx.UserGetter.promises.getUserByAnyEmail.resolves(ctx.user) + }) + + it('should callback with `true`', async function (ctx) { + const shouldAllow = + await ctx.CollaboratorsInviteController._checkShouldInviteEmail( + ctx.email + ) + expect(shouldAllow).to.equal(true) + }) + }) + + describe('when user account is absent', function () { + beforeEach(function (ctx) { + ctx.user = null + ctx.UserGetter.promises.getUserByAnyEmail.resolves(ctx.user) + }) + + it('should callback with `false`', async function (ctx) { + const shouldAllow = + await ctx.CollaboratorsInviteController._checkShouldInviteEmail( + ctx.email + ) + expect(shouldAllow).to.equal(false) + }) + + it('should have called getUser', async function (ctx) { + await ctx.CollaboratorsInviteController._checkShouldInviteEmail( + ctx.email + ) + ctx.UserGetter.promises.getUserByAnyEmail.callCount.should.equal(1) + ctx.UserGetter.promises.getUserByAnyEmail + .calledWith(ctx.email, { _id: 1 }) + .should.equal(true) + }) + }) + + describe('when getUser produces an error', function () { + beforeEach(function (ctx) { + ctx.user = null + ctx.UserGetter.promises.getUserByAnyEmail.rejects(new Error('woops')) + }) + + it('should callback with an error', async function (ctx) { + await expect( + ctx.CollaboratorsInviteController._checkShouldInviteEmail(ctx.email) + ).to.be.rejected + }) + }) + }) + }) + + describe('_checkRateLimit', function () { + beforeEach(function (ctx) { + ctx.settings.restrictInvitesToExistingAccounts = false + ctx.currentUserId = '32312313' + ctx.LimitationsManager.promises.allowedNumberOfCollaboratorsForUser + .withArgs(ctx.currentUserId) + .resolves(17) + }) + + it('should callback with `true` when rate limit under', async function (ctx) { + const result = await ctx.CollaboratorsInviteController._checkRateLimit( + ctx.currentUserId + ) + expect(ctx.rateLimiter.consume).to.have.been.calledWith(ctx.currentUserId) + result.should.equal(true) + }) + + it('should callback with `false` when rate limit hit', async function (ctx) { + ctx.rateLimiter.consume.rejects({ remainingPoints: 0 }) + const result = await ctx.CollaboratorsInviteController._checkRateLimit( + ctx.currentUserId + ) + expect(ctx.rateLimiter.consume).to.have.been.calledWith(ctx.currentUserId) + result.should.equal(false) + }) + + it('should allow 10x the collaborators', async function (ctx) { + await ctx.CollaboratorsInviteController._checkRateLimit(ctx.currentUserId) + expect(ctx.rateLimiter.consume).to.have.been.calledWith( + ctx.currentUserId, + Math.floor(40000 / 170) + ) + }) + + it('should allow 200 requests when collaborators is -1', async function (ctx) { + ctx.LimitationsManager.promises.allowedNumberOfCollaboratorsForUser + .withArgs(ctx.currentUserId) + .resolves(-1) + await ctx.CollaboratorsInviteController._checkRateLimit(ctx.currentUserId) + expect(ctx.rateLimiter.consume).to.have.been.calledWith( + ctx.currentUserId, + Math.floor(40000 / 200) + ) + }) + + it('should allow 10 requests when user has no collaborators set', async function (ctx) { + ctx.LimitationsManager.promises.allowedNumberOfCollaboratorsForUser + .withArgs(ctx.currentUserId) + .resolves(null) + await ctx.CollaboratorsInviteController._checkRateLimit(ctx.currentUserId) + expect(ctx.rateLimiter.consume).to.have.been.calledWith( + ctx.currentUserId, + Math.floor(40000 
/ 10) + ) + }) + }) +}) diff --git a/services/web/test/unit/src/Collaborators/CollaboratorsInviteControllerTests.mjs b/services/web/test/unit/src/Collaborators/CollaboratorsInviteControllerTests.mjs deleted file mode 100644 index 3e7d4c3daa..0000000000 --- a/services/web/test/unit/src/Collaborators/CollaboratorsInviteControllerTests.mjs +++ /dev/null @@ -1,1578 +0,0 @@ -import sinon from 'sinon' -import { expect } from 'chai' -import esmock from 'esmock' -import MockRequest from '../helpers/MockRequest.js' -import MockResponse from '../helpers/MockResponse.js' -import mongodb from 'mongodb-legacy' -import Errors from '../../../../app/src/Features/Errors/Errors.js' -import _ from 'lodash' - -const ObjectId = mongodb.ObjectId - -const MODULE_PATH = - '../../../../app/src/Features/Collaborators/CollaboratorsInviteController.mjs' - -describe('CollaboratorsInviteController', function () { - beforeEach(async function () { - this.projectId = 'project-id-123' - this.token = 'some-opaque-token' - this.tokenHmac = 'some-hmac-token' - this.targetEmail = 'user@example.com' - this.privileges = 'readAndWrite' - this.projectOwner = { - _id: 'project-owner-id', - email: 'project-owner@example.com', - } - this.currentUser = { - _id: 'current-user-id', - email: 'current-user@example.com', - } - this.invite = { - _id: new ObjectId(), - token: this.token, - tokenHmac: this.tokenHmac, - sendingUserId: this.currentUser._id, - projectId: this.projectId, - email: this.targetEmail, - privileges: this.privileges, - createdAt: new Date(), - } - this.inviteReducedData = _.pick(this.invite, ['_id', 'email', 'privileges']) - this.project = { - _id: this.projectId, - owner_ref: this.projectOwner._id, - } - - this.SessionManager = { - getSessionUser: sinon.stub().returns(this.currentUser), - } - - this.AnalyticsManger = { recordEventForUserInBackground: sinon.stub() } - - this.rateLimiter = { - consume: sinon.stub().resolves(), - } - this.RateLimiter = { - RateLimiter: sinon.stub().returns(this.rateLimiter), - } - - this.LimitationsManager = { - promises: { - allowedNumberOfCollaboratorsForUser: sinon.stub(), - canAddXEditCollaborators: sinon.stub().resolves(true), - }, - } - - this.UserGetter = { - promises: { - getUserByAnyEmail: sinon.stub(), - getUser: sinon.stub(), - }, - } - - this.ProjectGetter = { - promises: { - getProject: sinon.stub(), - }, - } - - this.CollaboratorsGetter = { - promises: { - isUserInvitedMemberOfProject: sinon.stub(), - }, - } - - this.CollaboratorsInviteHandler = { - promises: { - inviteToProject: sinon.stub().resolves(this.inviteReducedData), - generateNewInvite: sinon.stub().resolves(this.invite), - revokeInvite: sinon.stub().resolves(this.invite), - acceptInvite: sinon.stub(), - }, - } - - this.CollaboratorsInviteGetter = { - promises: { - getAllInvites: sinon.stub(), - getInviteByToken: sinon.stub().resolves(this.invite), - }, - } - - this.EditorRealTimeController = { - emitToRoom: sinon.stub(), - } - - this.settings = {} - - this.ProjectAuditLogHandler = { - promises: { - addEntry: sinon.stub().resolves(), - }, - addEntryInBackground: sinon.stub(), - } - - this.AuthenticationController = { - setRedirectInSession: sinon.stub(), - } - - this.SplitTestHandler = { - promises: { - getAssignment: sinon.stub().resolves({ variant: 'default' }), - getAssignmentForUser: sinon.stub().resolves({ variant: 'default' }), - }, - } - - this.CollaboratorsInviteController = await esmock.strict(MODULE_PATH, { - '../../../../app/src/Features/Project/ProjectGetter.js': - this.ProjectGetter, - 
'../../../../app/src/Features/Project/ProjectAuditLogHandler.js': - this.ProjectAuditLogHandler, - '../../../../app/src/Features/Subscription/LimitationsManager.js': - this.LimitationsManager, - '../../../../app/src/Features/User/UserGetter.js': this.UserGetter, - '../../../../app/src/Features/Collaborators/CollaboratorsGetter.js': - this.CollaboratorsGetter, - '../../../../app/src/Features/Collaborators/CollaboratorsInviteHandler.mjs': - this.CollaboratorsInviteHandler, - '../../../../app/src/Features/Collaborators/CollaboratorsInviteGetter.js': - this.CollaboratorsInviteGetter, - '../../../../app/src/Features/Editor/EditorRealTimeController.js': - this.EditorRealTimeController, - '../../../../app/src/Features/Analytics/AnalyticsManager.js': - this.AnalyticsManger, - '../../../../app/src/Features/Authentication/SessionManager.js': - this.SessionManager, - '@overleaf/settings': this.settings, - '../../../../app/src/infrastructure/RateLimiter': this.RateLimiter, - '../../../../app/src/Features/Authentication/AuthenticationController': - this.AuthenticationController, - '../../../../app/src/Features/SplitTests/SplitTestHandler': - this.SplitTestHandler, - }) - - this.res = new MockResponse() - this.req = new MockRequest() - this.next = sinon.stub() - }) - - describe('getAllInvites', function () { - beforeEach(function () { - this.fakeInvites = [ - { _id: new ObjectId(), one: 1 }, - { _id: new ObjectId(), two: 2 }, - ] - this.req.params = { Project_id: this.projectId } - }) - - describe('when all goes well', function () { - beforeEach(function (done) { - this.CollaboratorsInviteGetter.promises.getAllInvites.resolves( - this.fakeInvites - ) - this.res.callback = () => done() - this.CollaboratorsInviteController.getAllInvites( - this.req, - this.res, - this.next - ) - }) - - it('should not produce an error', function () { - this.next.callCount.should.equal(0) - }) - - it('should produce a list of invite objects', function () { - this.res.json.callCount.should.equal(1) - this.res.json - .calledWith({ invites: this.fakeInvites }) - .should.equal(true) - }) - - it('should have called CollaboratorsInviteHandler.getAllInvites', function () { - this.CollaboratorsInviteGetter.promises.getAllInvites.callCount.should.equal( - 1 - ) - this.CollaboratorsInviteGetter.promises.getAllInvites - .calledWith(this.projectId) - .should.equal(true) - }) - }) - - describe('when CollaboratorsInviteHandler.getAllInvites produces an error', function () { - beforeEach(function (done) { - this.CollaboratorsInviteGetter.promises.getAllInvites.rejects( - new Error('woops') - ) - this.next.callsFake(() => done()) - this.CollaboratorsInviteController.getAllInvites( - this.req, - this.res, - this.next - ) - }) - - it('should produce an error', function () { - this.next.callCount.should.equal(1) - this.next.firstCall.args[0].should.be.instanceof(Error) - }) - }) - }) - - describe('inviteToProject', function () { - beforeEach(function () { - this.req.params = { Project_id: this.projectId } - this.req.body = { - email: this.targetEmail, - privileges: this.privileges, - } - this.ProjectGetter.promises.getProject.resolves({ - owner_ref: this.project.owner_ref, - }) - }) - - describe('when all goes well', function (done) { - beforeEach(async function () { - this.CollaboratorsInviteController._checkShouldInviteEmail = sinon - .stub() - .resolves(true) - this.CollaboratorsInviteController._checkRateLimit = sinon - .stub() - .resolves(true) - - await this.CollaboratorsInviteController.inviteToProject( - this.req, - this.res - ) - }) 
- - it('should produce json response', function () { - this.res.json.callCount.should.equal(1) - expect(this.res.json.firstCall.args[0]).to.deep.equal({ - invite: this.inviteReducedData, - }) - }) - - it('should have called canAddXEditCollaborators', function () { - this.LimitationsManager.promises.canAddXEditCollaborators.callCount.should.equal( - 1 - ) - this.LimitationsManager.promises.canAddXEditCollaborators - .calledWith(this.projectId) - .should.equal(true) - }) - - it('should have called _checkShouldInviteEmail', function () { - this.CollaboratorsInviteController._checkShouldInviteEmail.callCount.should.equal( - 1 - ) - - this.CollaboratorsInviteController._checkShouldInviteEmail - .calledWith(this.targetEmail) - .should.equal(true) - }) - - it('should have called inviteToProject', function () { - this.CollaboratorsInviteHandler.promises.inviteToProject.callCount.should.equal( - 1 - ) - this.CollaboratorsInviteHandler.promises.inviteToProject - .calledWith( - this.projectId, - this.currentUser, - this.targetEmail, - this.privileges - ) - .should.equal(true) - }) - - it('should have called emitToRoom', function () { - this.EditorRealTimeController.emitToRoom.callCount.should.equal(1) - this.EditorRealTimeController.emitToRoom - .calledWith(this.projectId, 'project:membership:changed') - .should.equal(true) - }) - - it('adds a project audit log entry', function () { - this.ProjectAuditLogHandler.addEntryInBackground.should.have.been.calledWith( - this.projectId, - 'send-invite', - this.currentUser._id, - this.req.ip, - { - inviteId: this.invite._id, - privileges: this.privileges, - } - ) - }) - }) - - describe('when the user is not allowed to add more edit collaborators', function () { - beforeEach(function () { - this.LimitationsManager.promises.canAddXEditCollaborators.resolves( - false - ) - }) - - describe('readAndWrite collaborator', function () { - beforeEach(function (done) { - this.privileges = 'readAndWrite' - this.CollaboratorsInviteController._checkShouldInviteEmail = sinon - .stub() - .resolves(true) - this.CollaboratorsInviteController._checkRateLimit = sinon - .stub() - .resolves(true) - this.res.callback = () => done() - this.CollaboratorsInviteController.inviteToProject( - this.req, - this.res, - this.next - ) - }) - - it('should produce json response without an invite', function () { - this.res.json.callCount.should.equal(1) - expect(this.res.json.firstCall.args[0]).to.deep.equal({ - invite: null, - }) - }) - - it('should not have called _checkShouldInviteEmail', function () { - this.CollaboratorsInviteController._checkShouldInviteEmail.callCount.should.equal( - 0 - ) - this.CollaboratorsInviteController._checkShouldInviteEmail - .calledWith(this.currentUser, this.targetEmail) - .should.equal(false) - }) - - it('should not have called inviteToProject', function () { - this.CollaboratorsInviteHandler.promises.inviteToProject.callCount.should.equal( - 0 - ) - }) - }) - - describe('readOnly collaborator (always allowed)', function () { - beforeEach(function (done) { - this.req.body = { - email: this.targetEmail, - privileges: (this.privileges = 'readOnly'), - } - this.CollaboratorsInviteController._checkShouldInviteEmail = sinon - .stub() - .resolves(true) - this.CollaboratorsInviteController._checkRateLimit = sinon - .stub() - .resolves(true) - this.res.callback = () => done() - this.CollaboratorsInviteController.inviteToProject( - this.req, - this.res, - this.next - ) - }) - - it('should produce json response', function () { - this.res.json.callCount.should.equal(1) - 
expect(this.res.json.firstCall.args[0]).to.deep.equal({ - invite: this.inviteReducedData, - }) - }) - - it('should not have called canAddXEditCollaborators', function () { - this.LimitationsManager.promises.canAddXEditCollaborators.callCount.should.equal( - 0 - ) - }) - - it('should have called _checkShouldInviteEmail', function () { - this.CollaboratorsInviteController._checkShouldInviteEmail.callCount.should.equal( - 1 - ) - this.CollaboratorsInviteController._checkShouldInviteEmail - .calledWith(this.targetEmail) - .should.equal(true) - }) - - it('should have called inviteToProject', function () { - this.CollaboratorsInviteHandler.promises.inviteToProject.callCount.should.equal( - 1 - ) - this.CollaboratorsInviteHandler.promises.inviteToProject - .calledWith( - this.projectId, - this.currentUser, - this.targetEmail, - this.privileges - ) - .should.equal(true) - }) - - it('should have called emitToRoom', function () { - this.EditorRealTimeController.emitToRoom.callCount.should.equal(1) - this.EditorRealTimeController.emitToRoom - .calledWith(this.projectId, 'project:membership:changed') - .should.equal(true) - }) - - it('adds a project audit log entry', function () { - this.ProjectAuditLogHandler.addEntryInBackground.should.have.been.calledWith( - this.projectId, - 'send-invite', - this.currentUser._id, - this.req.ip, - { - inviteId: this.invite._id, - privileges: this.privileges, - } - ) - }) - }) - }) - - describe('when inviteToProject produces an error', function () { - beforeEach(function (done) { - this.CollaboratorsInviteController._checkShouldInviteEmail = sinon - .stub() - .resolves(true) - this.CollaboratorsInviteController._checkRateLimit = sinon - .stub() - .resolves(true) - this.CollaboratorsInviteHandler.promises.inviteToProject.rejects( - new Error('woops') - ) - this.next.callsFake(() => done()) - this.CollaboratorsInviteController.inviteToProject( - this.req, - this.res, - this.next - ) - }) - - it('should call next with an error', function () { - this.next.callCount.should.equal(1) - expect(this.next).to.have.been.calledWith(sinon.match.instanceOf(Error)) - }) - - it('should have called canAddXEditCollaborators', function () { - this.LimitationsManager.promises.canAddXEditCollaborators.callCount.should.equal( - 1 - ) - this.LimitationsManager.promises.canAddXEditCollaborators - .calledWith(this.projectId) - .should.equal(true) - }) - - it('should have called _checkShouldInviteEmail', function () { - this.CollaboratorsInviteController._checkShouldInviteEmail.callCount.should.equal( - 1 - ) - this.CollaboratorsInviteController._checkShouldInviteEmail - .calledWith(this.targetEmail) - .should.equal(true) - }) - - it('should have called inviteToProject', function () { - this.CollaboratorsInviteHandler.promises.inviteToProject.callCount.should.equal( - 1 - ) - this.CollaboratorsInviteHandler.promises.inviteToProject - .calledWith( - this.projectId, - this.currentUser, - this.targetEmail, - this.privileges - ) - .should.equal(true) - }) - }) - - describe('when _checkShouldInviteEmail disallows the invite', function () { - beforeEach(function (done) { - this.CollaboratorsInviteController._checkShouldInviteEmail = sinon - .stub() - .resolves(false) - this.CollaboratorsInviteController._checkRateLimit = sinon - .stub() - .resolves(true) - this.res.callback = () => done() - this.CollaboratorsInviteController.inviteToProject( - this.req, - this.res, - this.next - ) - }) - - it('should produce json response with no invite, and an error property', function () { - 
this.res.json.callCount.should.equal(1) - expect(this.res.json.firstCall.args[0]).to.deep.equal({ - invite: null, - error: 'cannot_invite_non_user', - }) - }) - - it('should have called _checkShouldInviteEmail', function () { - this.CollaboratorsInviteController._checkShouldInviteEmail.callCount.should.equal( - 1 - ) - this.CollaboratorsInviteController._checkShouldInviteEmail - .calledWith(this.targetEmail) - .should.equal(true) - }) - - it('should not have called inviteToProject', function () { - this.CollaboratorsInviteHandler.promises.inviteToProject.callCount.should.equal( - 0 - ) - }) - }) - - describe('when _checkShouldInviteEmail produces an error', function () { - beforeEach(function (done) { - this.CollaboratorsInviteController._checkShouldInviteEmail = sinon - .stub() - .rejects(new Error('woops')) - this.CollaboratorsInviteController._checkRateLimit = sinon - .stub() - .resolves(true) - this.next.callsFake(() => done()) - this.CollaboratorsInviteController.inviteToProject( - this.req, - this.res, - this.next - ) - }) - - it('should call next with an error', function () { - this.next.callCount.should.equal(1) - this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) - }) - - it('should have called _checkShouldInviteEmail', function () { - this.CollaboratorsInviteController._checkShouldInviteEmail.callCount.should.equal( - 1 - ) - this.CollaboratorsInviteController._checkShouldInviteEmail - .calledWith(this.targetEmail) - .should.equal(true) - }) - - it('should not have called inviteToProject', function () { - this.CollaboratorsInviteHandler.promises.inviteToProject.callCount.should.equal( - 0 - ) - }) - }) - - describe('when the user invites themselves to the project', function () { - beforeEach(function () { - this.req.body.email = this.currentUser.email - this.CollaboratorsInviteController._checkShouldInviteEmail = sinon - .stub() - .resolves(true) - this.CollaboratorsInviteController._checkRateLimit = sinon - .stub() - .resolves(true) - this.CollaboratorsInviteController.inviteToProject( - this.req, - this.res, - this.next - ) - }) - - it('should reject action, return json response with error code', function () { - this.res.json.callCount.should.equal(1) - expect(this.res.json.firstCall.args[0]).to.deep.equal({ - invite: null, - error: 'cannot_invite_self', - }) - }) - - it('should not have called canAddXEditCollaborators', function () { - this.LimitationsManager.promises.canAddXEditCollaborators.callCount.should.equal( - 0 - ) - }) - - it('should not have called _checkShouldInviteEmail', function () { - this.CollaboratorsInviteController._checkShouldInviteEmail.callCount.should.equal( - 0 - ) - }) - - it('should not have called inviteToProject', function () { - this.CollaboratorsInviteHandler.promises.inviteToProject.callCount.should.equal( - 0 - ) - }) - - it('should not have called emitToRoom', function () { - this.EditorRealTimeController.emitToRoom.callCount.should.equal(0) - }) - }) - - describe('when _checkRateLimit returns false', function () { - beforeEach(async function () { - this.CollaboratorsInviteController._checkShouldInviteEmail = sinon - .stub() - .resolves(true) - this.CollaboratorsInviteController._checkRateLimit = sinon - .stub() - .resolves(false) - await this.CollaboratorsInviteController.inviteToProject( - this.req, - this.res, - this.next - ) - }) - - it('should send a 429 response', function () { - this.res.sendStatus.calledWith(429).should.equal(true) - }) - - it('should not call inviteToProject', function () { - 
this.CollaboratorsInviteHandler.promises.inviteToProject.called.should.equal( - false - ) - }) - - it('should not call emitToRoom', function () { - this.EditorRealTimeController.emitToRoom.called.should.equal(false) - }) - }) - }) - - describe('viewInvite', function () { - beforeEach(function () { - this.req.params = { - Project_id: this.projectId, - token: this.token, - } - this.fakeProject = { - _id: this.projectId, - name: 'some project', - owner_ref: this.invite.sendingUserId, - collaberator_refs: [], - readOnly_refs: [], - } - this.owner = { - _id: this.fakeProject.owner_ref, - first_name: 'John', - last_name: 'Doe', - email: 'john@example.com', - } - - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.resolves( - false - ) - this.CollaboratorsInviteGetter.promises.getInviteByToken.resolves( - this.invite - ) - this.ProjectGetter.promises.getProject.resolves(this.fakeProject) - this.UserGetter.promises.getUser.resolves(this.owner) - }) - - describe('when the token is valid', function () { - beforeEach(function (done) { - this.res.callback = () => done() - this.CollaboratorsInviteController.viewInvite( - this.req, - this.res, - this.next - ) - }) - - it('should render the view template', function () { - this.res.render.callCount.should.equal(1) - this.res.render.calledWith('project/invite/show').should.equal(true) - }) - - it('should not call next', function () { - this.next.callCount.should.equal(0) - }) - - it('should call CollaboratorsGetter.isUserInvitedMemberOfProject', function () { - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.callCount.should.equal( - 1 - ) - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject - .calledWith(this.currentUser._id, this.projectId) - .should.equal(true) - }) - - it('should call getInviteByToken', function () { - this.CollaboratorsInviteGetter.promises.getInviteByToken.callCount.should.equal( - 1 - ) - this.CollaboratorsInviteGetter.promises.getInviteByToken - .calledWith(this.fakeProject._id, this.invite.token) - .should.equal(true) - }) - - it('should call User.getUser', function () { - this.UserGetter.promises.getUser.callCount.should.equal(1) - this.UserGetter.promises.getUser - .calledWith({ _id: this.fakeProject.owner_ref }) - .should.equal(true) - }) - - it('should call ProjectGetter.getProject', function () { - this.ProjectGetter.promises.getProject.callCount.should.equal(1) - this.ProjectGetter.promises.getProject - .calledWith(this.projectId) - .should.equal(true) - }) - }) - - describe('when not logged in', function () { - beforeEach(function (done) { - this.SessionManager.getSessionUser.returns(null) - - this.res.callback = () => done() - this.CollaboratorsInviteController.viewInvite( - this.req, - this.res, - this.next - ) - }) - it('should not check member status', function () { - expect(this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject) - .to.not.have.been.called - }) - - it('should set redirect back to invite', function () { - expect( - this.AuthenticationController.setRedirectInSession - ).to.have.been.calledWith(this.req) - }) - - it('should redirect to the register page', function () { - expect(this.res.render).to.not.have.been.called - expect(this.res.redirect).to.have.been.calledOnce - expect(this.res.redirect).to.have.been.calledWith('/register') - }) - }) - - describe('when user is already a member of the project', function () { - beforeEach(function (done) { - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.resolves( - true - ) - this.res.callback = () => 
done() - this.CollaboratorsInviteController.viewInvite( - this.req, - this.res, - this.next - ) - }) - - it('should redirect to the project page', function () { - this.res.redirect.callCount.should.equal(1) - this.res.redirect - .calledWith(`/project/${this.projectId}`) - .should.equal(true) - }) - - it('should not call next with an error', function () { - this.next.callCount.should.equal(0) - }) - - it('should call CollaboratorsGetter.isUserInvitedMemberOfProject', function () { - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.callCount.should.equal( - 1 - ) - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject - .calledWith(this.currentUser._id, this.projectId) - .should.equal(true) - }) - - it('should not call getInviteByToken', function () { - this.CollaboratorsInviteGetter.promises.getInviteByToken.callCount.should.equal( - 0 - ) - }) - - it('should not call User.getUser', function () { - this.UserGetter.promises.getUser.callCount.should.equal(0) - }) - - it('should not call ProjectGetter.getProject', function () { - this.ProjectGetter.promises.getProject.callCount.should.equal(0) - }) - }) - - describe('when isUserInvitedMemberOfProject produces an error', function () { - beforeEach(function (done) { - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.rejects( - new Error('woops') - ) - this.next.callsFake(() => done()) - this.CollaboratorsInviteController.viewInvite( - this.req, - this.res, - this.next - ) - }) - - it('should call next with an error', function () { - this.next.callCount.should.equal(1) - expect(this.next.firstCall.args[0]).to.be.instanceof(Error) - }) - - it('should call CollaboratorsGetter.isUserInvitedMemberOfProject', function () { - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.callCount.should.equal( - 1 - ) - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject - .calledWith(this.currentUser._id, this.projectId) - .should.equal(true) - }) - - it('should not call getInviteByToken', function () { - this.CollaboratorsInviteGetter.promises.getInviteByToken.callCount.should.equal( - 0 - ) - }) - - it('should not call User.getUser', function () { - this.UserGetter.promises.getUser.callCount.should.equal(0) - }) - - it('should not call ProjectGetter.getProject', function () { - this.ProjectGetter.promises.getProject.callCount.should.equal(0) - }) - }) - - describe('when the getInviteByToken produces an error', function () { - beforeEach(function (done) { - this.CollaboratorsInviteGetter.promises.getInviteByToken.rejects( - new Error('woops') - ) - this.next.callsFake(() => done()) - this.CollaboratorsInviteController.viewInvite( - this.req, - this.res, - this.next - ) - }) - - it('should call next with the error', function () { - this.next.callCount.should.equal(1) - this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) - }) - - it('should call CollaboratorsGetter.isUserInvitedMemberOfProject', function () { - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.callCount.should.equal( - 1 - ) - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject - .calledWith(this.currentUser._id, this.projectId) - .should.equal(true) - }) - - it('should call getInviteByToken', function () { - this.CollaboratorsInviteGetter.promises.getInviteByToken.callCount.should.equal( - 1 - ) - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject - .calledWith(this.currentUser._id, this.projectId) - .should.equal(true) - }) - - it('should not call User.getUser', function () { - 
this.UserGetter.promises.getUser.callCount.should.equal(0) - }) - - it('should not call ProjectGetter.getProject', function () { - this.ProjectGetter.promises.getProject.callCount.should.equal(0) - }) - }) - - describe('when the getInviteByToken does not produce an invite', function () { - beforeEach(function (done) { - this.CollaboratorsInviteGetter.promises.getInviteByToken.resolves(null) - this.res.callback = () => done() - this.CollaboratorsInviteController.viewInvite( - this.req, - this.res, - this.next - ) - }) - - it('should render the not-valid view template', function () { - this.res.render.callCount.should.equal(1) - this.res.render - .calledWith('project/invite/not-valid') - .should.equal(true) - }) - - it('should not call next', function () { - this.next.callCount.should.equal(0) - }) - - it('should call CollaboratorsGetter.isUserInvitedMemberOfProject', function () { - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.callCount.should.equal( - 1 - ) - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject - .calledWith(this.currentUser._id, this.projectId) - .should.equal(true) - }) - - it('should call getInviteByToken', function () { - this.CollaboratorsInviteGetter.promises.getInviteByToken.callCount.should.equal( - 1 - ) - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject - .calledWith(this.currentUser._id, this.projectId) - .should.equal(true) - }) - - it('should not call User.getUser', function () { - this.UserGetter.promises.getUser.callCount.should.equal(0) - }) - - it('should not call ProjectGetter.getProject', function () { - this.ProjectGetter.promises.getProject.callCount.should.equal(0) - }) - }) - - describe('when User.getUser produces an error', function () { - beforeEach(function (done) { - this.UserGetter.promises.getUser.rejects(new Error('woops')) - this.next.callsFake(() => done()) - this.CollaboratorsInviteController.viewInvite( - this.req, - this.res, - this.next - ) - }) - - it('should produce an error', function () { - this.next.callCount.should.equal(1) - expect(this.next.firstCall.args[0]).to.be.instanceof(Error) - }) - - it('should call CollaboratorsGetter.isUserInvitedMemberOfProject', function () { - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.callCount.should.equal( - 1 - ) - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject - .calledWith(this.currentUser._id, this.projectId) - .should.equal(true) - }) - - it('should call getInviteByToken', function () { - this.CollaboratorsInviteGetter.promises.getInviteByToken.callCount.should.equal( - 1 - ) - }) - - it('should call User.getUser', function () { - this.UserGetter.promises.getUser.callCount.should.equal(1) - this.UserGetter.promises.getUser - .calledWith({ _id: this.fakeProject.owner_ref }) - .should.equal(true) - }) - - it('should not call ProjectGetter.getProject', function () { - this.ProjectGetter.promises.getProject.callCount.should.equal(0) - }) - }) - - describe('when User.getUser does not find a user', function () { - beforeEach(function (done) { - this.UserGetter.promises.getUser.resolves(null) - this.res.callback = () => done() - this.CollaboratorsInviteController.viewInvite( - this.req, - this.res, - this.next - ) - }) - - it('should render the not-valid view template', function () { - this.res.render.callCount.should.equal(1) - this.res.render - .calledWith('project/invite/not-valid') - .should.equal(true) - }) - - it('should not call next', function () { - this.next.callCount.should.equal(0) - }) - - it('should call 
CollaboratorsGetter.isUserInvitedMemberOfProject', function () { - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.callCount.should.equal( - 1 - ) - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject - .calledWith(this.currentUser._id, this.projectId) - .should.equal(true) - }) - - it('should call getInviteByToken', function () { - this.CollaboratorsInviteGetter.promises.getInviteByToken.callCount.should.equal( - 1 - ) - }) - - it('should call User.getUser', function () { - this.UserGetter.promises.getUser.callCount.should.equal(1) - this.UserGetter.promises.getUser - .calledWith({ _id: this.fakeProject.owner_ref }) - .should.equal(true) - }) - - it('should not call ProjectGetter.getProject', function () { - this.ProjectGetter.promises.getProject.callCount.should.equal(0) - }) - }) - - describe('when getProject produces an error', function () { - beforeEach(function (done) { - this.ProjectGetter.promises.getProject.rejects(new Error('woops')) - this.next.callsFake(() => done()) - this.CollaboratorsInviteController.viewInvite( - this.req, - this.res, - this.next - ) - }) - - it('should produce an error', function () { - this.next.callCount.should.equal(1) - expect(this.next.firstCall.args[0]).to.be.instanceof(Error) - }) - - it('should call CollaboratorsGetter.isUserInvitedMemberOfProject', function () { - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.callCount.should.equal( - 1 - ) - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject - .calledWith(this.currentUser._id, this.projectId) - .should.equal(true) - }) - - it('should call getInviteByToken', function () { - this.CollaboratorsInviteGetter.promises.getInviteByToken.callCount.should.equal( - 1 - ) - }) - - it('should call User.getUser', function () { - this.UserGetter.promises.getUser.callCount.should.equal(1) - this.UserGetter.promises.getUser - .calledWith({ _id: this.fakeProject.owner_ref }) - .should.equal(true) - }) - - it('should call ProjectGetter.getProject', function () { - this.ProjectGetter.promises.getProject.callCount.should.equal(1) - }) - }) - - describe('when Project.getUser does not find a user', function () { - beforeEach(function (done) { - this.ProjectGetter.promises.getProject.resolves(null) - this.res.callback = () => done() - this.CollaboratorsInviteController.viewInvite( - this.req, - this.res, - this.next - ) - }) - - it('should render the not-valid view template', function () { - this.res.render.callCount.should.equal(1) - this.res.render - .calledWith('project/invite/not-valid') - .should.equal(true) - }) - - it('should not call next', function () { - this.next.callCount.should.equal(0) - }) - - it('should call CollaboratorsGetter.isUserInvitedMemberOfProject', function () { - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.callCount.should.equal( - 1 - ) - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject - .calledWith(this.currentUser._id, this.projectId) - .should.equal(true) - }) - - it('should call getInviteByToken', function () { - this.CollaboratorsInviteGetter.promises.getInviteByToken.callCount.should.equal( - 1 - ) - }) - - it('should call getUser', function () { - this.UserGetter.promises.getUser.callCount.should.equal(1) - this.UserGetter.promises.getUser - .calledWith({ _id: this.fakeProject.owner_ref }) - .should.equal(true) - }) - - it('should call ProjectGetter.getProject', function () { - this.ProjectGetter.promises.getProject.callCount.should.equal(1) - }) - }) - }) - - describe('generateNewInvite', function 
() { - beforeEach(function () { - this.req.params = { - Project_id: this.projectId, - invite_id: this.invite._id.toString(), - } - this.CollaboratorsInviteController._checkRateLimit = sinon - .stub() - .resolves(true) - }) - - describe('when generateNewInvite does not produce an error', function () { - describe('and returns an invite object', function () { - beforeEach(function (done) { - this.res.callback = () => done() - this.CollaboratorsInviteController.generateNewInvite( - this.req, - this.res, - this.next - ) - }) - - it('should produce a 201 response', function () { - this.res.sendStatus.callCount.should.equal(1) - this.res.sendStatus.calledWith(201).should.equal(true) - }) - - it('should have called generateNewInvite', function () { - this.CollaboratorsInviteHandler.promises.generateNewInvite.callCount.should.equal( - 1 - ) - }) - - it('should have called emitToRoom', function () { - this.EditorRealTimeController.emitToRoom.callCount.should.equal(1) - this.EditorRealTimeController.emitToRoom - .calledWith(this.projectId, 'project:membership:changed') - .should.equal(true) - }) - - it('should check the rate limit', function () { - this.CollaboratorsInviteController._checkRateLimit.callCount.should.equal( - 1 - ) - }) - - it('should add a project audit log entry', function () { - this.ProjectAuditLogHandler.addEntryInBackground.should.have.been.calledWith( - this.projectId, - 'resend-invite', - this.currentUser._id, - this.req.ip, - { - inviteId: this.invite._id, - privileges: this.privileges, - } - ) - }) - }) - - describe('and returns a null invite', function () { - beforeEach(function (done) { - this.CollaboratorsInviteHandler.promises.generateNewInvite.resolves( - null - ) - this.res.callback = () => done() - this.CollaboratorsInviteController.generateNewInvite( - this.req, - this.res, - this.next - ) - }) - - it('should have called emitToRoom', function () { - this.EditorRealTimeController.emitToRoom.callCount.should.equal(1) - this.EditorRealTimeController.emitToRoom - .calledWith(this.projectId, 'project:membership:changed') - .should.equal(true) - }) - - it('should produce a 404 response when invite is null', function () { - this.res.sendStatus.callCount.should.equal(1) - this.res.sendStatus.should.have.been.calledWith(404) - }) - }) - }) - - describe('when generateNewInvite produces an error', function () { - beforeEach(function (done) { - this.CollaboratorsInviteHandler.promises.generateNewInvite.rejects( - new Error('woops') - ) - this.next.callsFake(() => done()) - this.CollaboratorsInviteController.generateNewInvite( - this.req, - this.res, - this.next - ) - }) - - it('should not produce a 201 response', function () { - this.res.sendStatus.callCount.should.equal(0) - }) - - it('should call next with the error', function () { - this.next.callCount.should.equal(1) - this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) - }) - - it('should have called generateNewInvite', function () { - this.CollaboratorsInviteHandler.promises.generateNewInvite.callCount.should.equal( - 1 - ) - }) - }) - }) - - describe('revokeInvite', function () { - beforeEach(function () { - this.req.params = { - Project_id: this.projectId, - invite_id: this.invite._id.toString(), - } - }) - - describe('when revokeInvite does not produce an error', function () { - beforeEach(function (done) { - this.res.callback = () => done() - this.CollaboratorsInviteController.revokeInvite( - this.req, - this.res, - this.next - ) - }) - - it('should produce a 204 response', function () { - 
this.res.sendStatus.callCount.should.equal(1) - this.res.sendStatus.should.have.been.calledWith(204) - }) - - it('should have called revokeInvite', function () { - this.CollaboratorsInviteHandler.promises.revokeInvite.callCount.should.equal( - 1 - ) - }) - - it('should have called emitToRoom', function () { - this.EditorRealTimeController.emitToRoom.callCount.should.equal(1) - this.EditorRealTimeController.emitToRoom - .calledWith(this.projectId, 'project:membership:changed') - .should.equal(true) - }) - - it('should add a project audit log entry', function () { - this.ProjectAuditLogHandler.addEntryInBackground.should.have.been.calledWith( - this.projectId, - 'revoke-invite', - this.currentUser._id, - this.req.ip, - { - inviteId: this.invite._id, - privileges: this.privileges, - } - ) - }) - }) - - describe('when revokeInvite produces an error', function () { - beforeEach(function (done) { - this.CollaboratorsInviteHandler.promises.revokeInvite.rejects( - new Error('woops') - ) - this.next.callsFake(() => done()) - this.CollaboratorsInviteController.revokeInvite( - this.req, - this.res, - this.next - ) - }) - - it('should not produce a 201 response', function () { - this.res.sendStatus.callCount.should.equal(0) - }) - - it('should call next with the error', function () { - this.next.callCount.should.equal(1) - this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) - }) - - it('should have called revokeInvite', function () { - this.CollaboratorsInviteHandler.promises.revokeInvite.callCount.should.equal( - 1 - ) - }) - }) - }) - - describe('acceptInvite', function () { - beforeEach(function () { - this.req.params = { - Project_id: this.projectId, - token: this.token, - } - }) - - describe('when acceptInvite does not produce an error', function () { - beforeEach(function (done) { - this.res.callback = () => done() - this.CollaboratorsInviteController.acceptInvite( - this.req, - this.res, - this.next - ) - }) - - it('should redirect to project page', function () { - this.res.redirect.should.have.been.calledOnce - this.res.redirect.should.have.been.calledWith( - `/project/${this.projectId}` - ) - }) - - it('should have called acceptInvite', function () { - this.CollaboratorsInviteHandler.promises.acceptInvite.should.have.been.calledWith( - this.invite, - this.projectId, - this.currentUser - ) - }) - - it('should have called emitToRoom', function () { - this.EditorRealTimeController.emitToRoom.should.have.been.calledOnce - this.EditorRealTimeController.emitToRoom.should.have.been.calledWith( - this.projectId, - 'project:membership:changed' - ) - }) - - it('should add a project audit log entry', function () { - this.ProjectAuditLogHandler.promises.addEntry.should.have.been.calledWith( - this.projectId, - 'accept-invite', - this.currentUser._id, - this.req.ip, - { - inviteId: this.invite._id, - privileges: this.privileges, - } - ) - }) - }) - - describe('when the invite is not found', function () { - beforeEach(function (done) { - this.CollaboratorsInviteGetter.promises.getInviteByToken.resolves(null) - this.next.callsFake(() => done()) - this.CollaboratorsInviteController.acceptInvite( - this.req, - this.res, - this.next - ) - }) - - it('throws a NotFoundError', function () { - expect(this.next).to.have.been.calledWith( - sinon.match.instanceOf(Errors.NotFoundError) - ) - }) - }) - - describe('when acceptInvite produces an error', function () { - beforeEach(function (done) { - this.CollaboratorsInviteHandler.promises.acceptInvite.rejects( - new Error('woops') - ) - 
this.next.callsFake(() => done()) - this.CollaboratorsInviteController.acceptInvite( - this.req, - this.res, - this.next - ) - }) - - it('should not redirect to project page', function () { - this.res.redirect.callCount.should.equal(0) - }) - - it('should call next with the error', function () { - this.next.callCount.should.equal(1) - this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) - }) - - it('should have called acceptInvite', function () { - this.CollaboratorsInviteHandler.promises.acceptInvite.callCount.should.equal( - 1 - ) - }) - }) - - describe('when the project audit log entry fails', function () { - beforeEach(function (done) { - this.ProjectAuditLogHandler.promises.addEntry.rejects(new Error('oops')) - this.next.callsFake(() => done()) - this.CollaboratorsInviteController.acceptInvite( - this.req, - this.res, - this.next - ) - }) - - it('should not accept the invite', function () { - this.CollaboratorsInviteHandler.promises.acceptInvite.should.not.have - .been.called - }) - }) - }) - - describe('_checkShouldInviteEmail', function () { - beforeEach(function () { - this.email = 'user@example.com' - }) - - describe('when we should be restricting to existing accounts', function () { - beforeEach(function () { - this.settings.restrictInvitesToExistingAccounts = true - this.call = () => - this.CollaboratorsInviteController._checkShouldInviteEmail(this.email) - }) - - describe('when user account is present', function () { - beforeEach(function () { - this.user = { _id: new ObjectId().toString() } - this.UserGetter.promises.getUserByAnyEmail.resolves(this.user) - }) - - it('should callback with `true`', async function () { - const shouldAllow = - await this.CollaboratorsInviteController._checkShouldInviteEmail( - this.email - ) - expect(shouldAllow).to.equal(true) - }) - }) - - describe('when user account is absent', function () { - beforeEach(function () { - this.user = null - this.UserGetter.promises.getUserByAnyEmail.resolves(this.user) - }) - - it('should callback with `false`', async function () { - const shouldAllow = - await this.CollaboratorsInviteController._checkShouldInviteEmail( - this.email - ) - expect(shouldAllow).to.equal(false) - }) - - it('should have called getUser', async function () { - await this.CollaboratorsInviteController._checkShouldInviteEmail( - this.email - ) - this.UserGetter.promises.getUserByAnyEmail.callCount.should.equal(1) - this.UserGetter.promises.getUserByAnyEmail - .calledWith(this.email, { _id: 1 }) - .should.equal(true) - }) - }) - - describe('when getUser produces an error', function () { - beforeEach(function () { - this.user = null - this.UserGetter.promises.getUserByAnyEmail.rejects(new Error('woops')) - }) - - it('should callback with an error', async function () { - await expect( - this.CollaboratorsInviteController._checkShouldInviteEmail( - this.email - ) - ).to.be.rejected - }) - }) - }) - }) - - describe('_checkRateLimit', function () { - beforeEach(function () { - this.settings.restrictInvitesToExistingAccounts = false - this.currentUserId = '32312313' - this.LimitationsManager.promises.allowedNumberOfCollaboratorsForUser - .withArgs(this.currentUserId) - .resolves(17) - }) - - it('should callback with `true` when rate limit under', async function () { - const result = await this.CollaboratorsInviteController._checkRateLimit( - this.currentUserId - ) - expect(this.rateLimiter.consume).to.have.been.calledWith( - this.currentUserId - ) - result.should.equal(true) - }) - - it('should callback with `false` when rate 
limit hit', async function () { - this.rateLimiter.consume.rejects({ remainingPoints: 0 }) - const result = await this.CollaboratorsInviteController._checkRateLimit( - this.currentUserId - ) - expect(this.rateLimiter.consume).to.have.been.calledWith( - this.currentUserId - ) - result.should.equal(false) - }) - - it('should allow 10x the collaborators', async function () { - await this.CollaboratorsInviteController._checkRateLimit( - this.currentUserId - ) - expect(this.rateLimiter.consume).to.have.been.calledWith( - this.currentUserId, - Math.floor(40000 / 170) - ) - }) - - it('should allow 200 requests when collaborators is -1', async function () { - this.LimitationsManager.promises.allowedNumberOfCollaboratorsForUser - .withArgs(this.currentUserId) - .resolves(-1) - await this.CollaboratorsInviteController._checkRateLimit( - this.currentUserId - ) - expect(this.rateLimiter.consume).to.have.been.calledWith( - this.currentUserId, - Math.floor(40000 / 200) - ) - }) - - it('should allow 10 requests when user has no collaborators set', async function () { - this.LimitationsManager.promises.allowedNumberOfCollaboratorsForUser - .withArgs(this.currentUserId) - .resolves(null) - await this.CollaboratorsInviteController._checkRateLimit( - this.currentUserId - ) - expect(this.rateLimiter.consume).to.have.been.calledWith( - this.currentUserId, - Math.floor(40000 / 10) - ) - }) - }) -}) diff --git a/services/web/test/unit/src/Collaborators/CollaboratorsInviteHandler.test.mjs b/services/web/test/unit/src/Collaborators/CollaboratorsInviteHandler.test.mjs new file mode 100644 index 0000000000..5d6690d7c0 --- /dev/null +++ b/services/web/test/unit/src/Collaborators/CollaboratorsInviteHandler.test.mjs @@ -0,0 +1,888 @@ +import { expect, vi } from 'vitest' +import sinon from 'sinon' +import mongodb from 'mongodb-legacy' +import Crypto from 'crypto' + +const ObjectId = mongodb.ObjectId + +const MODULE_PATH = + '../../../../app/src/Features/Collaborators/CollaboratorsInviteHandler.mjs' + +describe('CollaboratorsInviteHandler', function () { + beforeEach(async function (ctx) { + ctx.ProjectInvite = class ProjectInvite { + constructor(options) { + if (options == null) { + options = {} + } + this._id = new ObjectId() + for (const k in options) { + const v = options[k] + this[k] = v + } + } + } + ctx.ProjectInvite.prototype.save = sinon.stub() + ctx.ProjectInvite.findOne = sinon.stub() + ctx.ProjectInvite.find = sinon.stub() + ctx.ProjectInvite.deleteOne = sinon.stub() + ctx.ProjectInvite.findOneAndDelete = sinon.stub() + ctx.ProjectInvite.countDocuments = sinon.stub() + + ctx.Crypto = { + randomBytes: sinon.stub().callsFake(Crypto.randomBytes), + } + ctx.settings = {} + ctx.CollaboratorsEmailHandler = { promises: {} } + ctx.CollaboratorsHandler = { + promises: { + addUserIdToProject: sinon.stub(), + }, + } + ctx.UserGetter = { promises: { getUser: sinon.stub() } } + ctx.ProjectGetter = { promises: { getProject: sinon.stub().resolves() } } + ctx.NotificationsBuilder = { promises: {} } + ctx.tokenHmac = 'jkhajkefhaekjfhkfg' + ctx.CollaboratorsInviteHelper = { + generateToken: sinon.stub().returns(ctx.Crypto.randomBytes(24)), + hashInviteToken: sinon.stub().returns(ctx.tokenHmac), + } + + ctx.CollaboratorsInviteGetter = { + promises: { + getAllInvites: sinon.stub(), + }, + } + + ctx.SplitTestHandler = { + promises: { + getAssignmentForUser: sinon.stub().resolves(), + }, + } + + ctx.LimitationsManager = { + promises: { + canAcceptEditCollaboratorInvite: sinon.stub().resolves(), + }, + } + + 
ctx.ProjectAuditLogHandler = { + promises: { + addEntry: sinon.stub().resolves(), + }, + addEntryInBackground: sinon.stub(), + } + ctx.logger = { + debug: sinon.stub(), + warn: sinon.stub(), + err: sinon.stub(), + } + + vi.doMock('@overleaf/settings', () => ({ + default: ctx.settings, + })) + + vi.doMock('../../../../app/src/models/ProjectInvite.js', () => ({ + ProjectInvite: ctx.ProjectInvite, + })) + + vi.doMock('@overleaf/logger', () => ({ + default: ctx.logger, + })) + + vi.doMock( + '../../../../app/src/Features/Collaborators/CollaboratorsEmailHandler.mjs', + () => ({ + default: ctx.CollaboratorsEmailHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Collaborators/CollaboratorsHandler.js', + () => ({ + default: ctx.CollaboratorsHandler, + }) + ) + + vi.doMock('../../../../app/src/Features/User/UserGetter.js', () => ({ + default: ctx.UserGetter, + })) + + vi.doMock('../../../../app/src/Features/Project/ProjectGetter.js', () => ({ + default: ctx.ProjectGetter, + })) + + vi.doMock( + '../../../../app/src/Features/Notifications/NotificationsBuilder.js', + () => ({ + default: ctx.NotificationsBuilder, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Collaborators/CollaboratorsInviteHelper.js', + () => ({ + default: ctx.CollaboratorsInviteHelper, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Collaborators/CollaboratorsInviteGetter', + () => ({ + default: ctx.CollaboratorsInviteGetter, + }) + ) + + vi.doMock( + '../../../../app/src/Features/SplitTests/SplitTestHandler.js', + () => ({ + default: ctx.SplitTestHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Subscription/LimitationsManager.js', + () => ({ + default: ctx.LimitationsManager, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Project/ProjectAuditLogHandler.js', + () => ({ + default: ctx.ProjectAuditLogHandler, + }) + ) + + vi.doMock('crypto', () => ({ + default: ctx.CryptogetAssignmentForUser, + })) + + ctx.CollaboratorsInviteHandler = (await import(MODULE_PATH)).default + + ctx.projectId = new ObjectId() + ctx.sendingUserId = new ObjectId() + ctx.sendingUser = { + _id: ctx.sendingUserId, + name: 'Bob', + } + ctx.email = 'user@example.com' + ctx.userId = new ObjectId() + ctx.user = { + _id: ctx.userId, + email: 'someone@example.com', + } + ctx.inviteId = new ObjectId() + ctx.token = 'hnhteaosuhtaeosuahs' + ctx.privileges = 'readAndWrite' + ctx.fakeInvite = { + _id: ctx.inviteId, + email: ctx.email, + token: ctx.token, + tokenHmac: ctx.tokenHmac, + sendingUserId: ctx.sendingUserId, + projectId: ctx.projectId, + privileges: ctx.privileges, + createdAt: new Date(), + } + }) + + describe('inviteToProject', function () { + beforeEach(function (ctx) { + ctx.ProjectInvite.prototype.save.callsFake(async function () { + Object.defineProperty(this, 'toObject', { + value: function () { + return this + }, + writable: true, + configurable: true, + enumerable: false, + }) + return this + }) + ctx.CollaboratorsInviteHandler.promises._sendMessages = sinon + .stub() + .resolves() + ctx.call = async () => { + return await ctx.CollaboratorsInviteHandler.promises.inviteToProject( + ctx.projectId, + ctx.sendingUser, + ctx.email, + ctx.privileges + ) + } + }) + + describe('when all goes well', function () { + it('should produce the invite object', async function (ctx) { + const invite = await ctx.call() + expect(invite).to.not.equal(null) + expect(invite).to.not.equal(undefined) + expect(invite).to.be.instanceof(Object) + expect(invite).to.have.all.keys(['_id', 'email', 'privileges']) + }) + + 
it('should have generated a random token', async function (ctx) { + await ctx.call() + ctx.Crypto.randomBytes.callCount.should.equal(1) + }) + + it('should have generated a HMAC token', async function (ctx) { + await ctx.call() + ctx.CollaboratorsInviteHelper.hashInviteToken.callCount.should.equal(1) + }) + + it('should have called ProjectInvite.save', async function (ctx) { + await ctx.call() + ctx.ProjectInvite.prototype.save.callCount.should.equal(1) + }) + + it('should have called _sendMessages', async function (ctx) { + await ctx.call() + ctx.CollaboratorsInviteHandler.promises._sendMessages.callCount.should.equal( + 1 + ) + ctx.CollaboratorsInviteHandler.promises._sendMessages + .calledWith(ctx.projectId, ctx.sendingUser) + .should.equal(true) + }) + }) + + describe('when saving model produces an error', function () { + beforeEach(function (ctx) { + ctx.ProjectInvite.prototype.save.rejects(new Error('woops')) + }) + + it('should produce an error', async function (ctx) { + await expect(ctx.call()).to.be.rejectedWith(Error) + }) + }) + }) + + describe('_sendMessages', function () { + beforeEach(function (ctx) { + ctx.CollaboratorsEmailHandler.promises.notifyUserOfProjectInvite = sinon + .stub() + .resolves() + ctx.CollaboratorsInviteHandler.promises._trySendInviteNotification = sinon + .stub() + .resolves() + ctx.call = async () => { + await ctx.CollaboratorsInviteHandler.promises._sendMessages( + ctx.projectId, + ctx.sendingUser, + ctx.fakeInvite + ) + } + }) + + describe('when all goes well', function () { + it('should call CollaboratorsEmailHandler.notifyUserOfProjectInvite', async function (ctx) { + await ctx.call() + ctx.CollaboratorsEmailHandler.promises.notifyUserOfProjectInvite.callCount.should.equal( + 1 + ) + ctx.CollaboratorsEmailHandler.promises.notifyUserOfProjectInvite + .calledWith(ctx.projectId, ctx.fakeInvite.email, ctx.fakeInvite) + .should.equal(true) + }) + + it('should call _trySendInviteNotification', async function (ctx) { + await ctx.call() + ctx.CollaboratorsInviteHandler.promises._trySendInviteNotification.callCount.should.equal( + 1 + ) + ctx.CollaboratorsInviteHandler.promises._trySendInviteNotification + .calledWith(ctx.projectId, ctx.sendingUser, ctx.fakeInvite) + .should.equal(true) + }) + }) + + describe('when CollaboratorsEmailHandler.notifyUserOfProjectInvite produces an error', function () { + beforeEach(function (ctx) { + ctx.CollaboratorsEmailHandler.promises.notifyUserOfProjectInvite = sinon + .stub() + .rejects(new Error('woops')) + }) + + it('should not produce an error', async function (ctx) { + await expect(ctx.call()).to.be.fulfilled + expect(ctx.logger.err).to.be.calledOnce + }) + }) + + describe('when _trySendInviteNotification produces an error', function () { + beforeEach(function (ctx) { + ctx.CollaboratorsInviteHandler.promises._trySendInviteNotification = + sinon.stub().rejects(new Error('woops')) + }) + + it('should not produce an error', async function (ctx) { + await expect(ctx.call()).to.be.fulfilled + expect(ctx.logger.err).to.be.calledOnce + }) + }) + }) + describe('revokeInviteForUser', function () { + beforeEach(function (ctx) { + ctx.targetInvite = { + _id: new ObjectId(), + email: 'fake2@example.org', + two: 2, + } + ctx.fakeInvites = [ + { _id: new ObjectId(), email: 'fake1@example.org', one: 1 }, + ctx.targetInvite, + ] + ctx.fakeInvitesWithoutUser = [ + { _id: new ObjectId(), email: 'fake1@example.org', one: 1 }, + { _id: new ObjectId(), email: 'fake3@example.org', two: 2 }, + ] + ctx.targetEmail = [{ email: 
'fake2@example.org' }] + + ctx.CollaboratorsInviteGetter.promises.getAllInvites.resolves( + ctx.fakeInvites + ) + ctx.CollaboratorsInviteHandler.promises.revokeInvite = sinon + .stub() + .resolves(ctx.targetInvite) + + ctx.call = async () => { + return await ctx.CollaboratorsInviteHandler.promises.revokeInviteForUser( + ctx.projectId, + ctx.targetEmail + ) + } + }) + + describe('for a valid user', function () { + it('should have called CollaboratorsInviteGetter.getAllInvites', async function (ctx) { + await ctx.call() + ctx.CollaboratorsInviteGetter.promises.getAllInvites.callCount.should.equal( + 1 + ) + ctx.CollaboratorsInviteGetter.promises.getAllInvites + .calledWith(ctx.projectId) + .should.equal(true) + }) + + it('should have called revokeInvite', async function (ctx) { + await ctx.call() + ctx.CollaboratorsInviteHandler.promises.revokeInvite.callCount.should.equal( + 1 + ) + + ctx.CollaboratorsInviteHandler.promises.revokeInvite + .calledWith(ctx.projectId, ctx.targetInvite._id) + .should.equal(true) + }) + }) + + describe('for a user without an invite in the project', function () { + beforeEach(function (ctx) { + ctx.CollaboratorsInviteGetter.promises.getAllInvites.resolves( + ctx.fakeInvitesWithoutUser + ) + }) + it('should not have called CollaboratorsInviteHandler.revokeInvite', async function (ctx) { + await ctx.call() + ctx.CollaboratorsInviteHandler.promises.revokeInvite.callCount.should.equal( + 0 + ) + }) + }) + }) + + describe('revokeInvite', function () { + beforeEach(function (ctx) { + ctx.ProjectInvite.findOneAndDelete.returns({ + exec: sinon.stub().resolves(ctx.fakeInvite), + }) + ctx.CollaboratorsInviteHandler.promises._tryCancelInviteNotification = + sinon.stub().resolves() + ctx.call = async () => { + return await ctx.CollaboratorsInviteHandler.promises.revokeInvite( + ctx.projectId, + ctx.inviteId + ) + } + }) + + describe('when all goes well', function () { + it('should call ProjectInvite.findOneAndDelete', async function (ctx) { + await ctx.call() + ctx.ProjectInvite.findOneAndDelete.should.have.been.calledOnce + ctx.ProjectInvite.findOneAndDelete.should.have.been.calledWith({ + projectId: ctx.projectId, + _id: ctx.inviteId, + }) + }) + + it('should call _tryCancelInviteNotification', async function (ctx) { + await ctx.call() + ctx.CollaboratorsInviteHandler.promises._tryCancelInviteNotification.callCount.should.equal( + 1 + ) + ctx.CollaboratorsInviteHandler.promises._tryCancelInviteNotification + .calledWith(ctx.inviteId) + .should.equal(true) + }) + + it('should return the deleted invite', async function (ctx) { + const invite = await ctx.call() + expect(invite).to.deep.equal(ctx.fakeInvite) + }) + }) + + describe('when remove produces an error', function () { + beforeEach(function (ctx) { + ctx.ProjectInvite.findOneAndDelete.returns({ + exec: sinon.stub().rejects(new Error('woops')), + }) + }) + + it('should produce an error', async function (ctx) { + await expect(ctx.call()).to.be.rejectedWith(Error) + }) + }) + }) + + describe('generateNewInvite', function () { + beforeEach(function (ctx) { + ctx.fakeInviteToProjectObject = { + _id: new ObjectId(), + email: ctx.email, + privileges: ctx.privileges, + } + ctx.CollaboratorsInviteHandler.promises.revokeInvite = sinon + .stub() + .resolves(ctx.fakeInvite) + ctx.CollaboratorsInviteHandler.promises.inviteToProject = sinon + .stub() + .resolves(ctx.fakeInviteToProjectObject) + ctx.call = async () => { + return await ctx.CollaboratorsInviteHandler.promises.generateNewInvite( + ctx.projectId, + ctx.sendingUser, + 
ctx.inviteId + ) + } + }) + + describe('when all goes well', function () { + it('should call revokeInvite', async function (ctx) { + await ctx.call() + ctx.CollaboratorsInviteHandler.promises.revokeInvite.callCount.should.equal( + 1 + ) + ctx.CollaboratorsInviteHandler.promises.revokeInvite + .calledWith(ctx.projectId, ctx.inviteId) + .should.equal(true) + }) + + it('should have called inviteToProject', async function (ctx) { + await ctx.call() + ctx.CollaboratorsInviteHandler.promises.inviteToProject.callCount.should.equal( + 1 + ) + ctx.CollaboratorsInviteHandler.promises.inviteToProject + .calledWith( + ctx.projectId, + ctx.sendingUser, + ctx.fakeInvite.email, + ctx.fakeInvite.privileges + ) + .should.equal(true) + }) + + it('should return the invite', async function (ctx) { + const invite = await ctx.call() + expect(invite).to.deep.equal(ctx.fakeInviteToProjectObject) + }) + }) + + describe('when revokeInvite produces an error', function () { + beforeEach(function (ctx) { + ctx.CollaboratorsInviteHandler.promises.revokeInvite = sinon + .stub() + .rejects(new Error('woops')) + }) + + it('should produce an error', async function (ctx) { + await expect(ctx.call()).to.be.rejectedWith(Error) + }) + + it('should not have called inviteToProject', async function (ctx) { + await expect(ctx.call()).to.be.rejected + ctx.CollaboratorsInviteHandler.promises.inviteToProject.callCount.should.equal( + 0 + ) + }) + }) + + describe('when findOne does not find an invite', function () { + beforeEach(function (ctx) { + ctx.CollaboratorsInviteHandler.promises.revokeInvite = sinon + .stub() + .resolves(null) + }) + + it('should not have called inviteToProject', async function (ctx) { + await ctx.call() + ctx.CollaboratorsInviteHandler.promises.inviteToProject.callCount.should.equal( + 0 + ) + }) + }) + }) + + describe('acceptInvite', function () { + beforeEach(function (ctx) { + ctx.fakeProject = { + _id: ctx.projectId, + owner_ref: ctx.sendingUserId, + } + ctx.ProjectGetter.promises.getProject = sinon + .stub() + .resolves(ctx.fakeProject) + ctx.CollaboratorsHandler.promises.addUserIdToProject.resolves() + ctx.CollaboratorsInviteHandler.promises._tryCancelInviteNotification = + sinon.stub().resolves() + ctx.LimitationsManager.promises.canAcceptEditCollaboratorInvite.resolves( + true + ) + ctx.ProjectInvite.deleteOne.returns({ exec: sinon.stub().resolves() }) + ctx.call = async () => { + await ctx.CollaboratorsInviteHandler.promises.acceptInvite( + ctx.fakeInvite, + ctx.projectId, + ctx.user + ) + } + }) + + describe('when all goes well', function () { + it('should add readAndWrite invitees to the project as normal', async function (ctx) { + await ctx.call() + ctx.CollaboratorsHandler.promises.addUserIdToProject.should.have.been.calledWith( + ctx.projectId, + ctx.sendingUserId, + ctx.userId, + ctx.fakeInvite.privileges + ) + }) + + it('should have called ProjectInvite.deleteOne', async function (ctx) { + await ctx.call() + ctx.ProjectInvite.deleteOne.callCount.should.equal(1) + ctx.ProjectInvite.deleteOne + .calledWith({ _id: ctx.inviteId }) + .should.equal(true) + }) + }) + + describe('when the invite is for readOnly access', function () { + beforeEach(function (ctx) { + ctx.fakeInvite.privileges = 'readOnly' + }) + + it('should have called CollaboratorsHandler.addUserIdToProject', async function (ctx) { + await ctx.call() + ctx.CollaboratorsHandler.promises.addUserIdToProject.callCount.should.equal( + 1 + ) + ctx.CollaboratorsHandler.promises.addUserIdToProject + .calledWith( + ctx.projectId, + 
ctx.sendingUserId, + ctx.userId, + ctx.fakeInvite.privileges + ) + .should.equal(true) + }) + }) + + describe('when the project has no more edit collaborator slots', function () { + beforeEach(function (ctx) { + ctx.LimitationsManager.promises.canAcceptEditCollaboratorInvite.resolves( + false + ) + }) + + it('should add readAndWrite invitees to the project as readOnly (pendingEditor) users', async function (ctx) { + await ctx.call() + ctx.ProjectAuditLogHandler.promises.addEntry.should.have.been.calledWith( + ctx.projectId, + 'editor-moved-to-pending', + null, + null, + { userId: ctx.userId.toString(), role: 'editor' } + ) + ctx.CollaboratorsHandler.promises.addUserIdToProject.should.have.been.calledWith( + ctx.projectId, + ctx.sendingUserId, + ctx.userId, + 'readOnly', + { pendingEditor: true } + ) + }) + }) + + describe('when addUserIdToProject produces an error', function () { + beforeEach(function (ctx) { + ctx.CollaboratorsHandler.promises.addUserIdToProject.callsArgWith( + 4, + new Error('woops') + ) + }) + + it('should produce an error', async function (ctx) { + await expect(ctx.call()).to.be.rejectedWith(Error) + }) + + it('should have called CollaboratorsHandler.addUserIdToProject', async function (ctx) { + await expect(ctx.call()).to.be.rejected + ctx.CollaboratorsHandler.promises.addUserIdToProject.callCount.should.equal( + 1 + ) + ctx.CollaboratorsHandler.promises.addUserIdToProject + .calledWith( + ctx.projectId, + ctx.sendingUserId, + ctx.userId, + ctx.fakeInvite.privileges + ) + .should.equal(true) + }) + + it('should not have called ProjectInvite.deleteOne', async function (ctx) { + await expect(ctx.call()).to.be.rejected + ctx.ProjectInvite.deleteOne.callCount.should.equal(0) + }) + }) + + describe('when ProjectInvite.deleteOne produces an error', function () { + beforeEach(function (ctx) { + ctx.ProjectInvite.deleteOne.returns({ + exec: sinon.stub().rejects(new Error('woops')), + }) + }) + + it('should produce an error', async function (ctx) { + await expect(ctx.call()).to.be.rejectedWith(Error) + }) + + it('should have called CollaboratorsHandler.addUserIdToProject', async function (ctx) { + await expect(ctx.call()).to.be.rejected + ctx.CollaboratorsHandler.promises.addUserIdToProject.callCount.should.equal( + 1 + ) + ctx.CollaboratorsHandler.promises.addUserIdToProject.should.have.been.calledWith( + ctx.projectId, + ctx.sendingUserId, + ctx.userId, + ctx.fakeInvite.privileges + ) + }) + + it('should have called ProjectInvite.deleteOne', async function (ctx) { + await expect(ctx.call()).to.be.rejected + ctx.ProjectInvite.deleteOne.callCount.should.equal(1) + }) + }) + }) + + describe('_tryCancelInviteNotification', function () { + beforeEach(function (ctx) { + ctx.inviteId = new ObjectId() + ctx.currentUser = { _id: new ObjectId() } + ctx.notification = { read: sinon.stub().resolves() } + ctx.NotificationsBuilder.promises.projectInvite = sinon + .stub() + .returns(ctx.notification) + ctx.call = async () => { + await ctx.CollaboratorsInviteHandler.promises._tryCancelInviteNotification( + ctx.inviteId + ) + } + }) + + it('should call notification.read', async function (ctx) { + await ctx.call() + ctx.notification.read.callCount.should.equal(1) + }) + + describe('when notification.read produces an error', function () { + beforeEach(function (ctx) { + ctx.notification = { + read: sinon.stub().rejects(new Error('woops')), + } + ctx.NotificationsBuilder.promises.projectInvite = sinon + .stub() + .returns(ctx.notification) + }) + + it('should produce an error', async function 
(ctx) { + await expect(ctx.call()).to.be.rejected + }) + }) + }) + + describe('_trySendInviteNotification', function () { + beforeEach(function (ctx) { + ctx.invite = { + _id: new ObjectId(), + token: 'some_token', + sendingUserId: new ObjectId(), + projectId: ctx.project_id, + targetEmail: 'user@example.com', + createdAt: new Date(), + } + ctx.sendingUser = { + _id: new ObjectId(), + first_name: 'jim', + } + ctx.existingUser = { _id: new ObjectId() } + ctx.UserGetter.promises.getUserByAnyEmail = sinon + .stub() + .resolves(ctx.existingUser) + ctx.fakeProject = { + _id: ctx.project_id, + name: 'some project', + } + ctx.ProjectGetter.promises.getProject = sinon + .stub() + .resolves(ctx.fakeProject) + ctx.notification = { create: sinon.stub().resolves() } + ctx.NotificationsBuilder.promises.projectInvite = sinon + .stub() + .returns(ctx.notification) + ctx.call = async () => { + await ctx.CollaboratorsInviteHandler.promises._trySendInviteNotification( + ctx.project_id, + ctx.sendingUser, + ctx.invite + ) + } + }) + + describe('when the user exists', function () { + beforeEach(function () {}) + + it('should call getUser', async function (ctx) { + await ctx.call() + ctx.UserGetter.promises.getUserByAnyEmail.callCount.should.equal(1) + ctx.UserGetter.promises.getUserByAnyEmail + .calledWith(ctx.invite.email) + .should.equal(true) + }) + + it('should call getProject', async function (ctx) { + await ctx.call() + ctx.ProjectGetter.promises.getProject.callCount.should.equal(1) + ctx.ProjectGetter.promises.getProject + .calledWith(ctx.project_id) + .should.equal(true) + }) + + it('should call NotificationsBuilder.projectInvite.create', async function (ctx) { + await ctx.call() + ctx.NotificationsBuilder.promises.projectInvite.callCount.should.equal( + 1 + ) + ctx.notification.create.callCount.should.equal(1) + }) + + describe('when getProject produces an error', function () { + beforeEach(function (ctx) { + ctx.ProjectGetter.promises.getProject.callsArgWith( + 2, + new Error('woops') + ) + }) + + it('should produce an error', async function (ctx) { + await expect(ctx.call()).to.be.rejectedWith(Error) + }) + + it('should not call NotificationsBuilder.projectInvite.create', async function (ctx) { + await expect(ctx.call()).to.be.rejected + ctx.NotificationsBuilder.promises.projectInvite.callCount.should.equal( + 0 + ) + ctx.notification.create.callCount.should.equal(0) + }) + }) + + describe('when projectInvite.create produces an error', function () { + beforeEach(function (ctx) { + ctx.notification.create.callsArgWith(0, new Error('woops')) + }) + + it('should produce an error', async function (ctx) { + await expect(ctx.call()).to.be.rejectedWith(Error) + }) + }) + }) + + describe('when the user does not exist', function () { + beforeEach(function (ctx) { + ctx.UserGetter.promises.getUserByAnyEmail = sinon.stub().resolves(null) + }) + + it('should call getUser', async function (ctx) { + await ctx.call() + ctx.UserGetter.promises.getUserByAnyEmail.callCount.should.equal(1) + ctx.UserGetter.promises.getUserByAnyEmail + .calledWith(ctx.invite.email) + .should.equal(true) + }) + + it('should not call getProject', async function (ctx) { + await ctx.call() + ctx.ProjectGetter.promises.getProject.callCount.should.equal(0) + }) + + it('should not call NotificationsBuilder.projectInvite.create', async function (ctx) { + await ctx.call() + ctx.NotificationsBuilder.promises.projectInvite.callCount.should.equal( + 0 + ) + ctx.notification.create.callCount.should.equal(0) + }) + }) + + describe('when the getUser 
produces an error', function () { + beforeEach(function (ctx) { + ctx.UserGetter.promises.getUserByAnyEmail = sinon + .stub() + .rejects(new Error('woops')) + }) + + it('should produce an error', async function (ctx) { + await expect(ctx.call()).to.be.rejectedWith(Error) + }) + + it('should call getUser', async function (ctx) { + await expect(ctx.call()).to.be.rejected + ctx.UserGetter.promises.getUserByAnyEmail.callCount.should.equal(1) + ctx.UserGetter.promises.getUserByAnyEmail + .calledWith(ctx.invite.email) + .should.equal(true) + }) + + it('should not call getProject', async function (ctx) { + await expect(ctx.call()).to.be.rejected + ctx.ProjectGetter.promises.getProject.callCount.should.equal(0) + }) + + it('should not call NotificationsBuilder.projectInvite.create', async function (ctx) { + await expect(ctx.call()).to.be.rejected + ctx.NotificationsBuilder.promises.projectInvite.callCount.should.equal( + 0 + ) + ctx.notification.create.callCount.should.equal(0) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/Collaborators/CollaboratorsInviteHandlerTests.mjs b/services/web/test/unit/src/Collaborators/CollaboratorsInviteHandlerTests.mjs deleted file mode 100644 index f386648552..0000000000 --- a/services/web/test/unit/src/Collaborators/CollaboratorsInviteHandlerTests.mjs +++ /dev/null @@ -1,833 +0,0 @@ -import sinon from 'sinon' -import { expect } from 'chai' -import esmock from 'esmock' -import mongodb from 'mongodb-legacy' -import Crypto from 'crypto' - -const ObjectId = mongodb.ObjectId - -const MODULE_PATH = - '../../../../app/src/Features/Collaborators/CollaboratorsInviteHandler.mjs' - -describe('CollaboratorsInviteHandler', function () { - beforeEach(async function () { - this.ProjectInvite = class ProjectInvite { - constructor(options) { - if (options == null) { - options = {} - } - this._id = new ObjectId() - for (const k in options) { - const v = options[k] - this[k] = v - } - } - } - this.ProjectInvite.prototype.save = sinon.stub() - this.ProjectInvite.findOne = sinon.stub() - this.ProjectInvite.find = sinon.stub() - this.ProjectInvite.deleteOne = sinon.stub() - this.ProjectInvite.findOneAndDelete = sinon.stub() - this.ProjectInvite.countDocuments = sinon.stub() - - this.Crypto = { - randomBytes: sinon.stub().callsFake(Crypto.randomBytes), - } - this.settings = {} - this.CollaboratorsEmailHandler = { promises: {} } - this.CollaboratorsHandler = { - promises: { - addUserIdToProject: sinon.stub(), - }, - } - this.UserGetter = { promises: { getUser: sinon.stub() } } - this.ProjectGetter = { promises: { getProject: sinon.stub().resolves() } } - this.NotificationsBuilder = { promises: {} } - this.tokenHmac = 'jkhajkefhaekjfhkfg' - this.CollaboratorsInviteHelper = { - generateToken: sinon.stub().returns(this.Crypto.randomBytes(24)), - hashInviteToken: sinon.stub().returns(this.tokenHmac), - } - - this.CollaboratorsInviteGetter = { - promises: { - getAllInvites: sinon.stub(), - }, - } - - this.SplitTestHandler = { - promises: { - getAssignmentForUser: sinon.stub().resolves(), - }, - } - - this.LimitationsManager = { - promises: { - canAcceptEditCollaboratorInvite: sinon.stub().resolves(), - }, - } - - this.ProjectAuditLogHandler = { - promises: { - addEntry: sinon.stub().resolves(), - }, - addEntryInBackground: sinon.stub(), - } - this.logger = { - debug: sinon.stub(), - warn: sinon.stub(), - err: sinon.stub(), - } - - this.CollaboratorsInviteHandler = await esmock.strict(MODULE_PATH, { - '@overleaf/settings': this.settings, - 
'../../../../app/src/models/ProjectInvite.js': { - ProjectInvite: this.ProjectInvite, - }, - '@overleaf/logger': this.logger, - '../../../../app/src/Features/Collaborators/CollaboratorsEmailHandler.mjs': - this.CollaboratorsEmailHandler, - '../../../../app/src/Features/Collaborators/CollaboratorsHandler.js': - this.CollaboratorsHandler, - '../../../../app/src/Features/User/UserGetter.js': this.UserGetter, - '../../../../app/src/Features/Project/ProjectGetter.js': - this.ProjectGetter, - '../../../../app/src/Features/Notifications/NotificationsBuilder.js': - this.NotificationsBuilder, - '../../../../app/src/Features/Collaborators/CollaboratorsInviteHelper.js': - this.CollaboratorsInviteHelper, - '../../../../app/src/Features/Collaborators/CollaboratorsInviteGetter': - this.CollaboratorsInviteGetter, - '../../../../app/src/Features/SplitTests/SplitTestHandler.js': - this.SplitTestHandler, - '../../../../app/src/Features/Subscription/LimitationsManager.js': - this.LimitationsManager, - '../../../../app/src/Features/Project/ProjectAuditLogHandler.js': - this.ProjectAuditLogHandler, - crypto: this.CryptogetAssignmentForUser, - }) - - this.projectId = new ObjectId() - this.sendingUserId = new ObjectId() - this.sendingUser = { - _id: this.sendingUserId, - name: 'Bob', - } - this.email = 'user@example.com' - this.userId = new ObjectId() - this.user = { - _id: this.userId, - email: 'someone@example.com', - } - this.inviteId = new ObjectId() - this.token = 'hnhteaosuhtaeosuahs' - this.privileges = 'readAndWrite' - this.fakeInvite = { - _id: this.inviteId, - email: this.email, - token: this.token, - tokenHmac: this.tokenHmac, - sendingUserId: this.sendingUserId, - projectId: this.projectId, - privileges: this.privileges, - createdAt: new Date(), - } - }) - - describe('inviteToProject', function () { - beforeEach(function () { - this.ProjectInvite.prototype.save.callsFake(async function () { - Object.defineProperty(this, 'toObject', { - value: function () { - return this - }, - writable: true, - configurable: true, - enumerable: false, - }) - return this - }) - this.CollaboratorsInviteHandler.promises._sendMessages = sinon - .stub() - .resolves() - this.call = async () => { - return await this.CollaboratorsInviteHandler.promises.inviteToProject( - this.projectId, - this.sendingUser, - this.email, - this.privileges - ) - } - }) - - describe('when all goes well', function () { - it('should produce the invite object', async function () { - const invite = await this.call() - expect(invite).to.not.equal(null) - expect(invite).to.not.equal(undefined) - expect(invite).to.be.instanceof(Object) - expect(invite).to.have.all.keys(['_id', 'email', 'privileges']) - }) - - it('should have generated a random token', async function () { - await this.call() - this.Crypto.randomBytes.callCount.should.equal(1) - }) - - it('should have generated a HMAC token', async function () { - await this.call() - this.CollaboratorsInviteHelper.hashInviteToken.callCount.should.equal(1) - }) - - it('should have called ProjectInvite.save', async function () { - await this.call() - this.ProjectInvite.prototype.save.callCount.should.equal(1) - }) - - it('should have called _sendMessages', async function () { - await this.call() - this.CollaboratorsInviteHandler.promises._sendMessages.callCount.should.equal( - 1 - ) - this.CollaboratorsInviteHandler.promises._sendMessages - .calledWith(this.projectId, this.sendingUser) - .should.equal(true) - }) - }) - - describe('when saving model produces an error', function () { - beforeEach(function 
() { - this.ProjectInvite.prototype.save.rejects(new Error('woops')) - }) - - it('should produce an error', async function () { - await expect(this.call()).to.be.rejectedWith(Error) - }) - }) - }) - - describe('_sendMessages', function () { - beforeEach(function () { - this.CollaboratorsEmailHandler.promises.notifyUserOfProjectInvite = sinon - .stub() - .resolves() - this.CollaboratorsInviteHandler.promises._trySendInviteNotification = - sinon.stub().resolves() - this.call = async () => { - await this.CollaboratorsInviteHandler.promises._sendMessages( - this.projectId, - this.sendingUser, - this.fakeInvite - ) - } - }) - - describe('when all goes well', function () { - it('should call CollaboratorsEmailHandler.notifyUserOfProjectInvite', async function () { - await this.call() - this.CollaboratorsEmailHandler.promises.notifyUserOfProjectInvite.callCount.should.equal( - 1 - ) - this.CollaboratorsEmailHandler.promises.notifyUserOfProjectInvite - .calledWith(this.projectId, this.fakeInvite.email, this.fakeInvite) - .should.equal(true) - }) - - it('should call _trySendInviteNotification', async function () { - await this.call() - this.CollaboratorsInviteHandler.promises._trySendInviteNotification.callCount.should.equal( - 1 - ) - this.CollaboratorsInviteHandler.promises._trySendInviteNotification - .calledWith(this.projectId, this.sendingUser, this.fakeInvite) - .should.equal(true) - }) - }) - - describe('when CollaboratorsEmailHandler.notifyUserOfProjectInvite produces an error', function () { - beforeEach(function () { - this.CollaboratorsEmailHandler.promises.notifyUserOfProjectInvite = - sinon.stub().rejects(new Error('woops')) - }) - - it('should not produce an error', async function () { - await expect(this.call()).to.be.fulfilled - expect(this.logger.err).to.be.calledOnce - }) - }) - - describe('when _trySendInviteNotification produces an error', function () { - beforeEach(function () { - this.CollaboratorsInviteHandler.promises._trySendInviteNotification = - sinon.stub().rejects(new Error('woops')) - }) - - it('should not produce an error', async function () { - await expect(this.call()).to.be.fulfilled - expect(this.logger.err).to.be.calledOnce - }) - }) - }) - describe('revokeInviteForUser', function () { - beforeEach(function () { - this.targetInvite = { - _id: new ObjectId(), - email: 'fake2@example.org', - two: 2, - } - this.fakeInvites = [ - { _id: new ObjectId(), email: 'fake1@example.org', one: 1 }, - this.targetInvite, - ] - this.fakeInvitesWithoutUser = [ - { _id: new ObjectId(), email: 'fake1@example.org', one: 1 }, - { _id: new ObjectId(), email: 'fake3@example.org', two: 2 }, - ] - this.targetEmail = [{ email: 'fake2@example.org' }] - - this.CollaboratorsInviteGetter.promises.getAllInvites.resolves( - this.fakeInvites - ) - this.CollaboratorsInviteHandler.promises.revokeInvite = sinon - .stub() - .resolves(this.targetInvite) - - this.call = async () => { - return await this.CollaboratorsInviteHandler.promises.revokeInviteForUser( - this.projectId, - this.targetEmail - ) - } - }) - - describe('for a valid user', function () { - it('should have called CollaboratorsInviteGetter.getAllInvites', async function () { - await this.call() - this.CollaboratorsInviteGetter.promises.getAllInvites.callCount.should.equal( - 1 - ) - this.CollaboratorsInviteGetter.promises.getAllInvites - .calledWith(this.projectId) - .should.equal(true) - }) - - it('should have called revokeInvite', async function () { - await this.call() - 
this.CollaboratorsInviteHandler.promises.revokeInvite.callCount.should.equal( - 1 - ) - - this.CollaboratorsInviteHandler.promises.revokeInvite - .calledWith(this.projectId, this.targetInvite._id) - .should.equal(true) - }) - }) - - describe('for a user without an invite in the project', function () { - beforeEach(function () { - this.CollaboratorsInviteGetter.promises.getAllInvites.resolves( - this.fakeInvitesWithoutUser - ) - }) - it('should not have called CollaboratorsInviteHandler.revokeInvite', async function () { - await this.call() - this.CollaboratorsInviteHandler.promises.revokeInvite.callCount.should.equal( - 0 - ) - }) - }) - }) - - describe('revokeInvite', function () { - beforeEach(function () { - this.ProjectInvite.findOneAndDelete.returns({ - exec: sinon.stub().resolves(this.fakeInvite), - }) - this.CollaboratorsInviteHandler.promises._tryCancelInviteNotification = - sinon.stub().resolves() - this.call = async () => { - return await this.CollaboratorsInviteHandler.promises.revokeInvite( - this.projectId, - this.inviteId - ) - } - }) - - describe('when all goes well', function () { - it('should call ProjectInvite.findOneAndDelete', async function () { - await this.call() - this.ProjectInvite.findOneAndDelete.should.have.been.calledOnce - this.ProjectInvite.findOneAndDelete.should.have.been.calledWith({ - projectId: this.projectId, - _id: this.inviteId, - }) - }) - - it('should call _tryCancelInviteNotification', async function () { - await this.call() - this.CollaboratorsInviteHandler.promises._tryCancelInviteNotification.callCount.should.equal( - 1 - ) - this.CollaboratorsInviteHandler.promises._tryCancelInviteNotification - .calledWith(this.inviteId) - .should.equal(true) - }) - - it('should return the deleted invite', async function () { - const invite = await this.call() - expect(invite).to.deep.equal(this.fakeInvite) - }) - }) - - describe('when remove produces an error', function () { - beforeEach(function () { - this.ProjectInvite.findOneAndDelete.returns({ - exec: sinon.stub().rejects(new Error('woops')), - }) - }) - - it('should produce an error', async function () { - await expect(this.call()).to.be.rejectedWith(Error) - }) - }) - }) - - describe('generateNewInvite', function () { - beforeEach(function () { - this.fakeInviteToProjectObject = { - _id: new ObjectId(), - email: this.email, - privileges: this.privileges, - } - this.CollaboratorsInviteHandler.promises.revokeInvite = sinon - .stub() - .resolves(this.fakeInvite) - this.CollaboratorsInviteHandler.promises.inviteToProject = sinon - .stub() - .resolves(this.fakeInviteToProjectObject) - this.call = async () => { - return await this.CollaboratorsInviteHandler.promises.generateNewInvite( - this.projectId, - this.sendingUser, - this.inviteId - ) - } - }) - - describe('when all goes well', function () { - it('should call revokeInvite', async function () { - await this.call() - this.CollaboratorsInviteHandler.promises.revokeInvite.callCount.should.equal( - 1 - ) - this.CollaboratorsInviteHandler.promises.revokeInvite - .calledWith(this.projectId, this.inviteId) - .should.equal(true) - }) - - it('should have called inviteToProject', async function () { - await this.call() - this.CollaboratorsInviteHandler.promises.inviteToProject.callCount.should.equal( - 1 - ) - this.CollaboratorsInviteHandler.promises.inviteToProject - .calledWith( - this.projectId, - this.sendingUser, - this.fakeInvite.email, - this.fakeInvite.privileges - ) - .should.equal(true) - }) - - it('should return the invite', async function () { - 
const invite = await this.call() - expect(invite).to.deep.equal(this.fakeInviteToProjectObject) - }) - }) - - describe('when revokeInvite produces an error', function () { - beforeEach(function () { - this.CollaboratorsInviteHandler.promises.revokeInvite = sinon - .stub() - .rejects(new Error('woops')) - }) - - it('should produce an error', async function () { - await expect(this.call()).to.be.rejectedWith(Error) - }) - - it('should not have called inviteToProject', async function () { - await expect(this.call()).to.be.rejected - this.CollaboratorsInviteHandler.promises.inviteToProject.callCount.should.equal( - 0 - ) - }) - }) - - describe('when findOne does not find an invite', function () { - beforeEach(function () { - this.CollaboratorsInviteHandler.promises.revokeInvite = sinon - .stub() - .resolves(null) - }) - - it('should not have called inviteToProject', async function () { - await this.call() - this.CollaboratorsInviteHandler.promises.inviteToProject.callCount.should.equal( - 0 - ) - }) - }) - }) - - describe('acceptInvite', function () { - beforeEach(function () { - this.fakeProject = { - _id: this.projectId, - owner_ref: this.sendingUserId, - } - this.ProjectGetter.promises.getProject = sinon - .stub() - .resolves(this.fakeProject) - this.CollaboratorsHandler.promises.addUserIdToProject.resolves() - this.CollaboratorsInviteHandler.promises._tryCancelInviteNotification = - sinon.stub().resolves() - this.LimitationsManager.promises.canAcceptEditCollaboratorInvite.resolves( - true - ) - this.ProjectInvite.deleteOne.returns({ exec: sinon.stub().resolves() }) - this.call = async () => { - await this.CollaboratorsInviteHandler.promises.acceptInvite( - this.fakeInvite, - this.projectId, - this.user - ) - } - }) - - describe('when all goes well', function () { - it('should add readAndWrite invitees to the project as normal', async function () { - await this.call() - this.CollaboratorsHandler.promises.addUserIdToProject.should.have.been.calledWith( - this.projectId, - this.sendingUserId, - this.userId, - this.fakeInvite.privileges - ) - }) - - it('should have called ProjectInvite.deleteOne', async function () { - await this.call() - this.ProjectInvite.deleteOne.callCount.should.equal(1) - this.ProjectInvite.deleteOne - .calledWith({ _id: this.inviteId }) - .should.equal(true) - }) - }) - - describe('when the invite is for readOnly access', function () { - beforeEach(function () { - this.fakeInvite.privileges = 'readOnly' - }) - - it('should have called CollaboratorsHandler.addUserIdToProject', async function () { - await this.call() - this.CollaboratorsHandler.promises.addUserIdToProject.callCount.should.equal( - 1 - ) - this.CollaboratorsHandler.promises.addUserIdToProject - .calledWith( - this.projectId, - this.sendingUserId, - this.userId, - this.fakeInvite.privileges - ) - .should.equal(true) - }) - }) - - describe('when the project has no more edit collaborator slots', function () { - beforeEach(function () { - this.LimitationsManager.promises.canAcceptEditCollaboratorInvite.resolves( - false - ) - }) - - it('should add readAndWrite invitees to the project as readOnly (pendingEditor) users', async function () { - await this.call() - this.ProjectAuditLogHandler.promises.addEntry.should.have.been.calledWith( - this.projectId, - 'editor-moved-to-pending', - null, - null, - { userId: this.userId.toString(), role: 'editor' } - ) - this.CollaboratorsHandler.promises.addUserIdToProject.should.have.been.calledWith( - this.projectId, - this.sendingUserId, - this.userId, - 'readOnly', - { 
pendingEditor: true } - ) - }) - }) - - describe('when addUserIdToProject produces an error', function () { - beforeEach(function () { - this.CollaboratorsHandler.promises.addUserIdToProject.callsArgWith( - 4, - new Error('woops') - ) - }) - - it('should produce an error', async function () { - await expect(this.call()).to.be.rejectedWith(Error) - }) - - it('should have called CollaboratorsHandler.addUserIdToProject', async function () { - await expect(this.call()).to.be.rejected - this.CollaboratorsHandler.promises.addUserIdToProject.callCount.should.equal( - 1 - ) - this.CollaboratorsHandler.promises.addUserIdToProject - .calledWith( - this.projectId, - this.sendingUserId, - this.userId, - this.fakeInvite.privileges - ) - .should.equal(true) - }) - - it('should not have called ProjectInvite.deleteOne', async function () { - await expect(this.call()).to.be.rejected - this.ProjectInvite.deleteOne.callCount.should.equal(0) - }) - }) - - describe('when ProjectInvite.deleteOne produces an error', function () { - beforeEach(function () { - this.ProjectInvite.deleteOne.returns({ - exec: sinon.stub().rejects(new Error('woops')), - }) - }) - - it('should produce an error', async function () { - await expect(this.call()).to.be.rejectedWith(Error) - }) - - it('should have called CollaboratorsHandler.addUserIdToProject', async function () { - await expect(this.call()).to.be.rejected - this.CollaboratorsHandler.promises.addUserIdToProject.callCount.should.equal( - 1 - ) - this.CollaboratorsHandler.promises.addUserIdToProject.should.have.been.calledWith( - this.projectId, - this.sendingUserId, - this.userId, - this.fakeInvite.privileges - ) - }) - - it('should have called ProjectInvite.deleteOne', async function () { - await expect(this.call()).to.be.rejected - this.ProjectInvite.deleteOne.callCount.should.equal(1) - }) - }) - }) - - describe('_tryCancelInviteNotification', function () { - beforeEach(function () { - this.inviteId = new ObjectId() - this.currentUser = { _id: new ObjectId() } - this.notification = { read: sinon.stub().resolves() } - this.NotificationsBuilder.promises.projectInvite = sinon - .stub() - .returns(this.notification) - this.call = async () => { - await this.CollaboratorsInviteHandler.promises._tryCancelInviteNotification( - this.inviteId - ) - } - }) - - it('should call notification.read', async function () { - await this.call() - this.notification.read.callCount.should.equal(1) - }) - - describe('when notification.read produces an error', function () { - beforeEach(function () { - this.notification = { - read: sinon.stub().rejects(new Error('woops')), - } - this.NotificationsBuilder.promises.projectInvite = sinon - .stub() - .returns(this.notification) - }) - - it('should produce an error', async function () { - await expect(this.call()).to.be.rejected - }) - }) - }) - - describe('_trySendInviteNotification', function () { - beforeEach(function () { - this.invite = { - _id: new ObjectId(), - token: 'some_token', - sendingUserId: new ObjectId(), - projectId: this.project_id, - targetEmail: 'user@example.com', - createdAt: new Date(), - } - this.sendingUser = { - _id: new ObjectId(), - first_name: 'jim', - } - this.existingUser = { _id: new ObjectId() } - this.UserGetter.promises.getUserByAnyEmail = sinon - .stub() - .resolves(this.existingUser) - this.fakeProject = { - _id: this.project_id, - name: 'some project', - } - this.ProjectGetter.promises.getProject = sinon - .stub() - .resolves(this.fakeProject) - this.notification = { create: sinon.stub().resolves() } - 
this.NotificationsBuilder.promises.projectInvite = sinon - .stub() - .returns(this.notification) - this.call = async () => { - await this.CollaboratorsInviteHandler.promises._trySendInviteNotification( - this.project_id, - this.sendingUser, - this.invite - ) - } - }) - - describe('when the user exists', function () { - beforeEach(function () {}) - - it('should call getUser', async function () { - await this.call() - this.UserGetter.promises.getUserByAnyEmail.callCount.should.equal(1) - this.UserGetter.promises.getUserByAnyEmail - .calledWith(this.invite.email) - .should.equal(true) - }) - - it('should call getProject', async function () { - await this.call() - this.ProjectGetter.promises.getProject.callCount.should.equal(1) - this.ProjectGetter.promises.getProject - .calledWith(this.project_id) - .should.equal(true) - }) - - it('should call NotificationsBuilder.projectInvite.create', async function () { - await this.call() - this.NotificationsBuilder.promises.projectInvite.callCount.should.equal( - 1 - ) - this.notification.create.callCount.should.equal(1) - }) - - describe('when getProject produces an error', function () { - beforeEach(function () { - this.ProjectGetter.promises.getProject.callsArgWith( - 2, - new Error('woops') - ) - }) - - it('should produce an error', async function () { - await expect(this.call()).to.be.rejectedWith(Error) - }) - - it('should not call NotificationsBuilder.projectInvite.create', async function () { - await expect(this.call()).to.be.rejected - this.NotificationsBuilder.promises.projectInvite.callCount.should.equal( - 0 - ) - this.notification.create.callCount.should.equal(0) - }) - }) - - describe('when projectInvite.create produces an error', function () { - beforeEach(function () { - this.notification.create.callsArgWith(0, new Error('woops')) - }) - - it('should produce an error', async function () { - await expect(this.call()).to.be.rejectedWith(Error) - }) - }) - }) - - describe('when the user does not exist', function () { - beforeEach(function () { - this.UserGetter.promises.getUserByAnyEmail = sinon.stub().resolves(null) - }) - - it('should call getUser', async function () { - await this.call() - this.UserGetter.promises.getUserByAnyEmail.callCount.should.equal(1) - this.UserGetter.promises.getUserByAnyEmail - .calledWith(this.invite.email) - .should.equal(true) - }) - - it('should not call getProject', async function () { - await this.call() - this.ProjectGetter.promises.getProject.callCount.should.equal(0) - }) - - it('should not call NotificationsBuilder.projectInvite.create', async function () { - await this.call() - this.NotificationsBuilder.promises.projectInvite.callCount.should.equal( - 0 - ) - this.notification.create.callCount.should.equal(0) - }) - }) - - describe('when the getUser produces an error', function () { - beforeEach(function () { - this.UserGetter.promises.getUserByAnyEmail = sinon - .stub() - .rejects(new Error('woops')) - }) - - it('should produce an error', async function () { - await expect(this.call()).to.be.rejectedWith(Error) - }) - - it('should call getUser', async function () { - await expect(this.call()).to.be.rejected - this.UserGetter.promises.getUserByAnyEmail.callCount.should.equal(1) - this.UserGetter.promises.getUserByAnyEmail - .calledWith(this.invite.email) - .should.equal(true) - }) - - it('should not call getProject', async function () { - await expect(this.call()).to.be.rejected - this.ProjectGetter.promises.getProject.callCount.should.equal(0) - }) - - it('should not call 
NotificationsBuilder.projectInvite.create', async function () { - await expect(this.call()).to.be.rejected - this.NotificationsBuilder.promises.projectInvite.callCount.should.equal( - 0 - ) - this.notification.create.callCount.should.equal(0) - }) - }) - }) -}) diff --git a/services/web/test/unit/src/Contact/ContactController.test.mjs b/services/web/test/unit/src/Contact/ContactController.test.mjs new file mode 100644 index 0000000000..13f70c81f6 --- /dev/null +++ b/services/web/test/unit/src/Contact/ContactController.test.mjs @@ -0,0 +1,147 @@ +import { expect, vi } from 'vitest' +import sinon from 'sinon' +import MockResponse from '../helpers/MockResponse.js' +const modulePath = '../../../../app/src/Features/Contacts/ContactController.mjs' + +describe('ContactController', function () { + beforeEach(async function (ctx) { + ctx.SessionManager = { getLoggedInUserId: sinon.stub() } + + vi.doMock('../../../../app/src/Features/User/UserGetter', () => ({ + default: (ctx.UserGetter = { + promises: {}, + }), + })) + + vi.doMock('../../../../app/src/Features/Contacts/ContactManager', () => ({ + default: (ctx.ContactManager = { promises: {} }), + })) + + vi.doMock( + '../../../../app/src/Features/Authentication/SessionManager', + () => ({ + default: (ctx.SessionManager = {}), + }) + ) + + vi.doMock('../../../../app/src/infrastructure/Modules', () => ({ + default: (ctx.Modules = { + promises: { hooks: {} }, + }), + })) + + ctx.ContactController = (await import(modulePath)).default + + ctx.req = {} + ctx.res = new MockResponse() + }) + + describe('getContacts', function () { + beforeEach(function (ctx) { + ctx.user_id = 'mock-user-id' + ctx.contact_ids = ['contact-1', 'contact-2', 'contact-3'] + ctx.contacts = [ + { + _id: 'contact-1', + email: 'joe@example.com', + first_name: 'Joe', + last_name: 'Example', + unsued: 'foo', + }, + { + _id: 'contact-2', + email: 'jane@example.com', + first_name: 'Jane', + last_name: 'Example', + unsued: 'foo', + holdingAccount: true, + }, + { + _id: 'contact-3', + email: 'jim@example.com', + first_name: 'Jim', + last_name: 'Example', + unsued: 'foo', + }, + ] + ctx.SessionManager.getLoggedInUserId = sinon.stub().returns(ctx.user_id) + ctx.ContactManager.promises.getContactIds = sinon + .stub() + .resolves(ctx.contact_ids) + ctx.UserGetter.promises.getUsers = sinon.stub().resolves(ctx.contacts) + ctx.Modules.promises.hooks.fire = sinon.stub() + }) + + it('should look up the logged in user id', async function (ctx) { + ctx.ContactController.getContacts(ctx.req, ctx.res) + ctx.SessionManager.getLoggedInUserId + .calledWith(ctx.req.session) + .should.equal(true) + }) + + it('should get the users contact ids', async function (ctx) { + ctx.res.callback = () => { + expect( + ctx.ContactManager.promises.getContactIds + ).to.have.been.calledWith(ctx.user_id, { limit: 50 }) + } + ctx.ContactController.getContacts(ctx.req, ctx.res) + }) + + it('should populate the users contacts ids', function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + expect(ctx.UserGetter.promises.getUsers).to.have.been.calledWith( + ctx.contact_ids, + { + email: 1, + first_name: 1, + last_name: 1, + holdingAccount: 1, + } + ) + resolve() + } + ctx.ContactController.getContacts(ctx.req, ctx.res, resolve) + }) + }) + + it('should fire the getContact module hook', function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + expect(ctx.Modules.promises.hooks.fire).to.have.been.calledWith( + 'getContacts', + ctx.user_id + ) + resolve() + } + 
ctx.ContactController.getContacts(ctx.req, ctx.res, resolve) + }) + }) + + it('should return a formatted list of contacts in contact list order, without holding accounts', function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + ctx.res.json.args[0][0].contacts.should.deep.equal([ + { + id: 'contact-1', + email: 'joe@example.com', + first_name: 'Joe', + last_name: 'Example', + type: 'user', + }, + { + id: 'contact-3', + email: 'jim@example.com', + first_name: 'Jim', + last_name: 'Example', + type: 'user', + }, + ]) + resolve() + } + ctx.ContactController.getContacts(ctx.req, ctx.res, resolve) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/Contact/ContactControllerTests.mjs b/services/web/test/unit/src/Contact/ContactControllerTests.mjs deleted file mode 100644 index ea5a1d0220..0000000000 --- a/services/web/test/unit/src/Contact/ContactControllerTests.mjs +++ /dev/null @@ -1,129 +0,0 @@ -import sinon from 'sinon' -import { expect } from 'chai' -import esmock from 'esmock' -import MockResponse from '../helpers/MockResponse.js' -const modulePath = '../../../../app/src/Features/Contacts/ContactController.mjs' - -describe('ContactController', function () { - beforeEach(async function () { - this.SessionManager = { getLoggedInUserId: sinon.stub() } - this.ContactController = await esmock.strict(modulePath, { - '../../../../app/src/Features/User/UserGetter': (this.UserGetter = { - promises: {}, - }), - '../../../../app/src/Features/Contacts/ContactManager': - (this.ContactManager = { promises: {} }), - '../../../../app/src/Features/Authentication/SessionManager': - (this.SessionManager = {}), - '../../../../app/src/infrastructure/Modules': (this.Modules = { - promises: { hooks: {} }, - }), - }) - - this.req = {} - this.res = new MockResponse() - }) - - describe('getContacts', function () { - beforeEach(function () { - this.user_id = 'mock-user-id' - this.contact_ids = ['contact-1', 'contact-2', 'contact-3'] - this.contacts = [ - { - _id: 'contact-1', - email: 'joe@example.com', - first_name: 'Joe', - last_name: 'Example', - unsued: 'foo', - }, - { - _id: 'contact-2', - email: 'jane@example.com', - first_name: 'Jane', - last_name: 'Example', - unsued: 'foo', - holdingAccount: true, - }, - { - _id: 'contact-3', - email: 'jim@example.com', - first_name: 'Jim', - last_name: 'Example', - unsued: 'foo', - }, - ] - this.SessionManager.getLoggedInUserId = sinon.stub().returns(this.user_id) - this.ContactManager.promises.getContactIds = sinon - .stub() - .resolves(this.contact_ids) - this.UserGetter.promises.getUsers = sinon.stub().resolves(this.contacts) - this.Modules.promises.hooks.fire = sinon.stub() - }) - - it('should look up the logged in user id', async function () { - this.ContactController.getContacts(this.req, this.res) - this.SessionManager.getLoggedInUserId - .calledWith(this.req.session) - .should.equal(true) - }) - - it('should get the users contact ids', async function () { - this.res.callback = () => { - expect( - this.ContactManager.promises.getContactIds - ).to.have.been.calledWith(this.user_id, { limit: 50 }) - } - this.ContactController.getContacts(this.req, this.res) - }) - - it('should populate the users contacts ids', function (done) { - this.res.callback = () => { - expect(this.UserGetter.promises.getUsers).to.have.been.calledWith( - this.contact_ids, - { - email: 1, - first_name: 1, - last_name: 1, - holdingAccount: 1, - } - ) - done() - } - this.ContactController.getContacts(this.req, this.res, done) - }) - - it('should fire the getContact 
module hook', function (done) { - this.res.callback = () => { - expect(this.Modules.promises.hooks.fire).to.have.been.calledWith( - 'getContacts', - this.user_id - ) - done() - } - this.ContactController.getContacts(this.req, this.res, done) - }) - - it('should return a formatted list of contacts in contact list order, without holding accounts', function (done) { - this.res.callback = () => { - this.res.json.args[0][0].contacts.should.deep.equal([ - { - id: 'contact-1', - email: 'joe@example.com', - first_name: 'Joe', - last_name: 'Example', - type: 'user', - }, - { - id: 'contact-3', - email: 'jim@example.com', - first_name: 'Jim', - last_name: 'Example', - type: 'user', - }, - ]) - done() - } - this.ContactController.getContacts(this.req, this.res, done) - }) - }) -}) diff --git a/services/web/test/unit/src/Cooldown/CooldownMiddleware.test.mjs b/services/web/test/unit/src/Cooldown/CooldownMiddleware.test.mjs new file mode 100644 index 0000000000..846a54d4ce --- /dev/null +++ b/services/web/test/unit/src/Cooldown/CooldownMiddleware.test.mjs @@ -0,0 +1,125 @@ +import { expect, vi } from 'vitest' +import sinon from 'sinon' +const modulePath = new URL( + '../../../../app/src/Features/Cooldown/CooldownMiddleware.mjs', + import.meta.url +).pathname + +describe('CooldownMiddleware', function () { + beforeEach(async function (ctx) { + ctx.CooldownManager = { isProjectOnCooldown: sinon.stub() } + + vi.doMock( + '../../../../app/src/Features/Cooldown/CooldownManager.js', + () => ({ + default: ctx.CooldownManager, + }) + ) + + ctx.CooldownMiddleware = (await import(modulePath)).default + }) + + describe('freezeProject', function () { + describe('when project is on cooldown', function () { + beforeEach(function (ctx) { + ctx.CooldownManager.isProjectOnCooldown = sinon + .stub() + .callsArgWith(1, null, true) + ctx.req = { params: { Project_id: 'abc' } } + ctx.res = { sendStatus: sinon.stub() } + return (ctx.next = sinon.stub()) + }) + + it('should call CooldownManager.isProjectOnCooldown', function (ctx) { + ctx.CooldownMiddleware.freezeProject(ctx.req, ctx.res, ctx.next) + ctx.CooldownManager.isProjectOnCooldown.callCount.should.equal(1) + return ctx.CooldownManager.isProjectOnCooldown + .calledWith('abc') + .should.equal(true) + }) + + it('should not produce an error', function (ctx) { + ctx.CooldownMiddleware.freezeProject(ctx.req, ctx.res, ctx.next) + return ctx.next.callCount.should.equal(0) + }) + + it('should send a 429 status', function (ctx) { + ctx.CooldownMiddleware.freezeProject(ctx.req, ctx.res, ctx.next) + ctx.res.sendStatus.callCount.should.equal(1) + return ctx.res.sendStatus.calledWith(429).should.equal(true) + }) + }) + + describe('when project is not on cooldown', function () { + beforeEach(function (ctx) { + ctx.CooldownManager.isProjectOnCooldown = sinon + .stub() + .callsArgWith(1, null, false) + ctx.req = { params: { Project_id: 'abc' } } + ctx.res = { sendStatus: sinon.stub() } + return (ctx.next = sinon.stub()) + }) + + it('should call CooldownManager.isProjectOnCooldown', function (ctx) { + ctx.CooldownMiddleware.freezeProject(ctx.req, ctx.res, ctx.next) + ctx.CooldownManager.isProjectOnCooldown.callCount.should.equal(1) + return ctx.CooldownManager.isProjectOnCooldown + .calledWith('abc') + .should.equal(true) + }) + + it('call next with no arguments', function (ctx) { + ctx.CooldownMiddleware.freezeProject(ctx.req, ctx.res, ctx.next) + ctx.next.callCount.should.equal(1) + return expect(ctx.next.lastCall.args.length).to.equal(0) + }) + }) + + describe('when 
isProjectOnCooldown produces an error', function () { + beforeEach(function (ctx) { + ctx.CooldownManager.isProjectOnCooldown = sinon + .stub() + .callsArgWith(1, new Error('woops')) + ctx.req = { params: { Project_id: 'abc' } } + ctx.res = { sendStatus: sinon.stub() } + return (ctx.next = sinon.stub()) + }) + + it('should call CooldownManager.isProjectOnCooldown', function (ctx) { + ctx.CooldownMiddleware.freezeProject(ctx.req, ctx.res, ctx.next) + ctx.CooldownManager.isProjectOnCooldown.callCount.should.equal(1) + return ctx.CooldownManager.isProjectOnCooldown + .calledWith('abc') + .should.equal(true) + }) + + it('call next with an error', function (ctx) { + ctx.CooldownMiddleware.freezeProject(ctx.req, ctx.res, ctx.next) + ctx.next.callCount.should.equal(1) + return expect(ctx.next.lastCall.args[0]).to.be.instanceof(Error) + }) + }) + + describe('when projectId is not part of route', function () { + beforeEach(function (ctx) { + ctx.CooldownManager.isProjectOnCooldown = sinon + .stub() + .callsArgWith(1, null, true) + ctx.req = { params: { lol: 'abc' } } + ctx.res = { sendStatus: sinon.stub() } + return (ctx.next = sinon.stub()) + }) + + it('call next with an error', function (ctx) { + ctx.CooldownMiddleware.freezeProject(ctx.req, ctx.res, ctx.next) + ctx.next.callCount.should.equal(1) + return expect(ctx.next.lastCall.args[0]).to.be.instanceof(Error) + }) + + it('should not call CooldownManager.isProjectOnCooldown', function (ctx) { + ctx.CooldownMiddleware.freezeProject(ctx.req, ctx.res, ctx.next) + return ctx.CooldownManager.isProjectOnCooldown.callCount.should.equal(0) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/Cooldown/CooldownMiddlewareTests.mjs b/services/web/test/unit/src/Cooldown/CooldownMiddlewareTests.mjs deleted file mode 100644 index 22d05fba56..0000000000 --- a/services/web/test/unit/src/Cooldown/CooldownMiddlewareTests.mjs +++ /dev/null @@ -1,134 +0,0 @@ -/* eslint-disable - max-len, - no-return-assign, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -import esmock from 'esmock' -import sinon from 'sinon' -import { expect } from 'chai' -const modulePath = new URL( - '../../../../app/src/Features/Cooldown/CooldownMiddleware.mjs', - import.meta.url -).pathname - -describe('CooldownMiddleware', function () { - beforeEach(async function () { - this.CooldownManager = { isProjectOnCooldown: sinon.stub() } - return (this.CooldownMiddleware = await esmock.strict(modulePath, { - '../../../../app/src/Features/Cooldown/CooldownManager.js': - this.CooldownManager, - })) - }) - - describe('freezeProject', function () { - describe('when project is on cooldown', function () { - beforeEach(function () { - this.CooldownManager.isProjectOnCooldown = sinon - .stub() - .callsArgWith(1, null, true) - this.req = { params: { Project_id: 'abc' } } - this.res = { sendStatus: sinon.stub() } - return (this.next = sinon.stub()) - }) - - it('should call CooldownManager.isProjectOnCooldown', function () { - this.CooldownMiddleware.freezeProject(this.req, this.res, this.next) - this.CooldownManager.isProjectOnCooldown.callCount.should.equal(1) - return this.CooldownManager.isProjectOnCooldown - .calledWith('abc') - .should.equal(true) - }) - - it('should not produce an error', function () { - this.CooldownMiddleware.freezeProject(this.req, this.res, this.next) - return this.next.callCount.should.equal(0) - }) - - it('should send a 429 status', function () { - this.CooldownMiddleware.freezeProject(this.req, this.res, this.next) - this.res.sendStatus.callCount.should.equal(1) - return this.res.sendStatus.calledWith(429).should.equal(true) - }) - }) - - describe('when project is not on cooldown', function () { - beforeEach(function () { - this.CooldownManager.isProjectOnCooldown = sinon - .stub() - .callsArgWith(1, null, false) - this.req = { params: { Project_id: 'abc' } } - this.res = { sendStatus: sinon.stub() } - return (this.next = sinon.stub()) - }) - - it('should call CooldownManager.isProjectOnCooldown', function () { - this.CooldownMiddleware.freezeProject(this.req, this.res, this.next) - this.CooldownManager.isProjectOnCooldown.callCount.should.equal(1) - return this.CooldownManager.isProjectOnCooldown - .calledWith('abc') - .should.equal(true) - }) - - it('call next with no arguments', function () { - this.CooldownMiddleware.freezeProject(this.req, this.res, this.next) - this.next.callCount.should.equal(1) - return expect(this.next.lastCall.args.length).to.equal(0) - }) - }) - - describe('when isProjectOnCooldown produces an error', function () { - beforeEach(function () { - this.CooldownManager.isProjectOnCooldown = sinon - .stub() - .callsArgWith(1, new Error('woops')) - this.req = { params: { Project_id: 'abc' } } - this.res = { sendStatus: sinon.stub() } - return (this.next = sinon.stub()) - }) - - it('should call CooldownManager.isProjectOnCooldown', function () { - this.CooldownMiddleware.freezeProject(this.req, this.res, this.next) - this.CooldownManager.isProjectOnCooldown.callCount.should.equal(1) - return this.CooldownManager.isProjectOnCooldown - .calledWith('abc') - .should.equal(true) - }) - - it('call next with an error', function () { - this.CooldownMiddleware.freezeProject(this.req, this.res, this.next) - this.next.callCount.should.equal(1) - return expect(this.next.lastCall.args[0]).to.be.instanceof(Error) - }) - }) - - describe('when 
projectId is not part of route', function () { - beforeEach(function () { - this.CooldownManager.isProjectOnCooldown = sinon - .stub() - .callsArgWith(1, null, true) - this.req = { params: { lol: 'abc' } } - this.res = { sendStatus: sinon.stub() } - return (this.next = sinon.stub()) - }) - - it('call next with an error', function () { - this.CooldownMiddleware.freezeProject(this.req, this.res, this.next) - this.next.callCount.should.equal(1) - return expect(this.next.lastCall.args[0]).to.be.instanceof(Error) - }) - - it('should not call CooldownManager.isProjectOnCooldown', function () { - this.CooldownMiddleware.freezeProject(this.req, this.res, this.next) - return this.CooldownManager.isProjectOnCooldown.callCount.should.equal( - 0 - ) - }) - }) - }) -}) diff --git a/services/web/test/unit/src/DocumentUpdater/DocumentUpdaterController.test.mjs b/services/web/test/unit/src/DocumentUpdater/DocumentUpdaterController.test.mjs new file mode 100644 index 0000000000..5a60903552 --- /dev/null +++ b/services/web/test/unit/src/DocumentUpdater/DocumentUpdaterController.test.mjs @@ -0,0 +1,102 @@ +import { expect, vi } from 'vitest' +import sinon from 'sinon' +import MockResponse from '../helpers/MockResponse.js' + +const MODULE_PATH = + '../../../../app/src/Features/DocumentUpdater/DocumentUpdaterController.mjs' + +describe('DocumentUpdaterController', function () { + beforeEach(async function (ctx) { + ctx.DocumentUpdaterHandler = { + promises: { + getDocument: sinon.stub(), + }, + } + ctx.ProjectLocator = { + promises: { + findElement: sinon.stub(), + }, + } + + vi.doMock('@overleaf/settings', () => ({ + default: ctx.settings, + })) + + vi.doMock('../../../../app/src/Features/Project/ProjectLocator.js', () => ({ + default: ctx.ProjectLocator, + })) + + vi.doMock( + '../../../../app/src/Features/DocumentUpdater/DocumentUpdaterHandler.js', + () => ({ + default: ctx.DocumentUpdaterHandler, + }) + ) + + ctx.controller = (await import(MODULE_PATH)).default + ctx.projectId = '2k3j1lk3j21lk3j' + ctx.fileId = '12321kklj1lk3jk12' + ctx.req = { + params: { + Project_id: ctx.projectId, + Doc_id: ctx.docId, + }, + get(key) { + return undefined + }, + } + ctx.lines = ['test', '', 'testing'] + ctx.res = new MockResponse() + ctx.next = sinon.stub() + ctx.doc = { name: 'myfile.tex' } + }) + + describe('getDoc', function () { + beforeEach(function (ctx) { + ctx.DocumentUpdaterHandler.promises.getDocument.resolves({ + lines: ctx.lines, + }) + ctx.ProjectLocator.promises.findElement.resolves({ + element: ctx.doc, + }) + ctx.res = new MockResponse() + }) + + it('should call the document updater handler with the project_id and doc_id', async function (ctx) { + await ctx.controller.getDoc(ctx.req, ctx.res, ctx.next) + expect( + ctx.DocumentUpdaterHandler.promises.getDocument + ).to.have.been.calledOnceWith( + ctx.req.params.Project_id, + ctx.req.params.Doc_id, + -1 + ) + }) + + it('should return the content', async function (ctx) { + await ctx.controller.getDoc(ctx.req, ctx.res) + expect(ctx.next).to.not.have.been.called + expect(ctx.res.statusCode).to.equal(200) + expect(ctx.res.body).to.equal('test\n\ntesting') + }) + + it('should find the doc in the project', async function (ctx) { + await ctx.controller.getDoc(ctx.req, ctx.res) + expect( + ctx.ProjectLocator.promises.findElement + ).to.have.been.calledOnceWith({ + project_id: ctx.projectId, + element_id: ctx.docId, + type: 'doc', + }) + }) + + it('should set the Content-Disposition header', async function (ctx) { + await ctx.controller.getDoc(ctx.req, ctx.res) + 
expect(ctx.res.setContentDisposition).to.have.been.calledWith( + 'attachment', + { filename: ctx.doc.name } + ) + }) + }) +}) diff --git a/services/web/test/unit/src/DocumentUpdater/DocumentUpdaterControllerTests.mjs b/services/web/test/unit/src/DocumentUpdater/DocumentUpdaterControllerTests.mjs deleted file mode 100644 index 6a783d452e..0000000000 --- a/services/web/test/unit/src/DocumentUpdater/DocumentUpdaterControllerTests.mjs +++ /dev/null @@ -1,93 +0,0 @@ -import sinon from 'sinon' -import { expect } from 'chai' -import esmock from 'esmock' -import MockResponse from '../helpers/MockResponse.js' - -const MODULE_PATH = - '../../../../app/src/Features/DocumentUpdater/DocumentUpdaterController.mjs' - -describe('DocumentUpdaterController', function () { - beforeEach(async function () { - this.DocumentUpdaterHandler = { - promises: { - getDocument: sinon.stub(), - }, - } - this.ProjectLocator = { - promises: { - findElement: sinon.stub(), - }, - } - this.controller = await esmock.strict(MODULE_PATH, { - '@overleaf/settings': this.settings, - '../../../../app/src/Features/Project/ProjectLocator.js': - this.ProjectLocator, - '../../../../app/src/Features/DocumentUpdater/DocumentUpdaterHandler.js': - this.DocumentUpdaterHandler, - }) - this.projectId = '2k3j1lk3j21lk3j' - this.fileId = '12321kklj1lk3jk12' - this.req = { - params: { - Project_id: this.projectId, - Doc_id: this.docId, - }, - get(key) { - return undefined - }, - } - this.lines = ['test', '', 'testing'] - this.res = new MockResponse() - this.next = sinon.stub() - this.doc = { name: 'myfile.tex' } - }) - - describe('getDoc', function () { - beforeEach(function () { - this.DocumentUpdaterHandler.promises.getDocument.resolves({ - lines: this.lines, - }) - this.ProjectLocator.promises.findElement.resolves({ - element: this.doc, - }) - this.res = new MockResponse() - }) - - it('should call the document updater handler with the project_id and doc_id', async function () { - await this.controller.getDoc(this.req, this.res, this.next) - expect( - this.DocumentUpdaterHandler.promises.getDocument - ).to.have.been.calledOnceWith( - this.req.params.Project_id, - this.req.params.Doc_id, - -1 - ) - }) - - it('should return the content', async function () { - await this.controller.getDoc(this.req, this.res) - expect(this.next).to.not.have.been.called - expect(this.res.statusCode).to.equal(200) - expect(this.res.body).to.equal('test\n\ntesting') - }) - - it('should find the doc in the project', async function () { - await this.controller.getDoc(this.req, this.res) - expect( - this.ProjectLocator.promises.findElement - ).to.have.been.calledOnceWith({ - project_id: this.projectId, - element_id: this.docId, - type: 'doc', - }) - }) - - it('should set the Content-Disposition header', async function () { - await this.controller.getDoc(this.req, this.res) - expect(this.res.setContentDisposition).to.have.been.calledWith( - 'attachment', - { filename: this.doc.name } - ) - }) - }) -}) diff --git a/services/web/test/unit/src/Documents/DocumentController.test.mjs b/services/web/test/unit/src/Documents/DocumentController.test.mjs new file mode 100644 index 0000000000..06c971be91 --- /dev/null +++ b/services/web/test/unit/src/Documents/DocumentController.test.mjs @@ -0,0 +1,234 @@ +import { vi } from 'vitest' +import sinon from 'sinon' +import MockRequest from '../helpers/MockRequest.js' +import MockResponse from '../helpers/MockResponse.js' +import Errors from '../../../../app/src/Features/Errors/Errors.js' + +const MODULE_PATH = + 
'../../../../app/src/Features/Documents/DocumentController.mjs' + +describe('DocumentController', function () { + beforeEach(async function (ctx) { + ctx.res = new MockResponse() + ctx.req = new MockRequest() + ctx.next = sinon.stub() + ctx.doc = { _id: 'doc-id-123' } + ctx.doc_lines = ['one', 'two', 'three'] + ctx.version = 42 + ctx.ranges = { + comments: [ + { + id: 'comment1', + op: { + c: 'foo', + p: 123, + t: 'comment1', + }, + }, + { + id: 'comment2', + op: { + c: 'bar', + p: 456, + t: 'comment2', + }, + }, + ], + } + ctx.pathname = '/a/b/c/file.tex' + ctx.lastUpdatedAt = new Date().getTime() + ctx.lastUpdatedBy = 'fake-last-updater-id' + ctx.rev = 5 + ctx.project = { + _id: 'project-id-123', + overleaf: { + history: { + id: 1234, + display: true, + }, + }, + } + ctx.resolvedThreadIds = [ + 'comment2', + 'comment4', // Comment in project but not in doc + ] + + ctx.ProjectGetter = { + promises: { + getProject: sinon.stub().resolves(ctx.project), + }, + } + ctx.ProjectLocator = { + promises: { + findElement: sinon + .stub() + .resolves({ element: ctx.doc, path: { fileSystem: ctx.pathname } }), + }, + } + ctx.ProjectEntityHandler = { + promises: { + getDoc: sinon.stub().resolves({ + lines: ctx.doc_lines, + rev: ctx.rev, + version: ctx.version, + ranges: ctx.ranges, + }), + }, + } + ctx.ProjectEntityUpdateHandler = { + promises: { + updateDocLines: sinon.stub().resolves(), + }, + } + + ctx.ChatApiHandler = { + promises: { + getResolvedThreadIds: sinon.stub().resolves(ctx.resolvedThreadIds), + }, + } + + vi.doMock('../../../../app/src/Features/Project/ProjectGetter', () => ({ + default: ctx.ProjectGetter, + })) + + vi.doMock('../../../../app/src/Features/Project/ProjectLocator', () => ({ + default: ctx.ProjectLocator, + })) + + vi.doMock( + '../../../../app/src/Features/Project/ProjectEntityHandler', + () => ({ + default: ctx.ProjectEntityHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Project/ProjectEntityUpdateHandler', + () => ({ + default: ctx.ProjectEntityUpdateHandler, + }) + ) + + vi.doMock('../../../../app/src/Features/Chat/ChatApiHandler', () => ({ + default: ctx.ChatApiHandler, + })) + + ctx.DocumentController = (await import(MODULE_PATH)).default + }) + + describe('getDocument', function () { + beforeEach(function (ctx) { + ctx.req.params = { + Project_id: ctx.project._id, + doc_id: ctx.doc._id, + } + }) + + describe('when project exists with project history enabled', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.res.callback = err => { + resolve(err) + } + ctx.DocumentController.getDocument(ctx.req, ctx.res, ctx.next) + }) + }) + + it('should return the history id and display setting to the client as JSON', function (ctx) { + ctx.res.type.should.equal('application/json') + JSON.parse(ctx.res.body).should.deep.equal({ + lines: ctx.doc_lines, + version: ctx.version, + ranges: ctx.ranges, + pathname: ctx.pathname, + projectHistoryId: ctx.project.overleaf.history.id, + projectHistoryType: 'project-history', + resolvedCommentIds: ['comment2'], + historyRangesSupport: false, + otMigrationStage: 0, + }) + }) + }) + + describe('when the project does not exist', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.ProjectGetter.promises.getProject.resolves(null) + ctx.res.callback = err => { + resolve(err) + } + ctx.DocumentController.getDocument(ctx.req, ctx.res, ctx.next) + }) + }) + + it('returns a 404', function (ctx) { + ctx.res.statusCode.should.equal(404) + }) + }) + }) + + 
describe('setDocument', function () { + beforeEach(function (ctx) { + ctx.req.params = { + Project_id: ctx.project._id, + doc_id: ctx.doc._id, + } + }) + + describe('when the document exists', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.req.body = { + lines: ctx.doc_lines, + version: ctx.version, + ranges: ctx.ranges, + lastUpdatedAt: ctx.lastUpdatedAt, + lastUpdatedBy: ctx.lastUpdatedBy, + } + ctx.res.callback = err => { + resolve(err) + } + ctx.DocumentController.setDocument(ctx.req, ctx.res, ctx.next) + }) + }) + + it('should update the document in Mongo', function (ctx) { + sinon.assert.calledWith( + ctx.ProjectEntityUpdateHandler.promises.updateDocLines, + ctx.project._id, + ctx.doc._id, + ctx.doc_lines, + ctx.version, + ctx.ranges, + ctx.lastUpdatedAt, + ctx.lastUpdatedBy + ) + }) + + it('should return a successful response', function (ctx) { + ctx.res.success.should.equal(true) + }) + }) + + describe("when the document doesn't exist", function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.ProjectEntityUpdateHandler.promises.updateDocLines.rejects( + new Errors.NotFoundError('document does not exist') + ) + ctx.req.body = { lines: ctx.doc_lines } + ctx.next.callsFake(() => { + resolve() + }) + ctx.DocumentController.setDocument(ctx.req, ctx.res, ctx.next) + }) + }) + + it('should call next with the NotFoundError', function (ctx) { + ctx.next + .calledWith(sinon.match.instanceOf(Errors.NotFoundError)) + .should.equal(true) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/Documents/DocumentControllerTests.mjs b/services/web/test/unit/src/Documents/DocumentControllerTests.mjs deleted file mode 100644 index 813e8d65f3..0000000000 --- a/services/web/test/unit/src/Documents/DocumentControllerTests.mjs +++ /dev/null @@ -1,209 +0,0 @@ -import sinon from 'sinon' -import esmock from 'esmock' -import MockRequest from '../helpers/MockRequest.js' -import MockResponse from '../helpers/MockResponse.js' -import Errors from '../../../../app/src/Features/Errors/Errors.js' - -const MODULE_PATH = - '../../../../app/src/Features/Documents/DocumentController.mjs' - -describe('DocumentController', function () { - beforeEach(async function () { - this.res = new MockResponse() - this.req = new MockRequest() - this.next = sinon.stub() - this.doc = { _id: 'doc-id-123' } - this.doc_lines = ['one', 'two', 'three'] - this.version = 42 - this.ranges = { - comments: [ - { - id: 'comment1', - op: { - c: 'foo', - p: 123, - t: 'comment1', - }, - }, - { - id: 'comment2', - op: { - c: 'bar', - p: 456, - t: 'comment2', - }, - }, - ], - } - this.pathname = '/a/b/c/file.tex' - this.lastUpdatedAt = new Date().getTime() - this.lastUpdatedBy = 'fake-last-updater-id' - this.rev = 5 - this.project = { - _id: 'project-id-123', - overleaf: { - history: { - id: 1234, - display: true, - }, - }, - } - this.resolvedThreadIds = [ - 'comment2', - 'comment4', // Comment in project but not in doc - ] - - this.ProjectGetter = { - promises: { - getProject: sinon.stub().resolves(this.project), - }, - } - this.ProjectLocator = { - promises: { - findElement: sinon - .stub() - .resolves({ element: this.doc, path: { fileSystem: this.pathname } }), - }, - } - this.ProjectEntityHandler = { - promises: { - getDoc: sinon.stub().resolves({ - lines: this.doc_lines, - rev: this.rev, - version: this.version, - ranges: this.ranges, - }), - }, - } - this.ProjectEntityUpdateHandler = { - promises: { - updateDocLines: sinon.stub().resolves(), - }, - } - - this.ChatApiHandler = { 
- promises: { - getResolvedThreadIds: sinon.stub().resolves(this.resolvedThreadIds), - }, - } - - this.DocumentController = await esmock.strict(MODULE_PATH, { - '../../../../app/src/Features/Project/ProjectGetter': this.ProjectGetter, - '../../../../app/src/Features/Project/ProjectLocator': - this.ProjectLocator, - '../../../../app/src/Features/Project/ProjectEntityHandler': - this.ProjectEntityHandler, - '../../../../app/src/Features/Project/ProjectEntityUpdateHandler': - this.ProjectEntityUpdateHandler, - '../../../../app/src/Features/Chat/ChatApiHandler': this.ChatApiHandler, - }) - }) - - describe('getDocument', function () { - beforeEach(function () { - this.req.params = { - Project_id: this.project._id, - doc_id: this.doc._id, - } - }) - - describe('when project exists with project history enabled', function () { - beforeEach(function (done) { - this.res.callback = err => { - done(err) - } - this.DocumentController.getDocument(this.req, this.res, this.next) - }) - - it('should return the history id and display setting to the client as JSON', function () { - this.res.type.should.equal('application/json') - JSON.parse(this.res.body).should.deep.equal({ - lines: this.doc_lines, - version: this.version, - ranges: this.ranges, - pathname: this.pathname, - projectHistoryId: this.project.overleaf.history.id, - projectHistoryType: 'project-history', - resolvedCommentIds: ['comment2'], - historyRangesSupport: false, - otMigrationStage: 0, - }) - }) - }) - - describe('when the project does not exist', function () { - beforeEach(function (done) { - this.ProjectGetter.promises.getProject.resolves(null) - this.res.callback = err => { - done(err) - } - this.DocumentController.getDocument(this.req, this.res, this.next) - }) - - it('returns a 404', function () { - this.res.statusCode.should.equal(404) - }) - }) - }) - - describe('setDocument', function () { - beforeEach(function () { - this.req.params = { - Project_id: this.project._id, - doc_id: this.doc._id, - } - }) - - describe('when the document exists', function () { - beforeEach(function (done) { - this.req.body = { - lines: this.doc_lines, - version: this.version, - ranges: this.ranges, - lastUpdatedAt: this.lastUpdatedAt, - lastUpdatedBy: this.lastUpdatedBy, - } - this.res.callback = err => { - done(err) - } - this.DocumentController.setDocument(this.req, this.res, this.next) - }) - - it('should update the document in Mongo', function () { - sinon.assert.calledWith( - this.ProjectEntityUpdateHandler.promises.updateDocLines, - this.project._id, - this.doc._id, - this.doc_lines, - this.version, - this.ranges, - this.lastUpdatedAt, - this.lastUpdatedBy - ) - }) - - it('should return a successful response', function () { - this.res.success.should.equal(true) - }) - }) - - describe("when the document doesn't exist", function () { - beforeEach(function (done) { - this.ProjectEntityUpdateHandler.promises.updateDocLines.rejects( - new Errors.NotFoundError('document does not exist') - ) - this.req.body = { lines: this.doc_lines } - this.next.callsFake(() => { - done() - }) - this.DocumentController.setDocument(this.req, this.res, this.next) - }) - - it('should call next with the NotFoundError', function () { - this.next - .calledWith(sinon.match.instanceOf(Errors.NotFoundError)) - .should.equal(true) - }) - }) - }) -}) diff --git a/services/web/test/unit/src/Downloads/ProjectDownloadsController.test.mjs b/services/web/test/unit/src/Downloads/ProjectDownloadsController.test.mjs new file mode 100644 index 0000000000..1e339097fa --- /dev/null +++ 
b/services/web/test/unit/src/Downloads/ProjectDownloadsController.test.mjs @@ -0,0 +1,167 @@ +import { vi } from 'vitest' +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import sinon from 'sinon' +import MockRequest from '../helpers/MockRequest.js' +import MockResponse from '../helpers/MockResponse.js' +const modulePath = + '../../../../app/src/Features/Downloads/ProjectDownloadsController.mjs' + +describe('ProjectDownloadsController', function () { + beforeEach(async function (ctx) { + ctx.project_id = 'project-id-123' + ctx.req = new MockRequest() + ctx.res = new MockResponse() + ctx.next = sinon.stub() + ctx.DocumentUpdaterHandler = sinon.stub() + + vi.doMock( + '../../../../app/src/Features/Downloads/ProjectZipStreamManager.mjs', + () => ({ + default: (ctx.ProjectZipStreamManager = {}), + }) + ) + + vi.doMock('../../../../app/src/Features/Project/ProjectGetter.js', () => ({ + default: (ctx.ProjectGetter = {}), + })) + + vi.doMock('@overleaf/metrics', () => ({ + default: (ctx.metrics = {}), + })) + + vi.doMock( + '../../../../app/src/Features/DocumentUpdater/DocumentUpdaterHandler.js', + () => ({ + default: ctx.DocumentUpdaterHandler, + }) + ) + + ctx.ProjectDownloadsController = (await import(modulePath)).default + }) + + describe('downloadProject', function () { + beforeEach(function (ctx) { + ctx.stream = { pipe: sinon.stub() } + ctx.ProjectZipStreamManager.createZipStreamForProject = sinon + .stub() + .callsArgWith(1, null, ctx.stream) + ctx.req.params = { Project_id: ctx.project_id } + ctx.project_name = 'project name with accênts' + ctx.ProjectGetter.getProject = sinon + .stub() + .callsArgWith(2, null, { name: ctx.project_name }) + ctx.DocumentUpdaterHandler.flushProjectToMongo = sinon + .stub() + .callsArgWith(1) + ctx.metrics.inc = sinon.stub() + return ctx.ProjectDownloadsController.downloadProject( + ctx.req, + ctx.res, + ctx.next + ) + }) + + it('should create a zip from the project', function (ctx) { + return ctx.ProjectZipStreamManager.createZipStreamForProject + .calledWith(ctx.project_id) + .should.equal(true) + }) + + it('should stream the zip to the request', function (ctx) { + return ctx.stream.pipe.calledWith(ctx.res).should.equal(true) + }) + + it('should set the correct content type on the request', function (ctx) { + return ctx.res.contentType + .calledWith('application/zip') + .should.equal(true) + }) + + it('should flush the project to mongo', function (ctx) { + return ctx.DocumentUpdaterHandler.flushProjectToMongo + .calledWith(ctx.project_id) + .should.equal(true) + }) + + it("should look up the project's name", function (ctx) { + return ctx.ProjectGetter.getProject + .calledWith(ctx.project_id, { name: true }) + .should.equal(true) + }) + + it('should name the downloaded file after the project', function (ctx) { + ctx.res.headers.should.deep.equal({ + 'Content-Disposition': `attachment; filename="${ctx.project_name}.zip"`, + 'Content-Type': 'application/zip', + 'X-Content-Type-Options': 'nosniff', + }) + }) + + it('should record the action via Metrics', function (ctx) { + return ctx.metrics.inc.calledWith('zip-downloads').should.equal(true) + }) + }) + + describe('downloadMultipleProjects', function () { + beforeEach(function (ctx) { + ctx.stream = { pipe: sinon.stub() } + 
ctx.ProjectZipStreamManager.createZipStreamForMultipleProjects = sinon + .stub() + .callsArgWith(1, null, ctx.stream) + ctx.project_ids = ['project-1', 'project-2'] + ctx.req.query = { project_ids: ctx.project_ids.join(',') } + ctx.DocumentUpdaterHandler.flushMultipleProjectsToMongo = sinon + .stub() + .callsArgWith(1) + ctx.metrics.inc = sinon.stub() + return ctx.ProjectDownloadsController.downloadMultipleProjects( + ctx.req, + ctx.res, + ctx.next + ) + }) + + it('should create a zip from the project', function (ctx) { + return ctx.ProjectZipStreamManager.createZipStreamForMultipleProjects + .calledWith(ctx.project_ids) + .should.equal(true) + }) + + it('should stream the zip to the request', function (ctx) { + return ctx.stream.pipe.calledWith(ctx.res).should.equal(true) + }) + + it('should set the correct content type on the request', function (ctx) { + return ctx.res.contentType + .calledWith('application/zip') + .should.equal(true) + }) + + it('should flush the projects to mongo', function (ctx) { + return ctx.DocumentUpdaterHandler.flushMultipleProjectsToMongo + .calledWith(ctx.project_ids) + .should.equal(true) + }) + + it('should name the downloaded file after the project', function (ctx) { + ctx.res.headers.should.deep.equal({ + 'Content-Disposition': + 'attachment; filename="Overleaf Projects (2 items).zip"', + 'Content-Type': 'application/zip', + 'X-Content-Type-Options': 'nosniff', + }) + }) + + it('should record the action via Metrics', function (ctx) { + return ctx.metrics.inc + .calledWith('zip-downloads-multiple') + .should.equal(true) + }) + }) +}) diff --git a/services/web/test/unit/src/Downloads/ProjectDownloadsControllerTests.mjs b/services/web/test/unit/src/Downloads/ProjectDownloadsControllerTests.mjs deleted file mode 100644 index db9cf19df7..0000000000 --- a/services/web/test/unit/src/Downloads/ProjectDownloadsControllerTests.mjs +++ /dev/null @@ -1,152 +0,0 @@ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
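The converted DocumentController, ProjectDownloadsController, and ProjectZipStreamManager suites above also replace mocha's `done` callback with a Promise returned from `beforeEach`, resolving it from whatever callback the code under test eventually invokes. A minimal sketch of that conversion follows, assuming Vitest globals and a hypothetical callback-style `loadThing` helper standing in for the code under test.

import { expect } from 'vitest'

// Hypothetical callback-style API standing in for the code under test.
const loadThing = cb => setImmediate(() => cb(null, { ok: true }))

describe('promise-based async setup', function () {
  beforeEach(function (ctx) {
    // Previously: beforeEach(function (done) { loadThing(...); done() })
    // Now the hook returns a Promise and resolves once the callback fires.
    return new Promise(resolve => {
      loadThing((err, thing) => {
        ctx.thing = thing
        resolve(err)
      })
    })
  })

  it('exposes the loaded value on the test context', function (ctx) {
    expect(ctx.thing).to.deep.equal({ ok: true })
  })
})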
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -import sinon from 'sinon' -import esmock from 'esmock' -import MockRequest from '../helpers/MockRequest.js' -import MockResponse from '../helpers/MockResponse.js' -const modulePath = - '../../../../app/src/Features/Downloads/ProjectDownloadsController.mjs' - -describe('ProjectDownloadsController', function () { - beforeEach(async function () { - this.project_id = 'project-id-123' - this.req = new MockRequest() - this.res = new MockResponse() - this.next = sinon.stub() - this.DocumentUpdaterHandler = sinon.stub() - return (this.ProjectDownloadsController = await esmock.strict(modulePath, { - '../../../../app/src/Features/Downloads/ProjectZipStreamManager.mjs': - (this.ProjectZipStreamManager = {}), - '../../../../app/src/Features/Project/ProjectGetter.js': - (this.ProjectGetter = {}), - '@overleaf/metrics': (this.metrics = {}), - '../../../../app/src/Features/DocumentUpdater/DocumentUpdaterHandler.js': - this.DocumentUpdaterHandler, - })) - }) - - describe('downloadProject', function () { - beforeEach(function () { - this.stream = { pipe: sinon.stub() } - this.ProjectZipStreamManager.createZipStreamForProject = sinon - .stub() - .callsArgWith(1, null, this.stream) - this.req.params = { Project_id: this.project_id } - this.project_name = 'project name with accênts' - this.ProjectGetter.getProject = sinon - .stub() - .callsArgWith(2, null, { name: this.project_name }) - this.DocumentUpdaterHandler.flushProjectToMongo = sinon - .stub() - .callsArgWith(1) - this.metrics.inc = sinon.stub() - return this.ProjectDownloadsController.downloadProject( - this.req, - this.res, - this.next - ) - }) - - it('should create a zip from the project', function () { - return this.ProjectZipStreamManager.createZipStreamForProject - .calledWith(this.project_id) - .should.equal(true) - }) - - it('should stream the zip to the request', function () { - return this.stream.pipe.calledWith(this.res).should.equal(true) - }) - - it('should set the correct content type on the request', function () { - return this.res.contentType - .calledWith('application/zip') - .should.equal(true) - }) - - it('should flush the project to mongo', function () { - return this.DocumentUpdaterHandler.flushProjectToMongo - .calledWith(this.project_id) - .should.equal(true) - }) - - it("should look up the project's name", function () { - return this.ProjectGetter.getProject - .calledWith(this.project_id, { name: true }) - .should.equal(true) - }) - - it('should name the downloaded file after the project', function () { - this.res.headers.should.deep.equal({ - 'Content-Disposition': `attachment; filename="${this.project_name}.zip"`, - 'Content-Type': 'application/zip', - 'X-Content-Type-Options': 'nosniff', - }) - }) - - it('should record the action via Metrics', function () { - return this.metrics.inc.calledWith('zip-downloads').should.equal(true) - }) - }) - - describe('downloadMultipleProjects', function () { - beforeEach(function () { - this.stream = { pipe: sinon.stub() } - this.ProjectZipStreamManager.createZipStreamForMultipleProjects = sinon - .stub() - .callsArgWith(1, null, this.stream) - this.project_ids = ['project-1', 'project-2'] - this.req.query = { project_ids: this.project_ids.join(',') } - this.DocumentUpdaterHandler.flushMultipleProjectsToMongo = sinon - .stub() - .callsArgWith(1) - this.metrics.inc = sinon.stub() - return 
this.ProjectDownloadsController.downloadMultipleProjects( - this.req, - this.res, - this.next - ) - }) - - it('should create a zip from the project', function () { - return this.ProjectZipStreamManager.createZipStreamForMultipleProjects - .calledWith(this.project_ids) - .should.equal(true) - }) - - it('should stream the zip to the request', function () { - return this.stream.pipe.calledWith(this.res).should.equal(true) - }) - - it('should set the correct content type on the request', function () { - return this.res.contentType - .calledWith('application/zip') - .should.equal(true) - }) - - it('should flush the projects to mongo', function () { - return this.DocumentUpdaterHandler.flushMultipleProjectsToMongo - .calledWith(this.project_ids) - .should.equal(true) - }) - - it('should name the downloaded file after the project', function () { - this.res.headers.should.deep.equal({ - 'Content-Disposition': - 'attachment; filename="Overleaf Projects (2 items).zip"', - 'Content-Type': 'application/zip', - 'X-Content-Type-Options': 'nosniff', - }) - }) - - it('should record the action via Metrics', function () { - return this.metrics.inc - .calledWith('zip-downloads-multiple') - .should.equal(true) - }) - }) -}) diff --git a/services/web/test/unit/src/Downloads/ProjectZipStreamManager.test.mjs b/services/web/test/unit/src/Downloads/ProjectZipStreamManager.test.mjs new file mode 100644 index 0000000000..df7486e11d --- /dev/null +++ b/services/web/test/unit/src/Downloads/ProjectZipStreamManager.test.mjs @@ -0,0 +1,503 @@ +import { vi } from 'vitest' +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS201: Simplify complex destructure assignments + * DS205: Consider reworking code to avoid use of IIFEs + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import sinon from 'sinon' +import { EventEmitter } from 'events' +const modulePath = + '../../../../app/src/Features/Downloads/ProjectZipStreamManager.mjs' + +describe('ProjectZipStreamManager', function () { + beforeEach(async function (ctx) { + ctx.project_id = 'project-id-123' + ctx.callback = sinon.stub() + ctx.archive = { + on() {}, + append: sinon.stub(), + } + ctx.logger = { + error: sinon.stub(), + info: sinon.stub(), + debug: sinon.stub(), + } + + vi.doMock('archiver', () => ({ + default: (ctx.archiver = sinon.stub().returns(ctx.archive)), + })) + + vi.doMock('@overleaf/logger', () => ({ + default: ctx.logger, + })) + + vi.doMock( + '../../../../app/src/Features/Project/ProjectEntityHandler', + () => ({ + default: (ctx.ProjectEntityHandler = {}), + }) + ) + + vi.doMock('../../../../app/src/Features/History/HistoryManager.js', () => ({ + default: (ctx.HistoryManager = {}), + })) + + vi.doMock('../../../../app/src/Features/Project/ProjectGetter', () => ({ + default: (ctx.ProjectGetter = {}), + })) + + vi.doMock( + '../../../../app/src/Features/FileStore/FileStoreHandler', + () => ({ + default: (ctx.FileStoreHandler = {}), + }) + ) + + vi.doMock('../../../../app/src/infrastructure/Features', () => ({ + default: (ctx.Features = { + hasFeature: sinon + .stub() + .withArgs('project-history-blobs') + .returns(true), + }), + })) + + ctx.ProjectZipStreamManager = (await import(modulePath)).default + }) + + describe('createZipStreamForMultipleProjects', function () { + describe('successfully', function () 
{ + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.project_ids = ['project-1', 'project-2'] + ctx.zip_streams = { + 'project-1': new EventEmitter(), + 'project-2': new EventEmitter(), + } + + ctx.project_names = { + 'project-1': 'Project One Name', + 'project-2': 'Project Two Name', + } + + ctx.ProjectZipStreamManager.createZipStreamForProject = ( + projectId, + callback + ) => { + callback(null, ctx.zip_streams[projectId]) + setTimeout(() => { + return ctx.zip_streams[projectId].emit('end') + }) + return 0 + } + sinon.spy(ctx.ProjectZipStreamManager, 'createZipStreamForProject') + + ctx.ProjectGetter.getProject = (projectId, fields, callback) => { + return callback(null, { name: ctx.project_names[projectId] }) + } + sinon.spy(ctx.ProjectGetter, 'getProject') + + ctx.ProjectZipStreamManager.createZipStreamForMultipleProjects( + ctx.project_ids, + (...args) => { + return ctx.callback(...Array.from(args || [])) + } + ) + + return (ctx.archive.finalize = () => resolve()) + }) + }) + + it('should create a zip archive', function (ctx) { + return ctx.archiver.calledWith('zip').should.equal(true) + }) + + it('should return a stream before any processing is done', function (ctx) { + ctx.callback + .calledWith(sinon.match.falsy, ctx.archive) + .should.equal(true) + return ctx.callback + .calledBefore(ctx.ProjectZipStreamManager.createZipStreamForProject) + .should.equal(true) + }) + + it('should get a zip stream for all of the projects', function (ctx) { + return Array.from(ctx.project_ids).map(projectId => + ctx.ProjectZipStreamManager.createZipStreamForProject + .calledWith(projectId) + .should.equal(true) + ) + }) + + it('should get the names of each project', function (ctx) { + return Array.from(ctx.project_ids).map(projectId => + ctx.ProjectGetter.getProject + .calledWith(projectId, { name: true }) + .should.equal(true) + ) + }) + + it('should add all of the projects to the zip', function (ctx) { + return Array.from(ctx.project_ids).map(projectId => + ctx.archive.append + .calledWith(ctx.zip_streams[projectId], { + name: ctx.project_names[projectId] + '.zip', + }) + .should.equal(true) + ) + }) + }) + + describe('with a project not existing', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.project_ids = ['project-1', 'wrong-id'] + ctx.project_names = { + 'project-1': 'Project One Name', + } + ctx.zip_streams = { + 'project-1': new EventEmitter(), + } + + ctx.ProjectZipStreamManager.createZipStreamForProject = ( + projectId, + callback + ) => { + callback(null, ctx.zip_streams[projectId]) + setTimeout(() => { + ctx.zip_streams[projectId].emit('end') + }) + } + sinon.spy(ctx.ProjectZipStreamManager, 'createZipStreamForProject') + + ctx.ProjectGetter.getProject = (projectId, fields, callback) => { + const name = ctx.project_names[projectId] + callback(null, name ? 
{ name } : undefined) + } + sinon.spy(ctx.ProjectGetter, 'getProject') + + ctx.ProjectZipStreamManager.createZipStreamForMultipleProjects( + ctx.project_ids, + ctx.callback + ) + + ctx.archive.finalize = () => resolve() + }) + }) + + it('should create a zip archive', function (ctx) { + ctx.archiver.calledWith('zip').should.equal(true) + }) + + it('should return a stream before any processing is done', function (ctx) { + ctx.callback + .calledWith(sinon.match.falsy, ctx.archive) + .should.equal(true) + ctx.callback + .calledBefore(ctx.ProjectZipStreamManager.createZipStreamForProject) + .should.equal(true) + }) + + it('should get the names of each project', function (ctx) { + ctx.project_ids.map(projectId => + ctx.ProjectGetter.getProject + .calledWith(projectId, { name: true }) + .should.equal(true) + ) + }) + + it('should get a zip stream only for the existing project', function (ctx) { + ctx.ProjectZipStreamManager.createZipStreamForProject + .calledWith('project-1') + .should.equal(true) + ctx.ProjectZipStreamManager.createZipStreamForProject + .calledWith('wrong-id') + .should.equal(false) + }) + + it('should only add the existing project to the zip', function (ctx) { + sinon.assert.calledOnce(ctx.archive.append) + ctx.archive.append + .calledWith(ctx.zip_streams['project-1'], { + name: ctx.project_names['project-1'] + '.zip', + }) + .should.equal(true) + }) + }) + }) + + describe('createZipStreamForProject', function () { + describe('successfully', function () { + beforeEach(function (ctx) { + ctx.ProjectZipStreamManager.addAllDocsToArchive = sinon + .stub() + .callsArg(2) + ctx.ProjectZipStreamManager.addAllFilesToArchive = sinon + .stub() + .callsArg(2) + ctx.archive.finalize = sinon.stub() + return ctx.ProjectZipStreamManager.createZipStreamForProject( + ctx.project_id, + ctx.callback + ) + }) + + it('should create a zip archive', function (ctx) { + return ctx.archiver.calledWith('zip').should.equal(true) + }) + + it('should return a stream before any processing is done', function (ctx) { + ctx.callback + .calledWith(sinon.match.falsy, ctx.archive) + .should.equal(true) + ctx.callback + .calledBefore(ctx.ProjectZipStreamManager.addAllDocsToArchive) + .should.equal(true) + return ctx.callback + .calledBefore(ctx.ProjectZipStreamManager.addAllFilesToArchive) + .should.equal(true) + }) + + it('should add all of the project docs to the zip', function (ctx) { + return ctx.ProjectZipStreamManager.addAllDocsToArchive + .calledWith(ctx.project_id, ctx.archive) + .should.equal(true) + }) + + it('should add all of the project files to the zip', function (ctx) { + return ctx.ProjectZipStreamManager.addAllFilesToArchive + .calledWith(ctx.project_id, ctx.archive) + .should.equal(true) + }) + + it('should finalise the stream', function (ctx) { + return ctx.archive.finalize.called.should.equal(true) + }) + }) + + describe('with an error adding docs', function () { + beforeEach(function (ctx) { + ctx.ProjectZipStreamManager.addAllDocsToArchive = sinon + .stub() + .callsArgWith(2, new Error('something went wrong')) + ctx.ProjectZipStreamManager.addAllFilesToArchive = sinon + .stub() + .callsArg(2) + ctx.archive.finalize = sinon.stub() + ctx.ProjectZipStreamManager.createZipStreamForProject( + ctx.project_id, + ctx.callback + ) + }) + + it('should log out an error', function (ctx) { + return ctx.logger.error + .calledWith(sinon.match.any, 'error adding docs to zip stream') + .should.equal(true) + }) + + it('should continue with the process', function (ctx) { + 
ctx.ProjectZipStreamManager.addAllDocsToArchive.called.should.equal( + true + ) + ctx.ProjectZipStreamManager.addAllFilesToArchive.called.should.equal( + true + ) + return ctx.archive.finalize.called.should.equal(true) + }) + }) + + describe('with an error adding files', function () { + beforeEach(function (ctx) { + ctx.ProjectZipStreamManager.addAllDocsToArchive = sinon + .stub() + .callsArg(2) + ctx.ProjectZipStreamManager.addAllFilesToArchive = sinon + .stub() + .callsArgWith(2, new Error('something went wrong')) + ctx.archive.finalize = sinon.stub() + return ctx.ProjectZipStreamManager.createZipStreamForProject( + ctx.project_id, + ctx.callback + ) + }) + + it('should log out an error', function (ctx) { + return ctx.logger.error + .calledWith(sinon.match.any, 'error adding files to zip stream') + .should.equal(true) + }) + + it('should continue with the process', function (ctx) { + ctx.ProjectZipStreamManager.addAllDocsToArchive.called.should.equal( + true + ) + ctx.ProjectZipStreamManager.addAllFilesToArchive.called.should.equal( + true + ) + return ctx.archive.finalize.called.should.equal(true) + }) + }) + }) + + describe('addAllDocsToArchive', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.docs = { + '/main.tex': { + lines: [ + '\\documentclass{article}', + '\\begin{document}', + 'Hello world', + '\\end{document}', + ], + }, + '/chapters/chapter1.tex': { + lines: ['chapter1', 'content'], + }, + } + ctx.ProjectEntityHandler.getAllDocs = sinon + .stub() + .callsArgWith(1, null, ctx.docs) + return ctx.ProjectZipStreamManager.addAllDocsToArchive( + ctx.project_id, + ctx.archive, + error => { + ctx.callback(error) + return resolve() + } + ) + }) + }) + + it('should get the docs for the project', function (ctx) { + return ctx.ProjectEntityHandler.getAllDocs + .calledWith(ctx.project_id) + .should.equal(true) + }) + + it('should add each doc to the archive', function (ctx) { + return (() => { + const result = [] + for (let path in ctx.docs) { + const doc = ctx.docs[path] + path = path.slice(1) // remove "/" + result.push( + ctx.archive.append + .calledWith(doc.lines.join('\n'), { name: path }) + .should.equal(true) + ) + } + return result + })() + }) + }) + + describe('addAllFilesToArchive', function () { + beforeEach(function (ctx) { + ctx.files = { + '/image.png': { + _id: 'file-id-1', + hash: 'abc', + }, + '/folder/picture.png': { + _id: 'file-id-2', + hash: 'def', + }, + } + ctx.streams = { + 'file-id-1': new EventEmitter(), + 'file-id-2': new EventEmitter(), + } + ctx.ProjectEntityHandler.getAllFiles = sinon + .stub() + .callsArgWith(1, null, ctx.files) + }) + describe('with project-history-blobs feature enabled', function () { + beforeEach(function (ctx) { + ctx.HistoryManager.requestBlobWithFallback = ( + projectId, + hash, + fileId, + callback + ) => { + return callback(null, { stream: ctx.streams[fileId] }) + } + sinon.spy(ctx.HistoryManager, 'requestBlobWithFallback') + ctx.ProjectZipStreamManager.addAllFilesToArchive( + ctx.project_id, + ctx.archive, + ctx.callback + ) + for (const path in ctx.streams) { + const stream = ctx.streams[path] + stream.emit('end') + } + }) + + it('should get the files for the project', function (ctx) { + return ctx.ProjectEntityHandler.getAllFiles + .calledWith(ctx.project_id) + .should.equal(true) + }) + + it('should get a stream for each file', function (ctx) { + for (const path in ctx.files) { + const file = ctx.files[path] + + ctx.HistoryManager.requestBlobWithFallback + .calledWith(ctx.project_id, file.hash, 
file._id) + .should.equal(true) + } + }) + + it('should add each file to the archive', function (ctx) { + for (let path in ctx.files) { + const file = ctx.files[path] + path = path.slice(1) // remove "/" + ctx.archive.append + .calledWith(ctx.streams[file._id], { name: path }) + .should.equal(true) + } + }) + }) + + describe('with project-history-blobs feature disabled', function () { + beforeEach(function (ctx) { + ctx.FileStoreHandler.getFileStream = ( + projectId, + fileId, + query, + callback + ) => callback(null, ctx.streams[fileId]) + + sinon.spy(ctx.FileStoreHandler, 'getFileStream') + ctx.Features.hasFeature.withArgs('project-history-blobs').returns(false) + ctx.ProjectZipStreamManager.addAllFilesToArchive( + ctx.project_id, + ctx.archive, + ctx.callback + ) + for (const path in ctx.streams) { + const stream = ctx.streams[path] + stream.emit('end') + } + }) + + it('should get a stream for each file', function (ctx) { + for (const path in ctx.files) { + const file = ctx.files[path] + + ctx.FileStoreHandler.getFileStream + .calledWith(ctx.project_id, file._id) + .should.equal(true) + } + }) + }) + }) +}) diff --git a/services/web/test/unit/src/Downloads/ProjectZipStreamManagerTests.mjs b/services/web/test/unit/src/Downloads/ProjectZipStreamManagerTests.mjs deleted file mode 100644 index f86b99bd96..0000000000 --- a/services/web/test/unit/src/Downloads/ProjectZipStreamManagerTests.mjs +++ /dev/null @@ -1,477 +0,0 @@ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * DS201: Simplify complex destructure assignments - * DS205: Consider reworking code to avoid use of IIFEs - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -import sinon from 'sinon' -import esmock from 'esmock' -import { EventEmitter } from 'events' -const modulePath = - '../../../../app/src/Features/Downloads/ProjectZipStreamManager.mjs' - -describe('ProjectZipStreamManager', function () { - beforeEach(async function () { - this.project_id = 'project-id-123' - this.callback = sinon.stub() - this.archive = { - on() {}, - append: sinon.stub(), - } - this.logger = { - error: sinon.stub(), - info: sinon.stub(), - debug: sinon.stub(), - } - - return (this.ProjectZipStreamManager = await esmock.strict(modulePath, { - archiver: (this.archiver = sinon.stub().returns(this.archive)), - '@overleaf/logger': this.logger, - '../../../../app/src/Features/Project/ProjectEntityHandler': - (this.ProjectEntityHandler = {}), - '../../../../app/src/Features/History/HistoryManager.js': - (this.HistoryManager = {}), - '../../../../app/src/Features/Project/ProjectGetter': - (this.ProjectGetter = {}), - '../../../../app/src/Features/FileStore/FileStoreHandler': - (this.FileStoreHandler = {}), - '../../../../app/src/infrastructure/Features': (this.Features = { - hasFeature: sinon - .stub() - .withArgs('project-history-blobs') - .returns(true), - }), - })) - }) - - describe('createZipStreamForMultipleProjects', function () { - describe('successfully', function () { - beforeEach(function (done) { - this.project_ids = ['project-1', 'project-2'] - this.zip_streams = { - 'project-1': new EventEmitter(), - 'project-2': new EventEmitter(), - } - - this.project_names = { - 'project-1': 'Project One Name', - 'project-2': 'Project Two Name', - } - - this.ProjectZipStreamManager.createZipStreamForProject = ( - 
projectId, - callback - ) => { - callback(null, this.zip_streams[projectId]) - setTimeout(() => { - return this.zip_streams[projectId].emit('end') - }) - return 0 - } - sinon.spy(this.ProjectZipStreamManager, 'createZipStreamForProject') - - this.ProjectGetter.getProject = (projectId, fields, callback) => { - return callback(null, { name: this.project_names[projectId] }) - } - sinon.spy(this.ProjectGetter, 'getProject') - - this.ProjectZipStreamManager.createZipStreamForMultipleProjects( - this.project_ids, - (...args) => { - return this.callback(...Array.from(args || [])) - } - ) - - return (this.archive.finalize = () => done()) - }) - - it('should create a zip archive', function () { - return this.archiver.calledWith('zip').should.equal(true) - }) - - it('should return a stream before any processing is done', function () { - this.callback - .calledWith(sinon.match.falsy, this.archive) - .should.equal(true) - return this.callback - .calledBefore(this.ProjectZipStreamManager.createZipStreamForProject) - .should.equal(true) - }) - - it('should get a zip stream for all of the projects', function () { - return Array.from(this.project_ids).map(projectId => - this.ProjectZipStreamManager.createZipStreamForProject - .calledWith(projectId) - .should.equal(true) - ) - }) - - it('should get the names of each project', function () { - return Array.from(this.project_ids).map(projectId => - this.ProjectGetter.getProject - .calledWith(projectId, { name: true }) - .should.equal(true) - ) - }) - - it('should add all of the projects to the zip', function () { - return Array.from(this.project_ids).map(projectId => - this.archive.append - .calledWith(this.zip_streams[projectId], { - name: this.project_names[projectId] + '.zip', - }) - .should.equal(true) - ) - }) - }) - - describe('with a project not existing', function () { - beforeEach(function (done) { - this.project_ids = ['project-1', 'wrong-id'] - this.project_names = { - 'project-1': 'Project One Name', - } - this.zip_streams = { - 'project-1': new EventEmitter(), - } - - this.ProjectZipStreamManager.createZipStreamForProject = ( - projectId, - callback - ) => { - callback(null, this.zip_streams[projectId]) - setTimeout(() => { - this.zip_streams[projectId].emit('end') - }) - } - sinon.spy(this.ProjectZipStreamManager, 'createZipStreamForProject') - - this.ProjectGetter.getProject = (projectId, fields, callback) => { - const name = this.project_names[projectId] - callback(null, name ? 
{ name } : undefined) - } - sinon.spy(this.ProjectGetter, 'getProject') - - this.ProjectZipStreamManager.createZipStreamForMultipleProjects( - this.project_ids, - this.callback - ) - - this.archive.finalize = () => done() - }) - - it('should create a zip archive', function () { - this.archiver.calledWith('zip').should.equal(true) - }) - - it('should return a stream before any processing is done', function () { - this.callback - .calledWith(sinon.match.falsy, this.archive) - .should.equal(true) - this.callback - .calledBefore(this.ProjectZipStreamManager.createZipStreamForProject) - .should.equal(true) - }) - - it('should get the names of each project', function () { - this.project_ids.map(projectId => - this.ProjectGetter.getProject - .calledWith(projectId, { name: true }) - .should.equal(true) - ) - }) - - it('should get a zip stream only for the existing project', function () { - this.ProjectZipStreamManager.createZipStreamForProject - .calledWith('project-1') - .should.equal(true) - this.ProjectZipStreamManager.createZipStreamForProject - .calledWith('wrong-id') - .should.equal(false) - }) - - it('should only add the existing project to the zip', function () { - sinon.assert.calledOnce(this.archive.append) - this.archive.append - .calledWith(this.zip_streams['project-1'], { - name: this.project_names['project-1'] + '.zip', - }) - .should.equal(true) - }) - }) - }) - - describe('createZipStreamForProject', function () { - describe('successfully', function () { - beforeEach(function () { - this.ProjectZipStreamManager.addAllDocsToArchive = sinon - .stub() - .callsArg(2) - this.ProjectZipStreamManager.addAllFilesToArchive = sinon - .stub() - .callsArg(2) - this.archive.finalize = sinon.stub() - return this.ProjectZipStreamManager.createZipStreamForProject( - this.project_id, - this.callback - ) - }) - - it('should create a zip archive', function () { - return this.archiver.calledWith('zip').should.equal(true) - }) - - it('should return a stream before any processing is done', function () { - this.callback - .calledWith(sinon.match.falsy, this.archive) - .should.equal(true) - this.callback - .calledBefore(this.ProjectZipStreamManager.addAllDocsToArchive) - .should.equal(true) - return this.callback - .calledBefore(this.ProjectZipStreamManager.addAllFilesToArchive) - .should.equal(true) - }) - - it('should add all of the project docs to the zip', function () { - return this.ProjectZipStreamManager.addAllDocsToArchive - .calledWith(this.project_id, this.archive) - .should.equal(true) - }) - - it('should add all of the project files to the zip', function () { - return this.ProjectZipStreamManager.addAllFilesToArchive - .calledWith(this.project_id, this.archive) - .should.equal(true) - }) - - it('should finalise the stream', function () { - return this.archive.finalize.called.should.equal(true) - }) - }) - - describe('with an error adding docs', function () { - beforeEach(function () { - this.ProjectZipStreamManager.addAllDocsToArchive = sinon - .stub() - .callsArgWith(2, new Error('something went wrong')) - this.ProjectZipStreamManager.addAllFilesToArchive = sinon - .stub() - .callsArg(2) - this.archive.finalize = sinon.stub() - this.ProjectZipStreamManager.createZipStreamForProject( - this.project_id, - this.callback - ) - }) - - it('should log out an error', function () { - return this.logger.error - .calledWith(sinon.match.any, 'error adding docs to zip stream') - .should.equal(true) - }) - - it('should continue with the process', function () { - 
this.ProjectZipStreamManager.addAllDocsToArchive.called.should.equal( - true - ) - this.ProjectZipStreamManager.addAllFilesToArchive.called.should.equal( - true - ) - return this.archive.finalize.called.should.equal(true) - }) - }) - - describe('with an error adding files', function () { - beforeEach(function () { - this.ProjectZipStreamManager.addAllDocsToArchive = sinon - .stub() - .callsArg(2) - this.ProjectZipStreamManager.addAllFilesToArchive = sinon - .stub() - .callsArgWith(2, new Error('something went wrong')) - this.archive.finalize = sinon.stub() - return this.ProjectZipStreamManager.createZipStreamForProject( - this.project_id, - this.callback - ) - }) - - it('should log out an error', function () { - return this.logger.error - .calledWith(sinon.match.any, 'error adding files to zip stream') - .should.equal(true) - }) - - it('should continue with the process', function () { - this.ProjectZipStreamManager.addAllDocsToArchive.called.should.equal( - true - ) - this.ProjectZipStreamManager.addAllFilesToArchive.called.should.equal( - true - ) - return this.archive.finalize.called.should.equal(true) - }) - }) - }) - - describe('addAllDocsToArchive', function () { - beforeEach(function (done) { - this.docs = { - '/main.tex': { - lines: [ - '\\documentclass{article}', - '\\begin{document}', - 'Hello world', - '\\end{document}', - ], - }, - '/chapters/chapter1.tex': { - lines: ['chapter1', 'content'], - }, - } - this.ProjectEntityHandler.getAllDocs = sinon - .stub() - .callsArgWith(1, null, this.docs) - return this.ProjectZipStreamManager.addAllDocsToArchive( - this.project_id, - this.archive, - error => { - this.callback(error) - return done() - } - ) - }) - - it('should get the docs for the project', function () { - return this.ProjectEntityHandler.getAllDocs - .calledWith(this.project_id) - .should.equal(true) - }) - - it('should add each doc to the archive', function () { - return (() => { - const result = [] - for (let path in this.docs) { - const doc = this.docs[path] - path = path.slice(1) // remove "/" - result.push( - this.archive.append - .calledWith(doc.lines.join('\n'), { name: path }) - .should.equal(true) - ) - } - return result - })() - }) - }) - - describe('addAllFilesToArchive', function () { - beforeEach(function () { - this.files = { - '/image.png': { - _id: 'file-id-1', - hash: 'abc', - }, - '/folder/picture.png': { - _id: 'file-id-2', - hash: 'def', - }, - } - this.streams = { - 'file-id-1': new EventEmitter(), - 'file-id-2': new EventEmitter(), - } - this.ProjectEntityHandler.getAllFiles = sinon - .stub() - .callsArgWith(1, null, this.files) - }) - describe('with project-history-blobs feature enabled', function () { - beforeEach(function () { - this.HistoryManager.requestBlobWithFallback = ( - projectId, - hash, - fileId, - callback - ) => { - return callback(null, { stream: this.streams[fileId] }) - } - sinon.spy(this.HistoryManager, 'requestBlobWithFallback') - this.ProjectZipStreamManager.addAllFilesToArchive( - this.project_id, - this.archive, - this.callback - ) - for (const path in this.streams) { - const stream = this.streams[path] - stream.emit('end') - } - }) - - it('should get the files for the project', function () { - return this.ProjectEntityHandler.getAllFiles - .calledWith(this.project_id) - .should.equal(true) - }) - - it('should get a stream for each file', function () { - for (const path in this.files) { - const file = this.files[path] - - this.HistoryManager.requestBlobWithFallback - .calledWith(this.project_id, file.hash, file._id) - 
.should.equal(true) - } - }) - - it('should add each file to the archive', function () { - for (let path in this.files) { - const file = this.files[path] - path = path.slice(1) // remove "/" - this.archive.append - .calledWith(this.streams[file._id], { name: path }) - .should.equal(true) - } - }) - }) - - describe('with project-history-blobs feature disabled', function () { - beforeEach(function () { - this.FileStoreHandler.getFileStream = ( - projectId, - fileId, - query, - callback - ) => callback(null, this.streams[fileId]) - - sinon.spy(this.FileStoreHandler, 'getFileStream') - this.Features.hasFeature - .withArgs('project-history-blobs') - .returns(false) - this.ProjectZipStreamManager.addAllFilesToArchive( - this.project_id, - this.archive, - this.callback - ) - for (const path in this.streams) { - const stream = this.streams[path] - stream.emit('end') - } - }) - - it('should get a stream for each file', function () { - for (const path in this.files) { - const file = this.files[path] - - this.FileStoreHandler.getFileStream - .calledWith(this.project_id, file._id) - .should.equal(true) - } - }) - }) - }) -}) diff --git a/services/web/test/unit/src/Editor/EditorHttpControllerTests.js b/services/web/test/unit/src/Editor/EditorHttpControllerTests.js index dffa2d21ff..7fc08c45d3 100644 --- a/services/web/test/unit/src/Editor/EditorHttpControllerTests.js +++ b/services/web/test/unit/src/Editor/EditorHttpControllerTests.js @@ -20,6 +20,12 @@ describe('EditorHttpController', function () { _id: new ObjectId(), projects: {}, } + this.members = [ + { user: { _id: 'owner', features: {} }, privilegeLevel: 'owner' }, + { user: { _id: 'one' }, privilegeLevel: 'readOnly' }, + ] + this.ownerMember = this.members[0] + this.invites = [{ _id: 'three' }, { _id: 'four' }] this.projectView = { _id: this.project._id, owner: { @@ -27,7 +33,10 @@ describe('EditorHttpController', function () { email: 'owner@example.com', other_property: true, }, - members: [{ one: 1 }, { two: 2 }], + members: [ + { _id: 'owner', privileges: 'owner' }, + { _id: 'one', privileges: 'readOnly' }, + ], invites: [{ three: 3 }, { four: 4 }], } this.reducedProjectView = { @@ -51,14 +60,32 @@ describe('EditorHttpController', function () { this.AuthorizationManager = { isRestrictedUser: sinon.stub().returns(false), promises: { - getPrivilegeLevelForProject: sinon.stub().resolves('owner'), + getPrivilegeLevelForProjectWithProjectAccess: sinon + .stub() + .resolves('owner'), }, } + const members = this.members + const ownerMember = this.ownerMember this.CollaboratorsGetter = { + ProjectAccess: class { + loadOwnerAndInvitedMembers() { + return { members, ownerMember } + } + + loadOwner() { + return ownerMember + } + + isUserTokenMember() { + return false + } + + isUserInvitedMember() { + return false + } + }, promises: { - getInvitedMembersWithPrivilegeLevels: sinon - .stub() - .resolves(['members', 'mock']), isUserInvitedMemberOfProject: sinon.stub().resolves(false), }, } @@ -67,22 +94,23 @@ describe('EditorHttpController', function () { userIsTokenMember: sinon.stub().resolves(false), }, } + this.invites = [ + { + _id: 'invite_one', + email: 'user-one@example.com', + privileges: 'readOnly', + projectId: this.project._id, + }, + { + _id: 'invite_two', + email: 'user-two@example.com', + privileges: 'readOnly', + projectId: this.project._id, + }, + ] this.CollaboratorsInviteGetter = { promises: { - getAllInvites: sinon.stub().resolves([ - { - _id: 'invite_one', - email: 'user-one@example.com', - privileges: 'readOnly', - projectId: 
this.project._id, - }, - { - _id: 'invite_two', - email: 'user-two@example.com', - privileges: 'readOnly', - projectId: this.project._id, - }, - ]), + getAllInvites: sinon.stub().resolves(this.invites), }, } this.EditorController = { @@ -170,13 +198,28 @@ describe('EditorHttpController', function () { describe('successfully', function () { beforeEach(function (done) { - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.resolves( - true - ) + sinon + .stub( + this.CollaboratorsGetter.ProjectAccess.prototype, + 'isUserInvitedMember' + ) + .returns(true) this.res.callback = done this.EditorHttpController.joinProject(this.req, this.res) }) + it('should request a full view', function () { + expect( + this.ProjectEditorHandler.buildProjectModelView + ).to.have.been.calledWith( + this.project, + this.ownerMember, + this.members, + this.invites, + false + ) + }) + it('should return the project and privilege level', function () { expect(this.res.json).to.have.been.calledWith({ project: this.projectView, @@ -213,14 +256,23 @@ describe('EditorHttpController', function () { describe('with a restricted user', function () { beforeEach(function (done) { + this.ProjectEditorHandler.buildProjectModelView.returns( + this.reducedProjectView + ) this.AuthorizationManager.isRestrictedUser.returns(true) - this.AuthorizationManager.promises.getPrivilegeLevelForProject.resolves( + this.AuthorizationManager.promises.getPrivilegeLevelForProjectWithProjectAccess.resolves( 'readOnly' ) this.res.callback = done this.EditorHttpController.joinProject(this.req, this.res) }) + it('should request a restricted view', function () { + expect( + this.ProjectEditorHandler.buildProjectModelView + ).to.have.been.calledWith(this.project, this.ownerMember, [], [], true) + }) + it('should mark the user as restricted, and hide details of owner', function () { expect(this.res.json).to.have.been.calledWith({ project: this.reducedProjectView, @@ -234,7 +286,7 @@ describe('EditorHttpController', function () { describe('when not authorized', function () { beforeEach(function (done) { - this.AuthorizationManager.promises.getPrivilegeLevelForProject.resolves( + this.AuthorizationManager.promises.getPrivilegeLevelForProjectWithProjectAccess.resolves( null ) this.res.callback = done @@ -250,6 +302,9 @@ describe('EditorHttpController', function () { beforeEach(function (done) { this.token = 'token' this.TokenAccessHandler.getRequestToken.returns(this.token) + this.ProjectEditorHandler.buildProjectModelView.returns( + this.reducedProjectView + ) this.req.body = { userId: 'anonymous-user', anonymousAccessToken: this.token, @@ -258,12 +313,18 @@ describe('EditorHttpController', function () { this.AuthorizationManager.isRestrictedUser .withArgs(null, 'readOnly', false, false) .returns(true) - this.AuthorizationManager.promises.getPrivilegeLevelForProject + this.AuthorizationManager.promises.getPrivilegeLevelForProjectWithProjectAccess .withArgs(null, this.project._id, this.token) .resolves('readOnly') this.EditorHttpController.joinProject(this.req, this.res) }) + it('should request a restricted view', function () { + expect( + this.ProjectEditorHandler.buildProjectModelView + ).to.have.been.calledWith(this.project, this.ownerMember, [], [], true) + }) + it('should mark the user as restricted', function () { expect(this.res.json).to.have.been.calledWith({ project: this.reducedProjectView, @@ -277,11 +338,19 @@ describe('EditorHttpController', function () { describe('with a token access user', function () { beforeEach(function (done) { 
- this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.resolves( - false - ) - this.CollaboratorsHandler.promises.userIsTokenMember.resolves(true) - this.AuthorizationManager.promises.getPrivilegeLevelForProject.resolves( + sinon + .stub( + this.CollaboratorsGetter.ProjectAccess.prototype, + 'isUserInvitedMember' + ) + .returns(false) + sinon + .stub( + this.CollaboratorsGetter.ProjectAccess.prototype, + 'isUserTokenMember' + ) + .returns(true) + this.AuthorizationManager.promises.getPrivilegeLevelForProjectWithProjectAccess.resolves( 'readAndWrite' ) this.res.callback = done diff --git a/services/web/test/unit/src/Email/EmailBuilderTests.js b/services/web/test/unit/src/Email/EmailBuilderTests.js index a8a0dc1ad5..8cc83f0228 100644 --- a/services/web/test/unit/src/Email/EmailBuilderTests.js +++ b/services/web/test/unit/src/Email/EmailBuilderTests.js @@ -818,6 +818,43 @@ describe('EmailBuilder', function () { }) }) }) + + describe('removeGroupMember', function () { + beforeEach(function () { + this.passwordResetUrl = `${this.settings.siteUrl}/user/password/reset` + this.emailAddress = 'example@overleaf.com' + this.opts = { + to: this.emailAddress, + adminName: 'abcdef', + } + this.email = this.EmailBuilder.buildEmail( + 'removeGroupMember', + this.opts + ) + this.dom = cheerio.load(this.email.html) + }) + + it('should build the email', function () { + expect(this.email.html).to.exist + expect(this.email.text).to.exist + }) + + describe('HTML email', function () { + it('should include links', function () { + const resetPasswordLink = this.dom('a:contains("set a password")') + expect(resetPasswordLink.length).to.equal(1) + expect(resetPasswordLink.attr('href')).to.equal( + this.passwordResetUrl + ) + }) + }) + + describe('plain text email', function () { + it('should include URLs', function () { + expect(this.email.text).to.contain(this.passwordResetUrl) + }) + }) + }) }) }) }) diff --git a/services/web/test/unit/src/Exports/ExportsController.test.mjs b/services/web/test/unit/src/Exports/ExportsController.test.mjs new file mode 100644 index 0000000000..cd8f4ba7a9 --- /dev/null +++ b/services/web/test/unit/src/Exports/ExportsController.test.mjs @@ -0,0 +1,215 @@ +import { expect, vi } from 'vitest' +import sinon from 'sinon' +const modulePath = new URL( + '../../../../app/src/Features/Exports/ExportsController.mjs', + import.meta.url +).pathname + +describe('ExportsController', function () { + const projectId = '123njdskj9jlk' + const userId = '123nd3ijdks' + const brandVariationId = 22 + const firstName = 'first' + const lastName = 'last' + const title = 'title' + const description = 'description' + const author = 'author' + const license = 'other' + const showSource = true + + beforeEach(async function (ctx) { + ctx.handler = { getUserNotifications: sinon.stub().callsArgWith(1) } + ctx.req = { + params: { + project_id: projectId, + brand_variation_id: brandVariationId, + }, + body: { + firstName, + lastName, + }, + session: { + user: { + _id: userId, + }, + }, + i18n: { + translate() {}, + }, + } + ctx.res = { + json: sinon.stub(), + status: sinon.stub(), + } + ctx.res.status.returns(ctx.res) + ctx.next = sinon.stub() + ctx.AuthenticationController = { + getLoggedInUserId: sinon.stub().returns(ctx.req.session.user._id), + } + + vi.doMock( + '../../../../app/src/Features/Exports/ExportsHandler.mjs', + () => ({ + default: ctx.handler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Authentication/AuthenticationController.js', + () => ({ + default: 
ctx.AuthenticationController, + }) + ) + + ctx.controller = (await import(modulePath)).default + }) + + describe('without gallery fields', function () { + it('should ask the handler to perform the export', function (ctx) { + return new Promise(resolve => { + ctx.handler.exportProject = sinon + .stub() + .yields(null, { iAmAnExport: true, v1_id: 897 }) + const expected = { + project_id: projectId, + user_id: userId, + brand_variation_id: brandVariationId, + first_name: firstName, + last_name: lastName, + } + return ctx.controller.exportProject(ctx.req, { + json: body => { + expect(ctx.handler.exportProject.args[0][0]).to.deep.equal(expected) + expect(body).to.deep.equal({ + export_v1_id: 897, + message: undefined, + }) + return resolve() + }, + }) + }) + }) + }) + + describe('with a message from v1', function () { + it('should ask the handler to perform the export', function (ctx) { + return new Promise(resolve => { + ctx.handler.exportProject = sinon.stub().yields(null, { + iAmAnExport: true, + v1_id: 897, + message: 'RESUBMISSION', + }) + const expected = { + project_id: projectId, + user_id: userId, + brand_variation_id: brandVariationId, + first_name: firstName, + last_name: lastName, + } + return ctx.controller.exportProject(ctx.req, { + json: body => { + expect(ctx.handler.exportProject.args[0][0]).to.deep.equal(expected) + expect(body).to.deep.equal({ + export_v1_id: 897, + message: 'RESUBMISSION', + }) + return resolve() + }, + }) + }) + }) + }) + + describe('with gallery fields', function () { + beforeEach(function (ctx) { + ctx.req.body.title = title + ctx.req.body.description = description + ctx.req.body.author = author + ctx.req.body.license = license + return (ctx.req.body.showSource = true) + }) + + it('should ask the handler to perform the export', function (ctx) { + return new Promise(resolve => { + ctx.handler.exportProject = sinon + .stub() + .yields(null, { iAmAnExport: true, v1_id: 897 }) + const expected = { + project_id: projectId, + user_id: userId, + brand_variation_id: brandVariationId, + first_name: firstName, + last_name: lastName, + title, + description, + author, + license, + show_source: showSource, + } + return ctx.controller.exportProject(ctx.req, { + json: body => { + expect(ctx.handler.exportProject.args[0][0]).to.deep.equal(expected) + expect(body).to.deep.equal({ + export_v1_id: 897, + message: undefined, + }) + return resolve() + }, + }) + }) + }) + }) + + describe('with an error return from v1 to forward to the publish modal', function () { + it('should forward the response onward', function (ctx) { + return new Promise(resolve => { + ctx.error_json = { status: 422, message: 'nope' } + ctx.handler.exportProject = sinon + .stub() + .yields({ forwardResponse: ctx.error_json }) + ctx.controller.exportProject(ctx.req, ctx.res, ctx.next) + expect(ctx.res.json.args[0][0]).to.deep.equal(ctx.error_json) + expect(ctx.res.status.args[0][0]).to.equal(ctx.error_json.status) + return resolve() + }) + }) + }) + + it('should ask the handler to return the status of an export', function (ctx) { + return new Promise(resolve => { + ctx.handler.fetchExport = sinon.stub().yields( + null, + `{ + "id":897, + "status_summary":"completed", + "status_detail":"all done", + "partner_submission_id":"abc123", + "v2_user_email":"la@tex.com", + "v2_user_first_name":"Arthur", + "v2_user_last_name":"Author", + "title":"my project", + "token":"token" + }` + ) + + ctx.req.params = { project_id: projectId, export_id: 897 } + return ctx.controller.exportStatus(ctx.req, { + json: body => { + 
expect(body).to.deep.equal({ + export_json: { + status_summary: 'completed', + status_detail: 'all done', + partner_submission_id: 'abc123', + v2_user_email: 'la@tex.com', + v2_user_first_name: 'Arthur', + v2_user_last_name: 'Author', + title: 'my project', + token: 'token', + }, + }) + return resolve() + }, + }) + }) + }) +}) diff --git a/services/web/test/unit/src/Exports/ExportsControllerTests.mjs b/services/web/test/unit/src/Exports/ExportsControllerTests.mjs deleted file mode 100644 index 65e6e16d27..0000000000 --- a/services/web/test/unit/src/Exports/ExportsControllerTests.mjs +++ /dev/null @@ -1,196 +0,0 @@ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -import esmock from 'esmock' -import { expect } from 'chai' -import sinon from 'sinon' -const modulePath = new URL( - '../../../../app/src/Features/Exports/ExportsController.mjs', - import.meta.url -).pathname - -describe('ExportsController', function () { - const projectId = '123njdskj9jlk' - const userId = '123nd3ijdks' - const brandVariationId = 22 - const firstName = 'first' - const lastName = 'last' - const title = 'title' - const description = 'description' - const author = 'author' - const license = 'other' - const showSource = true - - beforeEach(async function () { - this.handler = { getUserNotifications: sinon.stub().callsArgWith(1) } - this.req = { - params: { - project_id: projectId, - brand_variation_id: brandVariationId, - }, - body: { - firstName, - lastName, - }, - session: { - user: { - _id: userId, - }, - }, - i18n: { - translate() {}, - }, - } - this.res = { - json: sinon.stub(), - status: sinon.stub(), - } - this.res.status.returns(this.res) - this.next = sinon.stub() - this.AuthenticationController = { - getLoggedInUserId: sinon.stub().returns(this.req.session.user._id), - } - return (this.controller = await esmock.strict(modulePath, { - '../../../../app/src/Features/Exports/ExportsHandler.mjs': this.handler, - '../../../../app/src/Features/Authentication/AuthenticationController.js': - this.AuthenticationController, - })) - }) - - describe('without gallery fields', function () { - it('should ask the handler to perform the export', function (done) { - this.handler.exportProject = sinon - .stub() - .yields(null, { iAmAnExport: true, v1_id: 897 }) - const expected = { - project_id: projectId, - user_id: userId, - brand_variation_id: brandVariationId, - first_name: firstName, - last_name: lastName, - } - return this.controller.exportProject(this.req, { - json: body => { - expect(this.handler.exportProject.args[0][0]).to.deep.equal(expected) - expect(body).to.deep.equal({ export_v1_id: 897, message: undefined }) - return done() - }, - }) - }) - }) - - describe('with a message from v1', function () { - it('should ask the handler to perform the export', function (done) { - this.handler.exportProject = sinon.stub().yields(null, { - iAmAnExport: true, - v1_id: 897, - message: 'RESUBMISSION', - }) - const expected = { - project_id: projectId, - user_id: userId, - brand_variation_id: brandVariationId, - first_name: firstName, - last_name: lastName, - } - return this.controller.exportProject(this.req, { - json: body => { - expect(this.handler.exportProject.args[0][0]).to.deep.equal(expected) - expect(body).to.deep.equal({ - export_v1_id: 897, - message: 'RESUBMISSION', - }) 
- return done() - }, - }) - }) - }) - - describe('with gallery fields', function () { - beforeEach(function () { - this.req.body.title = title - this.req.body.description = description - this.req.body.author = author - this.req.body.license = license - return (this.req.body.showSource = true) - }) - - it('should ask the handler to perform the export', function (done) { - this.handler.exportProject = sinon - .stub() - .yields(null, { iAmAnExport: true, v1_id: 897 }) - const expected = { - project_id: projectId, - user_id: userId, - brand_variation_id: brandVariationId, - first_name: firstName, - last_name: lastName, - title, - description, - author, - license, - show_source: showSource, - } - return this.controller.exportProject(this.req, { - json: body => { - expect(this.handler.exportProject.args[0][0]).to.deep.equal(expected) - expect(body).to.deep.equal({ export_v1_id: 897, message: undefined }) - return done() - }, - }) - }) - }) - - describe('with an error return from v1 to forward to the publish modal', function () { - it('should forward the response onward', function (done) { - this.error_json = { status: 422, message: 'nope' } - this.handler.exportProject = sinon - .stub() - .yields({ forwardResponse: this.error_json }) - this.controller.exportProject(this.req, this.res, this.next) - expect(this.res.json.args[0][0]).to.deep.equal(this.error_json) - expect(this.res.status.args[0][0]).to.equal(this.error_json.status) - return done() - }) - }) - - it('should ask the handler to return the status of an export', function (done) { - this.handler.fetchExport = sinon.stub().yields( - null, - `{ -"id":897, -"status_summary":"completed", -"status_detail":"all done", -"partner_submission_id":"abc123", -"v2_user_email":"la@tex.com", -"v2_user_first_name":"Arthur", -"v2_user_last_name":"Author", -"title":"my project", -"token":"token" -}` - ) - - this.req.params = { project_id: projectId, export_id: 897 } - return this.controller.exportStatus(this.req, { - json: body => { - expect(body).to.deep.equal({ - export_json: { - status_summary: 'completed', - status_detail: 'all done', - partner_submission_id: 'abc123', - v2_user_email: 'la@tex.com', - v2_user_first_name: 'Arthur', - v2_user_last_name: 'Author', - title: 'my project', - token: 'token', - }, - }) - return done() - }, - }) - }) -}) diff --git a/services/web/test/unit/src/Exports/ExportsHandler.test.mjs b/services/web/test/unit/src/Exports/ExportsHandler.test.mjs new file mode 100644 index 0000000000..a7944beced --- /dev/null +++ b/services/web/test/unit/src/Exports/ExportsHandler.test.mjs @@ -0,0 +1,736 @@ +import { expect, vi } from 'vitest' +import sinon from 'sinon' +const modulePath = '../../../../app/src/Features/Exports/ExportsHandler.mjs' + +describe('ExportsHandler', function () { + beforeEach(async function (ctx) { + ctx.stubRequest = {} + ctx.request = { + defaults: () => { + return ctx.stubRequest + }, + } + + vi.doMock('../../../../app/src/Features/Project/ProjectGetter', () => ({ + default: (ctx.ProjectGetter = {}), + })) + + vi.doMock( + '../../../../app/src/Features/Project/ProjectHistoryHandler', + () => ({ + default: (ctx.ProjectHistoryHandler = {}), + }) + ) + + vi.doMock('../../../../app/src/Features/Project/ProjectLocator', () => ({ + default: (ctx.ProjectLocator = {}), + })) + + vi.doMock( + '../../../../app/src/Features/Project/ProjectRootDocManager', + () => ({ + default: (ctx.ProjectRootDocManager = {}), + }) + ) + + vi.doMock('../../../../app/src/Features/User/UserGetter', () => ({ + default: (ctx.UserGetter = 
{}), + })) + + vi.doMock('@overleaf/settings', () => ({ + default: (ctx.settings = {}), + })) + + vi.doMock('request', () => ({ + default: ctx.request, + })) + + ctx.ExportsHandler = (await import(modulePath)).default + ctx.project_id = 'project-id-123' + ctx.project_history_id = 987 + ctx.user_id = 'user-id-456' + ctx.brand_variation_id = 789 + ctx.title = 'title' + ctx.description = 'description' + ctx.author = 'author' + ctx.license = 'other' + ctx.show_source = true + ctx.export_params = { + project_id: ctx.project_id, + brand_variation_id: ctx.brand_variation_id, + user_id: ctx.user_id, + title: ctx.title, + description: ctx.description, + author: ctx.author, + license: ctx.license, + show_source: ctx.show_source, + } + ctx.callback = sinon.stub() + }) + + describe('exportProject', function () { + beforeEach(function (ctx) { + ctx.export_data = { iAmAnExport: true } + ctx.response_body = { iAmAResponseBody: true } + ctx.ExportsHandler._buildExport = sinon + .stub() + .yields(null, ctx.export_data) + ctx.ExportsHandler._requestExport = sinon + .stub() + .yields(null, ctx.response_body) + }) + + describe('when all goes well', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.ExportsHandler.exportProject( + ctx.export_params, + (error, exportData) => { + ctx.callback(error, exportData) + resolve() + } + ) + }) + }) + + it('should build the export', function (ctx) { + ctx.ExportsHandler._buildExport + .calledWith(ctx.export_params) + .should.equal(true) + }) + + it('should request the export', function (ctx) { + ctx.ExportsHandler._requestExport + .calledWith(ctx.export_data) + .should.equal(true) + }) + + it('should return the export', function (ctx) { + ctx.callback.calledWith(null, ctx.export_data).should.equal(true) + }) + }) + + describe("when request can't be built", function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.ExportsHandler._buildExport = sinon + .stub() + .yields(new Error('cannot export project without root doc')) + ctx.ExportsHandler.exportProject( + ctx.export_params, + (error, exportData) => { + ctx.callback(error, exportData) + resolve() + } + ) + }) + }) + + it('should return an error', function (ctx) { + expect(ctx.callback.args[0][0]).to.be.instanceOf(Error) + }) + }) + + describe('when export request returns an error to forward to the user', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.error_json = { status: 422, message: 'nope' } + ctx.ExportsHandler._requestExport = sinon + .stub() + .yields(null, { forwardResponse: ctx.error_json }) + ctx.ExportsHandler.exportProject( + ctx.export_params, + (error, exportData) => { + ctx.callback(error, exportData) + resolve() + } + ) + }) + }) + + it('should return success and the response to forward', function (ctx) { + expect(ctx.callback.args[0][0]).not.to.be.instanceOf(Error) + ctx.callback.calledWith(null, { + forwardResponse: ctx.error_json, + }) + }) + }) + }) + + describe('_buildExport', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.project = { + id: ctx.project_id, + rootDoc_id: 'doc1_id', + compiler: 'pdflatex', + imageName: 'mock-image-name', + overleaf: { + id: ctx.project_history_id, // for projects imported from v1 + history: { + id: ctx.project_history_id, + }, + }, + } + ctx.user = { + id: ctx.user_id, + first_name: 'Arthur', + last_name: 'Author', + email: 'arthur.author@arthurauthoring.org', + overleaf: { + id: 876, + }, + } + ctx.rootDocPath = 'main.tex' + 
ctx.historyVersion = 777 + ctx.ProjectGetter.getProject = sinon.stub().yields(null, ctx.project) + ctx.ProjectHistoryHandler.ensureHistoryExistsForProject = sinon + .stub() + .yields(null) + ctx.ProjectLocator.findRootDoc = sinon + .stub() + .yields(null, [null, { fileSystem: 'main.tex' }]) + ctx.ProjectRootDocManager.ensureRootDocumentIsValid = sinon + .stub() + .callsArgWith(1, null) + ctx.UserGetter.getUser = sinon.stub().yields(null, ctx.user) + ctx.ExportsHandler._requestVersion = sinon + .stub() + .yields(null, ctx.historyVersion) + resolve() + }) + }) + + describe('when all goes well', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.ExportsHandler._buildExport( + ctx.export_params, + (error, exportData) => { + ctx.callback(error, exportData) + resolve() + } + ) + }) + }) + + it('should ensure the project has history', function (ctx) { + ctx.ProjectHistoryHandler.ensureHistoryExistsForProject.called.should.equal( + true + ) + }) + + it('should request the project history version', function (ctx) { + ctx.ExportsHandler._requestVersion.called.should.equal(true) + }) + + it('should return export data', function (ctx) { + const expectedExportData = { + project: { + id: ctx.project_id, + rootDocPath: ctx.rootDocPath, + historyId: ctx.project_history_id, + historyVersion: ctx.historyVersion, + v1ProjectId: ctx.project_history_id, + metadata: { + compiler: 'pdflatex', + imageName: 'mock-image-name', + title: ctx.title, + description: ctx.description, + author: ctx.author, + license: ctx.license, + showSource: ctx.show_source, + }, + }, + user: { + id: ctx.user_id, + firstName: ctx.user.first_name, + lastName: ctx.user.last_name, + email: ctx.user.email, + orcidId: null, + v1UserId: 876, + }, + destination: { + brandVariationId: ctx.brand_variation_id, + }, + options: { + callbackUrl: null, + }, + } + ctx.callback.calledWith(null, expectedExportData).should.equal(true) + }) + }) + + describe('when we send replacement user first and last name', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.custom_first_name = 'FIRST' + ctx.custom_last_name = 'LAST' + ctx.export_params.first_name = ctx.custom_first_name + ctx.export_params.last_name = ctx.custom_last_name + ctx.ExportsHandler._buildExport( + ctx.export_params, + (error, exportData) => { + ctx.callback(error, exportData) + resolve() + } + ) + }) + }) + + it('should send the data from the user input', function (ctx) { + const expectedExportData = { + project: { + id: ctx.project_id, + rootDocPath: ctx.rootDocPath, + historyId: ctx.project_history_id, + historyVersion: ctx.historyVersion, + v1ProjectId: ctx.project_history_id, + metadata: { + compiler: 'pdflatex', + imageName: 'mock-image-name', + title: ctx.title, + description: ctx.description, + author: ctx.author, + license: ctx.license, + showSource: ctx.show_source, + }, + }, + user: { + id: ctx.user_id, + firstName: ctx.custom_first_name, + lastName: ctx.custom_last_name, + email: ctx.user.email, + orcidId: null, + v1UserId: 876, + }, + destination: { + brandVariationId: ctx.brand_variation_id, + }, + options: { + callbackUrl: null, + }, + } + ctx.callback.calledWith(null, expectedExportData).should.equal(true) + }) + }) + + describe('when project is not found', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.ProjectGetter.getProject = sinon + .stub() + .yields(new Error('project not found')) + ctx.ExportsHandler._buildExport( + ctx.export_params, + (error, exportData) => { + 
ctx.callback(error, exportData) + resolve() + } + ) + }) + }) + + it('should return an error', function (ctx) { + expect(ctx.callback.args[0][0]).to.be.instanceOf(Error) + }) + }) + + describe('when project has no root doc', function () { + describe('when a root doc can be set automatically', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.project.rootDoc_id = null + ctx.ProjectLocator.findRootDoc = sinon + .stub() + .yields(null, [null, { fileSystem: 'other.tex' }]) + ctx.ExportsHandler._buildExport( + ctx.export_params, + (error, exportData) => { + ctx.callback(error, exportData) + resolve() + } + ) + }) + }) + + it('should set a root doc', function (ctx) { + ctx.ProjectRootDocManager.ensureRootDocumentIsValid.called.should.equal( + true + ) + }) + + it('should return export data', function (ctx) { + const expectedExportData = { + project: { + id: ctx.project_id, + rootDocPath: 'other.tex', + historyId: ctx.project_history_id, + historyVersion: ctx.historyVersion, + v1ProjectId: ctx.project_history_id, + metadata: { + compiler: 'pdflatex', + imageName: 'mock-image-name', + title: ctx.title, + description: ctx.description, + author: ctx.author, + license: ctx.license, + showSource: ctx.show_source, + }, + }, + user: { + id: ctx.user_id, + firstName: ctx.user.first_name, + lastName: ctx.user.last_name, + email: ctx.user.email, + orcidId: null, + v1UserId: 876, + }, + destination: { + brandVariationId: ctx.brand_variation_id, + }, + options: { + callbackUrl: null, + }, + } + ctx.callback.calledWith(null, expectedExportData).should.equal(true) + }) + }) + }) + + describe('when project has an invalid root doc', function () { + describe('when a new root doc can be set automatically', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.fakeDoc_id = '1a2b3c4d5e6f' + ctx.project.rootDoc_id = ctx.fakeDoc_id + ctx.ProjectLocator.findRootDoc = sinon + .stub() + .yields(null, [null, { fileSystem: 'other.tex' }]) + ctx.ExportsHandler._buildExport( + ctx.export_params, + (error, exportData) => { + ctx.callback(error, exportData) + resolve() + } + ) + }) + }) + + it('should set a valid root doc', function (ctx) { + ctx.ProjectRootDocManager.ensureRootDocumentIsValid.called.should.equal( + true + ) + }) + + it('should return export data', function (ctx) { + const expectedExportData = { + project: { + id: ctx.project_id, + rootDocPath: 'other.tex', + historyId: ctx.project_history_id, + historyVersion: ctx.historyVersion, + v1ProjectId: ctx.project_history_id, + metadata: { + compiler: 'pdflatex', + imageName: 'mock-image-name', + title: ctx.title, + description: ctx.description, + author: ctx.author, + license: ctx.license, + showSource: ctx.show_source, + }, + }, + user: { + id: ctx.user_id, + firstName: ctx.user.first_name, + lastName: ctx.user.last_name, + email: ctx.user.email, + orcidId: null, + v1UserId: 876, + }, + destination: { + brandVariationId: ctx.brand_variation_id, + }, + options: { + callbackUrl: null, + }, + } + ctx.callback.calledWith(null, expectedExportData).should.equal(true) + }) + }) + + describe('when no root doc can be identified', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.ProjectLocator.findRootDoc = sinon + .stub() + .yields(null, [null, null]) + ctx.ExportsHandler._buildExport( + ctx.export_params, + (error, exportData) => { + ctx.callback(error, exportData) + resolve() + } + ) + }) + }) + + it('should return an error', function (ctx) { + 
expect(ctx.callback.args[0][0]).to.be.instanceOf(Error) + }) + }) + }) + + describe('when user is not found', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.UserGetter.getUser = sinon + .stub() + .yields(new Error('user not found')) + ctx.ExportsHandler._buildExport( + ctx.export_params, + (error, exportData) => { + ctx.callback(error, exportData) + resolve() + } + ) + }) + }) + + it('should return an error', function (ctx) { + expect(ctx.callback.args[0][0]).to.be.instanceOf(Error) + }) + }) + + describe('when project history request fails', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.ExportsHandler._requestVersion = sinon + .stub() + .yields(new Error('project history call failed')) + ctx.ExportsHandler._buildExport( + ctx.export_params, + (error, exportData) => { + ctx.callback(error, exportData) + resolve() + } + ) + }) + }) + + it('should return an error', function (ctx) { + expect(ctx.callback.args[0][0]).to.be.instanceOf(Error) + }) + }) + }) + + describe('_requestExport', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.settings.apis = { + v1: { + url: 'http://127.0.0.1:5000', + user: 'overleaf', + pass: 'pass', + timeout: 15000, + }, + } + ctx.export_data = { iAmAnExport: true } + ctx.export_id = 4096 + ctx.stubPost = sinon + .stub() + .yields(null, { statusCode: 200 }, { exportId: ctx.export_id }) + resolve() + }) + }) + + describe('when all goes well', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.stubRequest.post = ctx.stubPost + ctx.ExportsHandler._requestExport( + ctx.export_data, + (error, exportV1Id) => { + ctx.callback(error, exportV1Id) + resolve() + } + ) + }) + }) + + it('should issue the request', function (ctx) { + expect(ctx.stubPost.getCall(0).args[0]).to.deep.equal({ + url: ctx.settings.apis.v1.url + '/api/v1/overleaf/exports', + auth: { + user: ctx.settings.apis.v1.user, + pass: ctx.settings.apis.v1.pass, + }, + json: ctx.export_data, + timeout: 15000, + }) + }) + + it('should return the body with v1 export id', function (ctx) { + ctx.callback + .calledWith(null, { exportId: ctx.export_id }) + .should.equal(true) + }) + }) + + describe('when the request fails', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.stubRequest.post = sinon + .stub() + .yields(new Error('export request failed')) + ctx.ExportsHandler._requestExport( + ctx.export_data, + (error, exportV1Id) => { + ctx.callback(error, exportV1Id) + resolve() + } + ) + }) + }) + + it('should return an error', function (ctx) { + expect(ctx.callback.args[0][0]).to.be.instanceOf(Error) + }) + }) + + describe('when the request returns an error response to forward', function () { + beforeEach(function (ctx) { + ctx.error_code = 422 + ctx.error_json = { status: ctx.error_code, message: 'nope' } + ctx.stubRequest.post = sinon + .stub() + .yields(null, { statusCode: ctx.error_code }, ctx.error_json) + return new Promise(resolve => { + ctx.ExportsHandler._requestExport( + ctx.export_data, + (error, exportV1Id) => { + ctx.callback(error, exportV1Id) + resolve() + } + ) + }) + }) + + it('should return success and the response to forward', function (ctx) { + expect(ctx.callback.args[0][0]).not.to.be.instanceOf(Error) + ctx.callback.calledWith(null, { + forwardResponse: ctx.error_json, + }) + }) + }) + }) + + describe('fetchExport', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.settings.apis = { + v1: 
{ + url: 'http://127.0.0.1:5000', + user: 'overleaf', + pass: 'pass', + timeout: 15000, + }, + } + ctx.export_id = 897 + ctx.body = '{"id":897, "status_summary":"completed"}' + ctx.stubGet = sinon + .stub() + .yields(null, { statusCode: 200 }, { body: ctx.body }) + resolve() + }) + }) + + describe('when all goes well', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.stubRequest.get = ctx.stubGet + ctx.ExportsHandler.fetchExport(ctx.export_id, (error, body) => { + ctx.callback(error, body) + resolve() + }) + }) + }) + + it('should issue the request', function (ctx) { + expect(ctx.stubGet.getCall(0).args[0]).to.deep.equal({ + url: + ctx.settings.apis.v1.url + + '/api/v1/overleaf/exports/' + + ctx.export_id, + auth: { + user: ctx.settings.apis.v1.user, + pass: ctx.settings.apis.v1.pass, + }, + timeout: 15000, + }) + }) + + it('should return the v1 export id', function (ctx) { + ctx.callback.calledWith(null, { body: ctx.body }).should.equal(true) + }) + }) + }) + + describe('fetchDownload', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.settings.apis = { + v1: { + url: 'http://127.0.0.1:5000', + user: 'overleaf', + pass: 'pass', + timeout: 15000, + }, + } + ctx.export_id = 897 + ctx.body = + 'https://writelatex-conversions-dev.s3.amazonaws.com/exports/ieee_latexqc/tnb/2912/xggmprcrpfwbsnqzqqmvktddnrbqkqkr.zip?X-Amz-Expires=14400&X-Amz-Date=20180730T181003Z&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAJDGDIJFGLNVGZH6A/20180730/us-east-1/s3/aws4_request&X-Amz-SignedHeaders=host&X-Amz-Signature=dec990336913cef9933f0e269afe99722d7ab2830ebf2c618a75673ee7159fee' + ctx.stubGet = sinon + .stub() + .yields(null, { statusCode: 200 }, { body: ctx.body }) + resolve() + }) + }) + + describe('when all goes well', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.stubRequest.get = ctx.stubGet + ctx.ExportsHandler.fetchDownload( + ctx.export_id, + 'zip', + (error, body) => { + ctx.callback(error, body) + resolve() + } + ) + }) + }) + + it('should issue the request', function (ctx) { + expect(ctx.stubGet.getCall(0).args[0]).to.deep.equal({ + url: + ctx.settings.apis.v1.url + + '/api/v1/overleaf/exports/' + + ctx.export_id + + '/zip_url', + auth: { + user: ctx.settings.apis.v1.user, + pass: ctx.settings.apis.v1.pass, + }, + timeout: 15000, + }) + }) + + it('should return the v1 export id', function (ctx) { + ctx.callback.calledWith(null, { body: ctx.body }).should.equal(true) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/Exports/ExportsHandlerTests.mjs b/services/web/test/unit/src/Exports/ExportsHandlerTests.mjs deleted file mode 100644 index 1a7f985250..0000000000 --- a/services/web/test/unit/src/Exports/ExportsHandlerTests.mjs +++ /dev/null @@ -1,698 +0,0 @@ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -import sinon from 'sinon' -import esmock from 'esmock' -import { expect } from 'chai' -const modulePath = '../../../../app/src/Features/Exports/ExportsHandler.mjs' - -describe('ExportsHandler', function () { - beforeEach(async function () { - this.stubRequest = {} - this.request = { - defaults: () => { - return this.stubRequest - }, - } - this.ExportsHandler = await esmock.strict(modulePath, { - '../../../../app/src/Features/Project/ProjectGetter': - (this.ProjectGetter = {}), - '../../../../app/src/Features/Project/ProjectHistoryHandler': - (this.ProjectHistoryHandler = {}), - '../../../../app/src/Features/Project/ProjectLocator': - (this.ProjectLocator = {}), - '../../../../app/src/Features/Project/ProjectRootDocManager': - (this.ProjectRootDocManager = {}), - '../../../../app/src/Features/User/UserGetter': (this.UserGetter = {}), - '@overleaf/settings': (this.settings = {}), - request: this.request, - }) - this.project_id = 'project-id-123' - this.project_history_id = 987 - this.user_id = 'user-id-456' - this.brand_variation_id = 789 - this.title = 'title' - this.description = 'description' - this.author = 'author' - this.license = 'other' - this.show_source = true - this.export_params = { - project_id: this.project_id, - brand_variation_id: this.brand_variation_id, - user_id: this.user_id, - title: this.title, - description: this.description, - author: this.author, - license: this.license, - show_source: this.show_source, - } - return (this.callback = sinon.stub()) - }) - - describe('exportProject', function () { - beforeEach(function () { - this.export_data = { iAmAnExport: true } - this.response_body = { iAmAResponseBody: true } - this.ExportsHandler._buildExport = sinon - .stub() - .yields(null, this.export_data) - return (this.ExportsHandler._requestExport = sinon - .stub() - .yields(null, this.response_body)) - }) - - describe('when all goes well', function () { - beforeEach(function (done) { - return this.ExportsHandler.exportProject( - this.export_params, - (error, exportData) => { - this.callback(error, exportData) - return done() - } - ) - }) - - it('should build the export', function () { - return this.ExportsHandler._buildExport - .calledWith(this.export_params) - .should.equal(true) - }) - - it('should request the export', function () { - return this.ExportsHandler._requestExport - .calledWith(this.export_data) - .should.equal(true) - }) - - it('should return the export', function () { - return this.callback - .calledWith(null, this.export_data) - .should.equal(true) - }) - }) - - describe("when request can't be built", function () { - beforeEach(function (done) { - this.ExportsHandler._buildExport = sinon - .stub() - .yields(new Error('cannot export project without root doc')) - return this.ExportsHandler.exportProject( - this.export_params, - (error, exportData) => { - this.callback(error, exportData) - return done() - } - ) - }) - - it('should return an error', function () { - return (this.callback.args[0][0] instanceof Error).should.equal(true) - }) - }) - - describe('when export request returns an error to forward to the user', function () { - beforeEach(function (done) { - this.error_json = { status: 422, message: 'nope' } - this.ExportsHandler._requestExport = sinon - .stub() - .yields(null, { forwardResponse: this.error_json }) - return 
this.ExportsHandler.exportProject( - this.export_params, - (error, exportData) => { - this.callback(error, exportData) - return done() - } - ) - }) - - it('should return success and the response to forward', function () { - ;(this.callback.args[0][0] instanceof Error).should.equal(false) - return this.callback.calledWith(null, { - forwardResponse: this.error_json, - }) - }) - }) - }) - - describe('_buildExport', function () { - beforeEach(function (done) { - this.project = { - id: this.project_id, - rootDoc_id: 'doc1_id', - compiler: 'pdflatex', - imageName: 'mock-image-name', - overleaf: { - id: this.project_history_id, // for projects imported from v1 - history: { - id: this.project_history_id, - }, - }, - } - this.user = { - id: this.user_id, - first_name: 'Arthur', - last_name: 'Author', - email: 'arthur.author@arthurauthoring.org', - overleaf: { - id: 876, - }, - } - this.rootDocPath = 'main.tex' - this.historyVersion = 777 - this.ProjectGetter.getProject = sinon.stub().yields(null, this.project) - this.ProjectHistoryHandler.ensureHistoryExistsForProject = sinon - .stub() - .yields(null) - this.ProjectLocator.findRootDoc = sinon - .stub() - .yields(null, [null, { fileSystem: 'main.tex' }]) - this.ProjectRootDocManager.ensureRootDocumentIsValid = sinon - .stub() - .callsArgWith(1, null) - this.UserGetter.getUser = sinon.stub().yields(null, this.user) - this.ExportsHandler._requestVersion = sinon - .stub() - .yields(null, this.historyVersion) - return done() - }) - - describe('when all goes well', function () { - beforeEach(function (done) { - return this.ExportsHandler._buildExport( - this.export_params, - (error, exportData) => { - this.callback(error, exportData) - return done() - } - ) - }) - - it('should ensure the project has history', function () { - return this.ProjectHistoryHandler.ensureHistoryExistsForProject.called.should.equal( - true - ) - }) - - it('should request the project history version', function () { - return this.ExportsHandler._requestVersion.called.should.equal(true) - }) - - it('should return export data', function () { - const expectedExportData = { - project: { - id: this.project_id, - rootDocPath: this.rootDocPath, - historyId: this.project_history_id, - historyVersion: this.historyVersion, - v1ProjectId: this.project_history_id, - metadata: { - compiler: 'pdflatex', - imageName: 'mock-image-name', - title: this.title, - description: this.description, - author: this.author, - license: this.license, - showSource: this.show_source, - }, - }, - user: { - id: this.user_id, - firstName: this.user.first_name, - lastName: this.user.last_name, - email: this.user.email, - orcidId: null, - v1UserId: 876, - }, - destination: { - brandVariationId: this.brand_variation_id, - }, - options: { - callbackUrl: null, - }, - } - return this.callback - .calledWith(null, expectedExportData) - .should.equal(true) - }) - }) - - describe('when we send replacement user first and last name', function () { - beforeEach(function (done) { - this.custom_first_name = 'FIRST' - this.custom_last_name = 'LAST' - this.export_params.first_name = this.custom_first_name - this.export_params.last_name = this.custom_last_name - return this.ExportsHandler._buildExport( - this.export_params, - (error, exportData) => { - this.callback(error, exportData) - return done() - } - ) - }) - - it('should send the data from the user input', function () { - const expectedExportData = { - project: { - id: this.project_id, - rootDocPath: this.rootDocPath, - historyId: this.project_history_id, - historyVersion: 
this.historyVersion, - v1ProjectId: this.project_history_id, - metadata: { - compiler: 'pdflatex', - imageName: 'mock-image-name', - title: this.title, - description: this.description, - author: this.author, - license: this.license, - showSource: this.show_source, - }, - }, - user: { - id: this.user_id, - firstName: this.custom_first_name, - lastName: this.custom_last_name, - email: this.user.email, - orcidId: null, - v1UserId: 876, - }, - destination: { - brandVariationId: this.brand_variation_id, - }, - options: { - callbackUrl: null, - }, - } - return this.callback - .calledWith(null, expectedExportData) - .should.equal(true) - }) - }) - - describe('when project is not found', function () { - beforeEach(function (done) { - this.ProjectGetter.getProject = sinon - .stub() - .yields(new Error('project not found')) - return this.ExportsHandler._buildExport( - this.export_params, - (error, exportData) => { - this.callback(error, exportData) - return done() - } - ) - }) - - it('should return an error', function () { - return (this.callback.args[0][0] instanceof Error).should.equal(true) - }) - }) - - describe('when project has no root doc', function () { - describe('when a root doc can be set automatically', function () { - beforeEach(function (done) { - this.project.rootDoc_id = null - this.ProjectLocator.findRootDoc = sinon - .stub() - .yields(null, [null, { fileSystem: 'other.tex' }]) - return this.ExportsHandler._buildExport( - this.export_params, - (error, exportData) => { - this.callback(error, exportData) - return done() - } - ) - }) - - it('should set a root doc', function () { - return this.ProjectRootDocManager.ensureRootDocumentIsValid.called.should.equal( - true - ) - }) - - it('should return export data', function () { - const expectedExportData = { - project: { - id: this.project_id, - rootDocPath: 'other.tex', - historyId: this.project_history_id, - historyVersion: this.historyVersion, - v1ProjectId: this.project_history_id, - metadata: { - compiler: 'pdflatex', - imageName: 'mock-image-name', - title: this.title, - description: this.description, - author: this.author, - license: this.license, - showSource: this.show_source, - }, - }, - user: { - id: this.user_id, - firstName: this.user.first_name, - lastName: this.user.last_name, - email: this.user.email, - orcidId: null, - v1UserId: 876, - }, - destination: { - brandVariationId: this.brand_variation_id, - }, - options: { - callbackUrl: null, - }, - } - return this.callback - .calledWith(null, expectedExportData) - .should.equal(true) - }) - }) - }) - - describe('when project has an invalid root doc', function () { - describe('when a new root doc can be set automatically', function () { - beforeEach(function (done) { - this.fakeDoc_id = '1a2b3c4d5e6f' - this.project.rootDoc_id = this.fakeDoc_id - this.ProjectLocator.findRootDoc = sinon - .stub() - .yields(null, [null, { fileSystem: 'other.tex' }]) - return this.ExportsHandler._buildExport( - this.export_params, - (error, exportData) => { - this.callback(error, exportData) - return done() - } - ) - }) - - it('should set a valid root doc', function () { - return this.ProjectRootDocManager.ensureRootDocumentIsValid.called.should.equal( - true - ) - }) - - it('should return export data', function () { - const expectedExportData = { - project: { - id: this.project_id, - rootDocPath: 'other.tex', - historyId: this.project_history_id, - historyVersion: this.historyVersion, - v1ProjectId: this.project_history_id, - metadata: { - compiler: 'pdflatex', - imageName: 'mock-image-name', - 
title: this.title, - description: this.description, - author: this.author, - license: this.license, - showSource: this.show_source, - }, - }, - user: { - id: this.user_id, - firstName: this.user.first_name, - lastName: this.user.last_name, - email: this.user.email, - orcidId: null, - v1UserId: 876, - }, - destination: { - brandVariationId: this.brand_variation_id, - }, - options: { - callbackUrl: null, - }, - } - return this.callback - .calledWith(null, expectedExportData) - .should.equal(true) - }) - }) - - describe('when no root doc can be identified', function () { - beforeEach(function (done) { - this.ProjectLocator.findRootDoc = sinon - .stub() - .yields(null, [null, null]) - return this.ExportsHandler._buildExport( - this.export_params, - (error, exportData) => { - this.callback(error, exportData) - return done() - } - ) - }) - - it('should return an error', function () { - return (this.callback.args[0][0] instanceof Error).should.equal(true) - }) - }) - }) - - describe('when user is not found', function () { - beforeEach(function (done) { - this.UserGetter.getUser = sinon - .stub() - .yields(new Error('user not found')) - return this.ExportsHandler._buildExport( - this.export_params, - (error, exportData) => { - this.callback(error, exportData) - return done() - } - ) - }) - - it('should return an error', function () { - return (this.callback.args[0][0] instanceof Error).should.equal(true) - }) - }) - - describe('when project history request fails', function () { - beforeEach(function (done) { - this.ExportsHandler._requestVersion = sinon - .stub() - .yields(new Error('project history call failed')) - return this.ExportsHandler._buildExport( - this.export_params, - (error, exportData) => { - this.callback(error, exportData) - return done() - } - ) - }) - - it('should return an error', function () { - return (this.callback.args[0][0] instanceof Error).should.equal(true) - }) - }) - }) - - describe('_requestExport', function () { - beforeEach(function (done) { - this.settings.apis = { - v1: { - url: 'http://127.0.0.1:5000', - user: 'overleaf', - pass: 'pass', - timeout: 15000, - }, - } - this.export_data = { iAmAnExport: true } - this.export_id = 4096 - this.stubPost = sinon - .stub() - .yields(null, { statusCode: 200 }, { exportId: this.export_id }) - return done() - }) - - describe('when all goes well', function () { - beforeEach(function (done) { - this.stubRequest.post = this.stubPost - return this.ExportsHandler._requestExport( - this.export_data, - (error, exportV1Id) => { - this.callback(error, exportV1Id) - return done() - } - ) - }) - - it('should issue the request', function () { - return expect(this.stubPost.getCall(0).args[0]).to.deep.equal({ - url: this.settings.apis.v1.url + '/api/v1/overleaf/exports', - auth: { - user: this.settings.apis.v1.user, - pass: this.settings.apis.v1.pass, - }, - json: this.export_data, - timeout: 15000, - }) - }) - - it('should return the body with v1 export id', function () { - return this.callback - .calledWith(null, { exportId: this.export_id }) - .should.equal(true) - }) - }) - - describe('when the request fails', function () { - beforeEach(function (done) { - this.stubRequest.post = sinon - .stub() - .yields(new Error('export request failed')) - return this.ExportsHandler._requestExport( - this.export_data, - (error, exportV1Id) => { - this.callback(error, exportV1Id) - return done() - } - ) - }) - - it('should return an error', function () { - return (this.callback.args[0][0] instanceof Error).should.equal(true) - }) - }) - - 
describe('when the request returns an error response to forward', function () { - beforeEach(function (done) { - this.error_code = 422 - this.error_json = { status: this.error_code, message: 'nope' } - this.stubRequest.post = sinon - .stub() - .yields(null, { statusCode: this.error_code }, this.error_json) - return this.ExportsHandler._requestExport( - this.export_data, - (error, exportV1Id) => { - this.callback(error, exportV1Id) - return done() - } - ) - }) - - it('should return success and the response to forward', function () { - ;(this.callback.args[0][0] instanceof Error).should.equal(false) - return this.callback.calledWith(null, { - forwardResponse: this.error_json, - }) - }) - }) - }) - - describe('fetchExport', function () { - beforeEach(function (done) { - this.settings.apis = { - v1: { - url: 'http://127.0.0.1:5000', - user: 'overleaf', - pass: 'pass', - timeout: 15000, - }, - } - this.export_id = 897 - this.body = '{"id":897, "status_summary":"completed"}' - this.stubGet = sinon - .stub() - .yields(null, { statusCode: 200 }, { body: this.body }) - return done() - }) - - describe('when all goes well', function () { - beforeEach(function (done) { - this.stubRequest.get = this.stubGet - return this.ExportsHandler.fetchExport( - this.export_id, - (error, body) => { - this.callback(error, body) - return done() - } - ) - }) - - it('should issue the request', function () { - return expect(this.stubGet.getCall(0).args[0]).to.deep.equal({ - url: - this.settings.apis.v1.url + - '/api/v1/overleaf/exports/' + - this.export_id, - auth: { - user: this.settings.apis.v1.user, - pass: this.settings.apis.v1.pass, - }, - timeout: 15000, - }) - }) - - it('should return the v1 export id', function () { - return this.callback - .calledWith(null, { body: this.body }) - .should.equal(true) - }) - }) - }) - - describe('fetchDownload', function () { - beforeEach(function (done) { - this.settings.apis = { - v1: { - url: 'http://127.0.0.1:5000', - user: 'overleaf', - pass: 'pass', - timeout: 15000, - }, - } - this.export_id = 897 - this.body = - 'https://writelatex-conversions-dev.s3.amazonaws.com/exports/ieee_latexqc/tnb/2912/xggmprcrpfwbsnqzqqmvktddnrbqkqkr.zip?X-Amz-Expires=14400&X-Amz-Date=20180730T181003Z&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAJDGDIJFGLNVGZH6A/20180730/us-east-1/s3/aws4_request&X-Amz-SignedHeaders=host&X-Amz-Signature=dec990336913cef9933f0e269afe99722d7ab2830ebf2c618a75673ee7159fee' - this.stubGet = sinon - .stub() - .yields(null, { statusCode: 200 }, { body: this.body }) - return done() - }) - - describe('when all goes well', function () { - beforeEach(function (done) { - this.stubRequest.get = this.stubGet - return this.ExportsHandler.fetchDownload( - this.export_id, - 'zip', - (error, body) => { - this.callback(error, body) - return done() - } - ) - }) - - it('should issue the request', function () { - return expect(this.stubGet.getCall(0).args[0]).to.deep.equal({ - url: - this.settings.apis.v1.url + - '/api/v1/overleaf/exports/' + - this.export_id + - '/zip_url', - auth: { - user: this.settings.apis.v1.user, - pass: this.settings.apis.v1.pass, - }, - timeout: 15000, - }) - }) - - it('should return the v1 export id', function () { - return this.callback - .calledWith(null, { body: this.body }) - .should.equal(true) - }) - }) - }) -}) diff --git a/services/web/test/unit/src/FileStore/FileStoreController.test.mjs b/services/web/test/unit/src/FileStore/FileStoreController.test.mjs new file mode 100644 index 0000000000..ba0670d49c --- /dev/null +++ 
b/services/web/test/unit/src/FileStore/FileStoreController.test.mjs @@ -0,0 +1,255 @@ +import { expect, vi } from 'vitest' +import sinon from 'sinon' +import Errors from '../../../../app/src/Features/Errors/Errors.js' +import MockResponse from '../helpers/MockResponse.js' + +const MODULE_PATH = + '../../../../app/src/Features/FileStore/FileStoreController.mjs' + +const expectedFileHeaders = { + 'Cache-Control': 'private, max-age=3600', + 'X-Served-By': 'filestore', +} + +vi.mock('../../../../app/src/Features/Errors/Errors.js', () => + vi.importActual('../../../../app/src/Features/Errors/Errors.js') +) + +describe('FileStoreController', function () { + beforeEach(async function (ctx) { + ctx.FileStoreHandler = { + promises: { + getFileStream: sinon.stub(), + getFileSize: sinon.stub(), + }, + } + ctx.ProjectLocator = { promises: { findElement: sinon.stub() } } + ctx.Stream = { pipeline: sinon.stub().resolves() } + ctx.HistoryManager = {} + + vi.doMock('node:stream/promises', () => ctx.Stream) + + vi.doMock('@overleaf/settings', () => ({ + default: ctx.settings, + })) + + vi.doMock('../../../../app/src/Features/Project/ProjectLocator', () => ({ + default: ctx.ProjectLocator, + })) + + vi.doMock( + '../../../../app/src/Features/FileStore/FileStoreHandler', + () => ({ + default: ctx.FileStoreHandler, + }) + ) + + vi.doMock('../../../../app/src/Features/History/HistoryManager', () => ({ + default: ctx.HistoryManager, + })) + + ctx.controller = (await import(MODULE_PATH)).default + ctx.stream = {} + ctx.projectId = '2k3j1lk3j21lk3j' + ctx.fileId = '12321kklj1lk3jk12' + ctx.req = { + params: { + Project_id: ctx.projectId, + File_id: ctx.fileId, + }, + query: 'query string here', + get(key) { + return undefined + }, + logger: { + addFields: sinon.stub(), + }, + } + ctx.res = new MockResponse() + ctx.next = sinon.stub() + ctx.file = { name: 'myfile.png' } + }) + + describe('getFile', function () { + beforeEach(function (ctx) { + ctx.FileStoreHandler.promises.getFileStream.resolves(ctx.stream) + ctx.ProjectLocator.promises.findElement.resolves({ element: ctx.file }) + }) + + it('should call the file store handler with the project_id file_id and any query string', async function (ctx) { + await ctx.controller.getFile(ctx.req, ctx.res) + ctx.FileStoreHandler.promises.getFileStream.should.have.been.calledWith( + ctx.req.params.Project_id, + ctx.req.params.File_id, + ctx.req.query + ) + }) + + it('should pipe to res', async function (ctx) { + await ctx.controller.getFile(ctx.req, ctx.res) + ctx.Stream.pipeline.should.have.been.calledWith(ctx.stream, ctx.res) + }) + + it('should get the file from the db', async function (ctx) { + await ctx.controller.getFile(ctx.req, ctx.res) + ctx.ProjectLocator.promises.findElement.should.have.been.calledWith({ + project_id: ctx.projectId, + element_id: ctx.fileId, + type: 'file', + }) + }) + + it('should set the Content-Disposition header', async function (ctx) { + await ctx.controller.getFile(ctx.req, ctx.res) + ctx.res.setContentDisposition.should.be.calledWith('attachment', { + filename: ctx.file.name, + }) + }) + + it('should return a 404 when not found', async function (ctx) { + ctx.ProjectLocator.promises.findElement.rejects( + new Errors.NotFoundError() + ) + await ctx.controller.getFile(ctx.req, ctx.res) + expect(ctx.res.statusCode).to.equal(404) + }) + + // Test behaviour around handling html files + ;['.html', '.htm', '.xhtml'].forEach(extension => { + describe(`with a '${extension}' file extension`, function () { + beforeEach(function (ctx) { + ctx.file.name 
= `bad${extension}` + ctx.req.get = key => { + if (key === 'User-Agent') { + return 'A generic browser' + } + } + }) + + describe('from a non-ios browser', function () { + it('should not set Content-Type', async function (ctx) { + await ctx.controller.getFile(ctx.req, ctx.res) + ctx.res.headers.should.deep.equal({ + ...expectedFileHeaders, + }) + }) + }) + + describe('from an iPhone', function () { + beforeEach(function (ctx) { + ctx.req.get = key => { + if (key === 'User-Agent') { + return 'An iPhone browser' + } + } + }) + + it("should set Content-Type to 'text/plain'", async function (ctx) { + await ctx.controller.getFile(ctx.req, ctx.res) + ctx.res.headers.should.deep.equal({ + ...expectedFileHeaders, + 'Content-Type': 'text/plain; charset=utf-8', + 'X-Content-Type-Options': 'nosniff', + }) + }) + }) + + describe('from an iPad', function () { + beforeEach(function (ctx) { + ctx.req.get = key => { + if (key === 'User-Agent') { + return 'An iPad browser' + } + } + }) + + it("should set Content-Type to 'text/plain'", async function (ctx) { + await ctx.controller.getFile(ctx.req, ctx.res) + ctx.res.headers.should.deep.equal({ + ...expectedFileHeaders, + 'Content-Type': 'text/plain; charset=utf-8', + 'X-Content-Type-Options': 'nosniff', + }) + }) + }) + }) + }) + ;[ + // None of these should trigger the iOS/html logic + 'x.html-is-rad', + 'html.pdf', + '.html-is-good-for-hidden-files', + 'somefile', + ].forEach(filename => { + describe(`with filename as '${filename}'`, function () { + beforeEach(function (ctx) { + ctx.user_agent = 'A generic browser' + ctx.file.name = filename + ctx.req.get = key => { + if (key === 'User-Agent') { + return ctx.user_agent + } + } + }) + ;['iPhone', 'iPad', 'Firefox', 'Chrome'].forEach(browser => { + describe(`downloaded from ${browser}`, function () { + beforeEach(function (ctx) { + ctx.user_agent = `Some ${browser} thing` + }) + + it('Should not set the Content-type', async function (ctx) { + await ctx.controller.getFile(ctx.req, ctx.res) + ctx.res.headers.should.deep.equal({ + ...expectedFileHeaders, + }) + }) + }) + }) + }) + }) + }) + + describe('getFileHead', function () { + beforeEach(function (ctx) { + ctx.ProjectLocator.promises.findElement.resolves({ element: ctx.file }) + }) + + it('reports the file size', function (ctx) { + return new Promise(resolve => { + const expectedFileSize = 99393 + ctx.FileStoreHandler.promises.getFileSize.rejects( + new Error('getFileSize: unexpected arguments') + ) + ctx.FileStoreHandler.promises.getFileSize + .withArgs(ctx.projectId, ctx.fileId) + .resolves(expectedFileSize) + + ctx.res.end = () => { + expect(ctx.res.status.lastCall.args).to.deep.equal([200]) + expect(ctx.res.header.lastCall.args).to.deep.equal([ + 'Content-Length', + expectedFileSize, + ]) + resolve() + } + + ctx.controller.getFileHead(ctx.req, ctx.res) + }) + }) + + it('returns 404 on NotFoundError', function (ctx) { + return new Promise(resolve => { + ctx.FileStoreHandler.promises.getFileSize.rejects( + new Errors.NotFoundError() + ) + + ctx.res.end = () => { + expect(ctx.res.status.lastCall.args).to.deep.equal([404]) + resolve() + } + + ctx.controller.getFileHead(ctx.req, ctx.res) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/FileStore/FileStoreControllerTests.mjs b/services/web/test/unit/src/FileStore/FileStoreControllerTests.mjs deleted file mode 100644 index 2758068ce3..0000000000 --- a/services/web/test/unit/src/FileStore/FileStoreControllerTests.mjs +++ /dev/null @@ -1,235 +0,0 @@ -import { expect } from 'chai' -import sinon from 
'sinon' -import esmock from 'esmock' -import Errors from '../../../../app/src/Features/Errors/Errors.js' -import MockResponse from '../helpers/MockResponse.js' - -const MODULE_PATH = - '../../../../app/src/Features/FileStore/FileStoreController.mjs' - -const expectedFileHeaders = { - 'Cache-Control': 'private, max-age=3600', - 'X-Served-By': 'filestore', -} - -describe('FileStoreController', function () { - beforeEach(async function () { - this.FileStoreHandler = { - promises: { - getFileStream: sinon.stub(), - getFileSize: sinon.stub(), - }, - } - this.ProjectLocator = { promises: { findElement: sinon.stub() } } - this.Stream = { pipeline: sinon.stub().resolves() } - this.HistoryManager = {} - this.controller = await esmock.strict(MODULE_PATH, { - 'node:stream/promises': this.Stream, - '@overleaf/settings': this.settings, - '../../../../app/src/Features/Project/ProjectLocator': - this.ProjectLocator, - '../../../../app/src/Features/FileStore/FileStoreHandler': - this.FileStoreHandler, - '../../../../app/src/Features/History/HistoryManager': - this.HistoryManager, - }) - this.stream = {} - this.projectId = '2k3j1lk3j21lk3j' - this.fileId = '12321kklj1lk3jk12' - this.req = { - params: { - Project_id: this.projectId, - File_id: this.fileId, - }, - query: 'query string here', - get(key) { - return undefined - }, - logger: { - addFields: sinon.stub(), - }, - } - this.res = new MockResponse() - this.next = sinon.stub() - this.file = { name: 'myfile.png' } - }) - - describe('getFile', function () { - beforeEach(function () { - this.FileStoreHandler.promises.getFileStream.resolves(this.stream) - this.ProjectLocator.promises.findElement.resolves({ element: this.file }) - }) - - it('should call the file store handler with the project_id file_id and any query string', async function () { - await this.controller.getFile(this.req, this.res) - this.FileStoreHandler.promises.getFileStream.should.have.been.calledWith( - this.req.params.Project_id, - this.req.params.File_id, - this.req.query - ) - }) - - it('should pipe to res', async function () { - await this.controller.getFile(this.req, this.res) - this.Stream.pipeline.should.have.been.calledWith(this.stream, this.res) - }) - - it('should get the file from the db', async function () { - await this.controller.getFile(this.req, this.res) - this.ProjectLocator.promises.findElement.should.have.been.calledWith({ - project_id: this.projectId, - element_id: this.fileId, - type: 'file', - }) - }) - - it('should set the Content-Disposition header', async function () { - await this.controller.getFile(this.req, this.res) - this.res.setContentDisposition.should.be.calledWith('attachment', { - filename: this.file.name, - }) - }) - - it('should return a 404 when not found', async function () { - this.ProjectLocator.promises.findElement.rejects( - new Errors.NotFoundError() - ) - await this.controller.getFile(this.req, this.res) - expect(this.res.statusCode).to.equal(404) - }) - - // Test behaviour around handling html files - ;['.html', '.htm', '.xhtml'].forEach(extension => { - describe(`with a '${extension}' file extension`, function () { - beforeEach(function () { - this.file.name = `bad${extension}` - this.req.get = key => { - if (key === 'User-Agent') { - return 'A generic browser' - } - } - }) - - describe('from a non-ios browser', function () { - it('should not set Content-Type', async function () { - await this.controller.getFile(this.req, this.res) - this.res.headers.should.deep.equal({ - ...expectedFileHeaders, - }) - }) - }) - - describe('from an iPhone', 
function () { - beforeEach(function () { - this.req.get = key => { - if (key === 'User-Agent') { - return 'An iPhone browser' - } - } - }) - - it("should set Content-Type to 'text/plain'", async function () { - await this.controller.getFile(this.req, this.res) - this.res.headers.should.deep.equal({ - ...expectedFileHeaders, - 'Content-Type': 'text/plain; charset=utf-8', - 'X-Content-Type-Options': 'nosniff', - }) - }) - }) - - describe('from an iPad', function () { - beforeEach(function () { - this.req.get = key => { - if (key === 'User-Agent') { - return 'An iPad browser' - } - } - }) - - it("should set Content-Type to 'text/plain'", async function () { - await this.controller.getFile(this.req, this.res) - this.res.headers.should.deep.equal({ - ...expectedFileHeaders, - 'Content-Type': 'text/plain; charset=utf-8', - 'X-Content-Type-Options': 'nosniff', - }) - }) - }) - }) - }) - ;[ - // None of these should trigger the iOS/html logic - 'x.html-is-rad', - 'html.pdf', - '.html-is-good-for-hidden-files', - 'somefile', - ].forEach(filename => { - describe(`with filename as '${filename}'`, function () { - beforeEach(function () { - this.user_agent = 'A generic browser' - this.file.name = filename - this.req.get = key => { - if (key === 'User-Agent') { - return this.user_agent - } - } - }) - ;['iPhone', 'iPad', 'Firefox', 'Chrome'].forEach(browser => { - describe(`downloaded from ${browser}`, function () { - beforeEach(function () { - this.user_agent = `Some ${browser} thing` - }) - - it('Should not set the Content-type', async function () { - await this.controller.getFile(this.req, this.res) - this.res.headers.should.deep.equal({ - ...expectedFileHeaders, - }) - }) - }) - }) - }) - }) - }) - - describe('getFileHead', function () { - beforeEach(function () { - this.ProjectLocator.promises.findElement.resolves({ element: this.file }) - }) - - it('reports the file size', function (done) { - const expectedFileSize = 99393 - this.FileStoreHandler.promises.getFileSize.rejects( - new Error('getFileSize: unexpected arguments') - ) - this.FileStoreHandler.promises.getFileSize - .withArgs(this.projectId, this.fileId) - .resolves(expectedFileSize) - - this.res.end = () => { - expect(this.res.status.lastCall.args).to.deep.equal([200]) - expect(this.res.header.lastCall.args).to.deep.equal([ - 'Content-Length', - expectedFileSize, - ]) - done() - } - - this.controller.getFileHead(this.req, this.res) - }) - - it('returns 404 on NotFoundError', function (done) { - this.FileStoreHandler.promises.getFileSize.rejects( - new Errors.NotFoundError() - ) - - this.res.end = () => { - expect(this.res.status.lastCall.args).to.deep.equal([404]) - done() - } - - this.controller.getFileHead(this.req, this.res) - }) - }) -}) diff --git a/services/web/test/unit/src/HelperFiles/AuthorizationHelperTests.js b/services/web/test/unit/src/HelperFiles/AuthorizationHelperTests.js index ef8b5fcc6a..a82143dce6 100644 --- a/services/web/test/unit/src/HelperFiles/AuthorizationHelperTests.js +++ b/services/web/test/unit/src/HelperFiles/AuthorizationHelperTests.js @@ -63,72 +63,4 @@ describe('AuthorizationHelper', function () { expect(this.AuthorizationHelper.hasAnyStaffAccess(user)).to.be.false }) }) - - describe('isReviewerRoleEnabled', function () { - it('with no reviewers and no split test', async function () { - this.ProjectGetter.promises.getProject = sinon.stub().resolves({ - reviewer_refs: {}, - owner_ref: 'ownerId', - }) - this.SplitTestHandler.promises.getAssignmentForUser = sinon - .stub() - .resolves({ - variant: 'disabled', 
- }) - expect( - await this.AuthorizationHelper.promises.isReviewerRoleEnabled( - 'projectId' - ) - ).to.be.false - }) - - it('with no reviewers and enabled split test', async function () { - this.ProjectGetter.promises.getProject = sinon.stub().resolves({ - reviewer_refs: {}, - owner_ref: 'userId', - }) - this.SplitTestHandler.promises.getAssignmentForUser = sinon - .stub() - .resolves({ - variant: 'enabled', - }) - expect( - await this.AuthorizationHelper.promises.isReviewerRoleEnabled( - 'projectId' - ) - ).to.be.true - }) - - it('with reviewers and disabled split test', async function () { - this.ProjectGetter.promises.getProject = sinon.stub().resolves({ - reviewer_refs: [{ $oid: 'userId' }], - }) - this.SplitTestHandler.promises.getAssignmentForUser = sinon - .stub() - .resolves({ - variant: 'default', - }) - expect( - await this.AuthorizationHelper.promises.isReviewerRoleEnabled( - 'projectId' - ) - ).to.be.true - }) - - it('with reviewers and enabled split test', async function () { - this.ProjectGetter.promises.getProject = sinon.stub().resolves({ - reviewer_refs: [{ $oid: 'userId' }], - }) - this.SplitTestHandler.promises.getAssignmentForUser = sinon - .stub() - .resolves({ - variant: 'enabled', - }) - expect( - await this.AuthorizationHelper.promises.isReviewerRoleEnabled( - 'projectId' - ) - ).to.be.true - }) - }) }) diff --git a/services/web/test/unit/src/History/HistoryManagerTests.js b/services/web/test/unit/src/History/HistoryManagerTests.js index 6b83d15ed0..aa59cda4e6 100644 --- a/services/web/test/unit/src/History/HistoryManagerTests.js +++ b/services/web/test/unit/src/History/HistoryManagerTests.js @@ -3,9 +3,9 @@ const sinon = require('sinon') const SandboxedModule = require('sandboxed-module') const { ObjectId } = require('mongodb-legacy') const { - connectionPromise, cleanupTestDatabase, db, + waitForDb, } = require('../../../../app/src/infrastructure/mongodb') const MODULE_PATH = '../../../../app/src/Features/History/HistoryManager' @@ -19,7 +19,7 @@ const GLOBAL_BLOBS = { describe('HistoryManager', function () { before(async function () { - await connectionPromise + await waitForDb() }) before(cleanupTestDatabase) before(async function () { @@ -90,7 +90,7 @@ describe('HistoryManager', function () { this.HistoryManager = SandboxedModule.require(MODULE_PATH, { requires: { - '../../infrastructure/mongodb': { ObjectId, db }, + '../../infrastructure/mongodb': { ObjectId, db, waitForDb }, '@overleaf/fetch-utils': this.FetchUtils, '@overleaf/settings': this.settings, '../User/UserGetter': this.UserGetter, diff --git a/services/web/test/unit/src/History/RestoreManagerTests.js b/services/web/test/unit/src/History/RestoreManagerTests.js index 2474425bfb..f76ba506ad 100644 --- a/services/web/test/unit/src/History/RestoreManagerTests.js +++ b/services/web/test/unit/src/History/RestoreManagerTests.js @@ -9,6 +9,12 @@ const tk = require('timekeeper') const moment = require('moment') const { expect } = require('chai') +function nestedMapWithSetToObject(m) { + return Object.fromEntries( + Array.from(m.entries()).map(([key, set]) => [key, Array.from(set)]) + ) +} + describe('RestoreManager', function () { beforeEach(function () { tk.freeze(Date.now()) // freeze the time for these tests @@ -28,7 +34,7 @@ describe('RestoreManager', function () { promises: { flushProjectToMongo: sinon.stub().resolves() }, }), '../Docstore/DocstoreManager': (this.DocstoreManager = { - promises: {}, + promises: { getCommentThreadIds: sinon.stub().resolves({}) }, }), '../Chat/ChatApiHandler': 
(this.ChatApiHandler = { promises: {} }), '../Chat/ChatManager': (this.ChatManager = { promises: {} }), @@ -260,22 +266,33 @@ describe('RestoreManager', function () { beforeEach(function () { this.pathname = 'foo.tex' this.comments = [ - { op: { t: 'comment-in-other-doc', p: 0, c: 'foo' } }, - { op: { t: 'single-comment', p: 10, c: 'bar' } }, - { op: { t: 'deleted-comment', p: 20, c: 'baz' } }, + { + id: 'comment-in-other-doc', + op: { t: 'comment-in-other-doc', p: 0, c: 'foo' }, + }, + { + id: 'single-comment', + op: { t: 'single-comment', p: 10, c: 'bar' }, + }, + { + id: 'deleted-comment', + op: { t: 'deleted-comment', p: 20, c: 'baz' }, + }, ] this.remappedComments = [ - { op: { t: 'duplicate-comment', p: 0, c: 'foo' } }, - { op: { t: 'single-comment', p: 10, c: 'bar' } }, + { + id: 'duplicate-comment', + op: { t: 'duplicate-comment', p: 0, c: 'foo' }, + }, + { + id: 'single-comment', + op: { t: 'single-comment', p: 10, c: 'bar' }, + }, ] this.ProjectLocator.promises.findElementByPath = sinon.stub().rejects() - this.DocstoreManager.promises.getAllRanges = sinon.stub().resolves([ - { - ranges: { - comments: this.comments.slice(0, 1), - }, - }, - ]) + this.DocstoreManager.promises.getCommentThreadIds = sinon + .stub() + .resolves({ 'other-doc': [this.comments[0].op.t] }) this.ChatApiHandler.promises.duplicateCommentThreads = sinon .stub() .resolves({ @@ -355,7 +372,7 @@ describe('RestoreManager', function () { expect( this.DocumentUpdaterHandler.promises.flushProjectToMongo ).to.have.been.calledBefore( - this.DocstoreManager.promises.getAllRanges + this.DocstoreManager.promises.getCommentThreadIds ) }) @@ -451,19 +468,11 @@ describe('RestoreManager', function () { ) }) - it('should delete the document before flushing', function () { - expect( - this.EditorController.promises.deleteEntity - ).to.have.been.calledBefore( - this.DocumentUpdaterHandler.promises.flushProjectToMongo - ) - }) - it('should flush the document before fetching ranges', function () { expect( this.DocumentUpdaterHandler.promises.flushProjectToMongo ).to.have.been.calledBefore( - this.DocstoreManager.promises.getAllRanges + this.DocstoreManager.promises.getCommentThreadIds ) }) @@ -499,6 +508,143 @@ describe('RestoreManager', function () { ) }) }) + + describe('with comments in same doc', function () { + // copy of the above, addition: inject and later inspect threadIds set + beforeEach(async function () { + this.ProjectLocator.promises.findElementByPath = sinon + .stub() + .resolves({ type: 'doc', element: { _id: 'mock-file-id' } }) + this.EditorController.promises.deleteEntity = sinon.stub().resolves() + this.ChatApiHandler.promises.generateThreadData = sinon + .stub() + .resolves( + (this.threadData = { + [this.comments[0].op.t]: { + messages: [ + { + content: 'message', + timestamp: '2024-01-01T00:00:00.000Z', + user_id: 'user-1', + }, + ], + }, + [this.comments[1].op.t]: { + messages: [ + { + content: 'other message', + timestamp: '2024-01-01T00:00:00.000Z', + user_id: 'user-1', + }, + ], + }, + }) + ) + + this.threadIds = new Map([ + [ + 'mock-file-id', + new Set([this.comments[0].op.t, this.comments[1].op.t]), + ], + ]) + // Comments are updated in-place. Look up threads before reverting. 
+ this.afterThreadIds = { + // mock-file-id removed + [this.addedFile._id]: [ + this.comments[0].op.t, + this.comments[1].op.t, + ], + } + this.data = await this.RestoreManager.promises._revertSingleFile( + this.user_id, + this.project_id, + this.version, + this.pathname, + this.threadIds + ) + }) + + it('should import the file with original comments minus the deleted one', function () { + expect( + this.EditorController.promises.addDocWithRanges + ).to.have.been.calledWith( + this.project_id, + this.folder_id, + 'foo.tex', + ['foo', 'bar', 'baz'], + { + changes: this.tracked_changes, + comments: this.comments.slice(0, 2), + }, + { + kind: 'file-restore', + path: this.pathname, + version: this.version, + timestamp: new Date(this.endTs).toISOString(), + } + ) + }) + + it('should add the seen thread ids to the map', function () { + expect(nestedMapWithSetToObject(this.threadIds)).to.deep.equal( + this.afterThreadIds + ) + }) + }) + + describe('with remapped comments during revertProject', function () { + // copy of the above, addition: inject and later inspect threadIds set + beforeEach(async function () { + this.ProjectLocator.promises.findElementByPath = sinon + .stub() + .resolves({ type: 'doc', element: { _id: 'mock-file-id' } }) + this.EditorController.promises.deleteEntity = sinon.stub().resolves() + + this.threadIds = new Map([ + ['other-doc', new Set([this.comments[0].op.t])], + ]) + // Comments are updated in-place. Look up threads before reverting. + this.afterThreadIds = { + // mock-file-id removed + 'other-doc': [this.comments[0].op.t], + [this.addedFile._id]: [ + this.remappedComments[0].op.t, + this.remappedComments[1].op.t, + ], + } + this.data = await this.RestoreManager.promises._revertSingleFile( + this.user_id, + this.project_id, + this.version, + this.pathname, + this.threadIds + ) + }) + + it('should import the file', function () { + expect( + this.EditorController.promises.addDocWithRanges + ).to.have.been.calledWith( + this.project_id, + this.folder_id, + 'foo.tex', + ['foo', 'bar', 'baz'], + { changes: this.tracked_changes, comments: this.remappedComments }, + { + kind: 'file-restore', + path: this.pathname, + version: this.version, + timestamp: new Date(this.endTs).toISOString(), + } + ) + }) + + it('should add the seen thread ids to the map', function () { + expect(nestedMapWithSetToObject(this.threadIds)).to.deep.equal( + this.afterThreadIds + ) + }) + }) }) describe('reverting a file or document with metadata', function () { @@ -524,7 +670,9 @@ describe('RestoreManager', function () { .stub() .resolves((this.addedFile = { _id: 'mock-doc-id', type: 'doc' })) - this.DocstoreManager.promises.getAllRanges = sinon.stub().resolves([]) + this.DocstoreManager.promises.getCommentThreadIds = sinon + .stub() + .resolves({}) this.ChatApiHandler.promises.generateThreadData = sinon .stub() .resolves({}) @@ -741,7 +889,7 @@ describe('RestoreManager', function () { this.ProjectGetter.promises.getProject .withArgs(this.project_id) .resolves({ overleaf: { history: { rangesSupportEnabled: true } } }) - this.RestoreManager.promises.revertFile = sinon.stub().resolves() + this.RestoreManager.promises._revertSingleFile = sinon.stub().resolves() this.RestoreManager.promises._getProjectPathsAtVersion = sinon .stub() .resolves([]) @@ -832,21 +980,27 @@ describe('RestoreManager', function () { }) it('should revert the old files', function () { - expect(this.RestoreManager.promises.revertFile).to.have.been.calledWith( + expect( + this.RestoreManager.promises._revertSingleFile + 
).to.have.been.calledWith( this.user_id, this.project_id, this.version, 'main.tex' ) - expect(this.RestoreManager.promises.revertFile).to.have.been.calledWith( + expect( + this.RestoreManager.promises._revertSingleFile + ).to.have.been.calledWith( this.user_id, this.project_id, this.version, 'figures/image.png' ) - expect(this.RestoreManager.promises.revertFile).to.have.been.calledWith( + expect( + this.RestoreManager.promises._revertSingleFile + ).to.have.been.calledWith( this.user_id, this.project_id, this.version, @@ -856,7 +1010,7 @@ describe('RestoreManager', function () { it('should not revert the current files', function () { expect( - this.RestoreManager.promises.revertFile + this.RestoreManager.promises._revertSingleFile ).to.not.have.been.calledWith( this.user_id, this.project_id, diff --git a/services/web/test/unit/src/LinkedFiles/LinkedFilesController.test.mjs b/services/web/test/unit/src/LinkedFiles/LinkedFilesController.test.mjs new file mode 100644 index 0000000000..e712d17198 --- /dev/null +++ b/services/web/test/unit/src/LinkedFiles/LinkedFilesController.test.mjs @@ -0,0 +1,204 @@ +import { expect, vi } from 'vitest' +import sinon from 'sinon' +const modulePath = + '../../../../app/src/Features/LinkedFiles/LinkedFilesController.mjs' + +describe('LinkedFilesController', function () { + beforeEach(function (ctx) { + ctx.fakeTime = new Date() + ctx.clock = sinon.useFakeTimers(ctx.fakeTime.getTime()) + }) + + afterEach(function (ctx) { + ctx.clock.restore() + }) + + beforeEach(async function (ctx) { + ctx.userId = 'user-id' + ctx.Agent = { + promises: { + createLinkedFile: sinon.stub().resolves(), + refreshLinkedFile: sinon.stub().resolves(), + }, + } + ctx.projectId = 'projectId' + ctx.provider = 'provider' + ctx.fileName = 'linked-file-name' + ctx.data = { customAgentData: 'foo' } + ctx.LinkedFilesHandler = { + promises: { + getFileById: sinon.stub(), + }, + } + ctx.AnalyticsManager = {} + ctx.SessionManager = { + getLoggedInUserId: sinon.stub().returns(ctx.userId), + } + ctx.EditorRealTimeController = {} + ctx.ReferencesHandler = {} + ctx.UrlAgent = {} + ctx.ProjectFileAgent = {} + ctx.ProjectOutputFileAgent = {} + ctx.EditorController = {} + ctx.ProjectLocator = {} + ctx.logger = { + error: sinon.stub(), + } + ctx.settings = { enabledLinkedFileTypes: [] } + + vi.doMock( + '.../../../../app/src/Features/Authentication/SessionManager', + () => ({ + default: ctx.SessionManager, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Analytics/AnalyticsManager', + () => ({ + default: ctx.AnalyticsManager, + }) + ) + + vi.doMock( + '../../../../app/src/Features/LinkedFiles/LinkedFilesHandler', + () => ({ + default: ctx.LinkedFilesHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Editor/EditorRealTimeController', + () => ({ + default: ctx.EditorRealTimeController, + }) + ) + + vi.doMock( + '../../../../app/src/Features/References/ReferencesHandler', + () => ({ + default: ctx.ReferencesHandler, + }) + ) + + vi.doMock('../../../../app/src/Features/LinkedFiles/UrlAgent', () => ({ + default: ctx.UrlAgent, + })) + + vi.doMock( + '../../../../app/src/Features/LinkedFiles/ProjectFileAgent', + () => ({ + default: ctx.ProjectFileAgent, + }) + ) + + vi.doMock( + '../../../../app/src/Features/LinkedFiles/ProjectOutputFileAgent', + () => ({ + default: ctx.ProjectOutputFileAgent, + }) + ) + + vi.doMock('../../../../app/src/Features/Editor/EditorController', () => ({ + default: ctx.EditorController, + })) + + vi.doMock('../../../../app/src/Features/Project/ProjectLocator', () 
=> ({ + default: ctx.ProjectLocator, + })) + + vi.doMock('@overleaf/logger', () => ({ + default: ctx.logger, + })) + + vi.doMock('@overleaf/settings', () => ({ + default: ctx.settings, + })) + + ctx.LinkedFilesController = (await import(modulePath)).default + ctx.LinkedFilesController._getAgent = sinon.stub().resolves(ctx.Agent) + }) + + describe('createLinkedFile', function () { + beforeEach(function (ctx) { + ctx.req = { + params: { project_id: ctx.projectId }, + body: { + name: ctx.fileName, + provider: ctx.provider, + data: ctx.data, + }, + } + ctx.next = sinon.stub() + }) + + it('sets importedAt timestamp on linkedFileData', function (ctx) { + return new Promise(resolve => { + ctx.next = sinon.stub().callsFake(() => resolve('unexpected error')) + ctx.res = { + json: () => { + expect(ctx.Agent.promises.createLinkedFile).to.have.been.calledWith( + ctx.projectId, + { ...ctx.data, importedAt: ctx.fakeTime.toISOString() }, + ctx.fileName, + undefined, + ctx.userId + ) + resolve() + }, + } + ctx.LinkedFilesController.createLinkedFile(ctx.req, ctx.res, ctx.next) + }) + }) + }) + describe('refreshLinkedFiles', function () { + beforeEach(function (ctx) { + ctx.data.provider = ctx.provider + ctx.file = { + name: ctx.fileName, + linkedFileData: { + ...ctx.data, + importedAt: new Date(2020, 1, 1).toISOString(), + }, + } + ctx.LinkedFilesHandler.promises.getFileById + .withArgs(ctx.projectId, 'file-id') + .resolves({ + file: ctx.file, + path: 'fake-path', + parentFolder: { + _id: 'parent-folder-id', + }, + }) + ctx.req = { + params: { project_id: ctx.projectId, file_id: 'file-id' }, + body: {}, + } + ctx.next = sinon.stub() + }) + + it('resets importedAt timestamp on linkedFileData', function (ctx) { + return new Promise(resolve => { + ctx.next = sinon.stub().callsFake(() => resolve('unexpected error')) + ctx.res = { + json: () => { + expect( + ctx.Agent.promises.refreshLinkedFile + ).to.have.been.calledWith( + ctx.projectId, + { + ...ctx.data, + importedAt: ctx.fakeTime.toISOString(), + }, + ctx.name, + 'parent-folder-id', + ctx.userId + ) + resolve() + }, + } + ctx.LinkedFilesController.refreshLinkedFile(ctx.req, ctx.res, ctx.next) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/LinkedFiles/LinkedFilesControllerTests.mjs b/services/web/test/unit/src/LinkedFiles/LinkedFilesControllerTests.mjs deleted file mode 100644 index f1b7b58c10..0000000000 --- a/services/web/test/unit/src/LinkedFiles/LinkedFilesControllerTests.mjs +++ /dev/null @@ -1,155 +0,0 @@ -import { expect } from 'chai' -import esmock from 'esmock' -import sinon from 'sinon' -const modulePath = - '../../../../app/src/Features/LinkedFiles/LinkedFilesController.mjs' - -describe('LinkedFilesController', function () { - beforeEach(function () { - this.fakeTime = new Date() - this.clock = sinon.useFakeTimers(this.fakeTime.getTime()) - }) - - afterEach(function () { - this.clock.restore() - }) - - beforeEach(async function () { - this.userId = 'user-id' - this.Agent = { - promises: { - createLinkedFile: sinon.stub().resolves(), - refreshLinkedFile: sinon.stub().resolves(), - }, - } - this.projectId = 'projectId' - this.provider = 'provider' - this.name = 'linked-file-name' - this.data = { customAgentData: 'foo' } - this.LinkedFilesHandler = { - promises: { - getFileById: sinon.stub(), - }, - } - this.AnalyticsManager = {} - this.SessionManager = { - getLoggedInUserId: sinon.stub().returns(this.userId), - } - this.EditorRealTimeController = {} - this.ReferencesHandler = {} - this.UrlAgent = {} - this.ProjectFileAgent = {} - 
this.ProjectOutputFileAgent = {} - this.EditorController = {} - this.ProjectLocator = {} - this.logger = { - error: sinon.stub(), - } - this.settings = { enabledLinkedFileTypes: [] } - this.LinkedFilesController = await esmock.strict(modulePath, { - '.../../../../app/src/Features/Authentication/SessionManager': - this.SessionManager, - '../../../../app/src/Features/Analytics/AnalyticsManager': - this.AnalyticsManager, - '../../../../app/src/Features/LinkedFiles/LinkedFilesHandler': - this.LinkedFilesHandler, - '../../../../app/src/Features/Editor/EditorRealTimeController': - this.EditorRealTimeController, - '../../../../app/src/Features/References/ReferencesHandler': - this.ReferencesHandler, - '../../../../app/src/Features/LinkedFiles/UrlAgent': this.UrlAgent, - '../../../../app/src/Features/LinkedFiles/ProjectFileAgent': - this.ProjectFileAgent, - '../../../../app/src/Features/LinkedFiles/ProjectOutputFileAgent': - this.ProjectOutputFileAgent, - '../../../../app/src/Features/Editor/EditorController': - this.EditorController, - '../../../../app/src/Features/Project/ProjectLocator': - this.ProjectLocator, - '@overleaf/logger': this.logger, - '@overleaf/settings': this.settings, - }) - this.LinkedFilesController._getAgent = sinon.stub().resolves(this.Agent) - }) - - describe('createLinkedFile', function () { - beforeEach(function () { - this.req = { - params: { project_id: this.projectId }, - body: { - name: this.name, - provider: this.provider, - data: this.data, - }, - } - this.next = sinon.stub() - }) - - it('sets importedAt timestamp on linkedFileData', function (done) { - this.next = sinon.stub().callsFake(() => done('unexpected error')) - this.res = { - json: () => { - expect(this.Agent.promises.createLinkedFile).to.have.been.calledWith( - this.projectId, - { ...this.data, importedAt: this.fakeTime.toISOString() }, - this.name, - undefined, - this.userId - ) - done() - }, - } - this.LinkedFilesController.createLinkedFile(this.req, this.res, this.next) - }) - }) - describe('refreshLinkedFiles', function () { - beforeEach(function () { - this.data.provider = this.provider - this.file = { - name: this.name, - linkedFileData: { - ...this.data, - importedAt: new Date(2020, 1, 1).toISOString(), - }, - } - this.LinkedFilesHandler.promises.getFileById - .withArgs(this.projectId, 'file-id') - .resolves({ - file: this.file, - path: 'fake-path', - parentFolder: { - _id: 'parent-folder-id', - }, - }) - this.req = { - params: { project_id: this.projectId, file_id: 'file-id' }, - body: {}, - } - this.next = sinon.stub() - }) - - it('resets importedAt timestamp on linkedFileData', function (done) { - this.next = sinon.stub().callsFake(() => done('unexpected error')) - this.res = { - json: () => { - expect(this.Agent.promises.refreshLinkedFile).to.have.been.calledWith( - this.projectId, - { - ...this.data, - importedAt: this.fakeTime.toISOString(), - }, - this.name, - 'parent-folder-id', - this.userId - ) - done() - }, - } - this.LinkedFilesController.refreshLinkedFile( - this.req, - this.res, - this.next - ) - }) - }) -}) diff --git a/services/web/test/unit/src/Metadata/MetaControllerTests.mjs b/services/web/test/unit/src/Metadata/MetaController.test.mjs similarity index 58% rename from services/web/test/unit/src/Metadata/MetaControllerTests.mjs rename to services/web/test/unit/src/Metadata/MetaController.test.mjs index 5695d289f7..ee3488137a 100644 --- a/services/web/test/unit/src/Metadata/MetaControllerTests.mjs +++ b/services/web/test/unit/src/Metadata/MetaController.test.mjs @@ -1,31 +1,37 @@ 
-import { expect } from 'chai' +import { expect, vi } from 'vitest' import sinon from 'sinon' -import esmock from 'esmock' import MockResponse from '../helpers/MockResponse.js' const modulePath = '../../../../app/src/Features/Metadata/MetaController.mjs' describe('MetaController', function () { - beforeEach(async function () { - this.EditorRealTimeController = { + beforeEach(async function (ctx) { + ctx.EditorRealTimeController = { emitToRoom: sinon.stub(), } - this.MetaHandler = { + ctx.MetaHandler = { promises: { getAllMetaForProject: sinon.stub(), getMetaForDoc: sinon.stub(), }, } - this.MetadataController = await esmock.strict(modulePath, { - '../../../../app/src/Features/Editor/EditorRealTimeController': - this.EditorRealTimeController, - '../../../../app/src/Features/Metadata/MetaHandler': this.MetaHandler, - }) + vi.doMock( + '../../../../app/src/Features/Editor/EditorRealTimeController', + () => ({ + default: ctx.EditorRealTimeController, + }) + ) + + vi.doMock('../../../../app/src/Features/Metadata/MetaHandler', () => ({ + default: ctx.MetaHandler, + })) + + ctx.MetadataController = (await import(modulePath)).default }) describe('getMetadata', function () { - it('should respond with json', async function () { + it('should respond with json', async function (ctx) { const projectMeta = { 'doc-id': { labels: ['foo'], @@ -34,7 +40,7 @@ describe('MetaController', function () { }, } - this.MetaHandler.promises.getAllMetaForProject = sinon + ctx.MetaHandler.promises.getAllMetaForProject = sinon .stub() .resolves(projectMeta) @@ -42,9 +48,9 @@ describe('MetaController', function () { const res = new MockResponse() const next = sinon.stub() - await this.MetadataController.getMetadata(req, res, next) + await ctx.MetadataController.getMetadata(req, res, next) - this.MetaHandler.promises.getAllMetaForProject.should.have.been.calledWith( + ctx.MetaHandler.promises.getAllMetaForProject.should.have.been.calledWith( 'project-id' ) res.json.should.have.been.calledOnceWith({ @@ -54,8 +60,8 @@ describe('MetaController', function () { next.should.not.have.been.called }) - it('should handle an error', async function () { - this.MetaHandler.promises.getAllMetaForProject = sinon + it('should handle an error', async function (ctx) { + ctx.MetaHandler.promises.getAllMetaForProject = sinon .stub() .throws(new Error('woops')) @@ -63,9 +69,9 @@ describe('MetaController', function () { const res = new MockResponse() const next = sinon.stub() - await this.MetadataController.getMetadata(req, res, next) + await ctx.MetadataController.getMetadata(req, res, next) - this.MetaHandler.promises.getAllMetaForProject.should.have.been.calledWith( + ctx.MetaHandler.promises.getAllMetaForProject.should.have.been.calledWith( 'project-id' ) res.json.should.not.have.been.called @@ -74,14 +80,14 @@ describe('MetaController', function () { }) describe('broadcastMetadataForDoc', function () { - it('should broadcast on broadcast:true ', async function () { - this.MetaHandler.promises.getMetaForDoc = sinon.stub().resolves({ + it('should broadcast on broadcast:true ', async function (ctx) { + ctx.MetaHandler.promises.getMetaForDoc = sinon.stub().resolves({ labels: ['foo'], packages: { a: { commands: [] } }, packageNames: ['a'], }) - this.EditorRealTimeController.emitToRoom = sinon.stub() + ctx.EditorRealTimeController.emitToRoom = sinon.stub() const req = { params: { project_id: 'project-id', doc_id: 'doc-id' }, @@ -90,32 +96,32 @@ describe('MetaController', function () { const res = new MockResponse() const next = sinon.stub() - 
await this.MetadataController.broadcastMetadataForDoc(req, res, next) + await ctx.MetadataController.broadcastMetadataForDoc(req, res, next) - this.MetaHandler.promises.getMetaForDoc.should.have.been.calledWith( + ctx.MetaHandler.promises.getMetaForDoc.should.have.been.calledWith( 'project-id' ) res.json.should.not.have.been.called res.sendStatus.should.have.been.calledOnceWith(200) next.should.not.have.been.called - this.EditorRealTimeController.emitToRoom.should.have.been.calledOnce - const { lastCall } = this.EditorRealTimeController.emitToRoom + ctx.EditorRealTimeController.emitToRoom.should.have.been.calledOnce + const { lastCall } = ctx.EditorRealTimeController.emitToRoom expect(lastCall.args[0]).to.equal('project-id') expect(lastCall.args[1]).to.equal('broadcastDocMeta') expect(lastCall.args[2]).to.have.all.keys(['docId', 'meta']) }) - it('should return json on broadcast:false ', async function () { + it('should return json on broadcast:false ', async function (ctx) { const docMeta = { labels: ['foo'], packages: { a: [] }, packageNames: ['a'], } - this.MetaHandler.promises.getMetaForDoc = sinon.stub().resolves(docMeta) + ctx.MetaHandler.promises.getMetaForDoc = sinon.stub().resolves(docMeta) - this.EditorRealTimeController.emitToRoom = sinon.stub() + ctx.EditorRealTimeController.emitToRoom = sinon.stub() const req = { params: { project_id: 'project-id', doc_id: 'doc-id' }, @@ -124,12 +130,12 @@ describe('MetaController', function () { const res = new MockResponse() const next = sinon.stub() - await this.MetadataController.broadcastMetadataForDoc(req, res, next) + await ctx.MetadataController.broadcastMetadataForDoc(req, res, next) - this.MetaHandler.promises.getMetaForDoc.should.have.been.calledWith( + ctx.MetaHandler.promises.getMetaForDoc.should.have.been.calledWith( 'project-id' ) - this.EditorRealTimeController.emitToRoom.should.not.have.been.called + ctx.EditorRealTimeController.emitToRoom.should.not.have.been.called res.json.should.have.been.calledOnceWith({ docId: 'doc-id', meta: docMeta, @@ -137,12 +143,12 @@ describe('MetaController', function () { next.should.not.have.been.called }) - it('should handle an error', async function () { - this.MetaHandler.promises.getMetaForDoc = sinon + it('should handle an error', async function (ctx) { + ctx.MetaHandler.promises.getMetaForDoc = sinon .stub() .throws(new Error('woops')) - this.EditorRealTimeController.emitToRoom = sinon.stub() + ctx.EditorRealTimeController.emitToRoom = sinon.stub() const req = { params: { project_id: 'project-id', doc_id: 'doc-id' }, @@ -151,9 +157,9 @@ describe('MetaController', function () { const res = new MockResponse() const next = sinon.stub() - await this.MetadataController.broadcastMetadataForDoc(req, res, next) + await ctx.MetadataController.broadcastMetadataForDoc(req, res, next) - this.MetaHandler.promises.getMetaForDoc.should.have.been.calledWith( + ctx.MetaHandler.promises.getMetaForDoc.should.have.been.calledWith( 'project-id' ) res.json.should.not.have.been.called diff --git a/services/web/test/unit/src/Metadata/MetaHandlerTests.mjs b/services/web/test/unit/src/Metadata/MetaHandler.test.mjs similarity index 71% rename from services/web/test/unit/src/Metadata/MetaHandlerTests.mjs rename to services/web/test/unit/src/Metadata/MetaHandler.test.mjs index 289fd0b164..48d5cc51a4 100644 --- a/services/web/test/unit/src/Metadata/MetaHandlerTests.mjs +++ b/services/web/test/unit/src/Metadata/MetaHandler.test.mjs @@ -1,15 +1,14 @@ -import { expect } from 'chai' +import { expect, vi } from 'vitest' 
import sinon from 'sinon' -import esmock from 'esmock' const modulePath = '../../../../app/src/Features/Metadata/MetaHandler.mjs' describe('MetaHandler', function () { - beforeEach(async function () { - this.projectId = 'someprojectid' - this.docId = 'somedocid' + beforeEach(async function (ctx) { + ctx.projectId = 'someprojectid' + ctx.docId = 'somedocid' - this.lines = [ + ctx.lines = [ '\\usepackage{ foo, bar }', '\\usepackage{baz}', 'one', @@ -23,28 +22,28 @@ describe('MetaHandler', function () { '\\begin{lstlisting}[label={lst:foo},caption={Test}]', // lst:foo should be in the returned labels ] - this.docs = { - [this.docId]: { - _id: this.docId, - lines: this.lines, + ctx.docs = { + [ctx.docId]: { + _id: ctx.docId, + lines: ctx.lines, }, } - this.ProjectEntityHandler = { + ctx.ProjectEntityHandler = { promises: { - getAllDocs: sinon.stub().resolves(this.docs), - getDoc: sinon.stub().resolves(this.docs[this.docId]), + getAllDocs: sinon.stub().resolves(ctx.docs), + getDoc: sinon.stub().resolves(ctx.docs[ctx.docId]), }, } - this.DocumentUpdaterHandler = { + ctx.DocumentUpdaterHandler = { promises: { flushDocToMongo: sinon.stub().resolves(), flushProjectToMongo: sinon.stub().resolves(), }, } - this.packageMapping = { + ctx.packageMapping = { foo: [ { caption: '\\bar', @@ -69,47 +68,58 @@ describe('MetaHandler', function () { ], } - this.MetaHandler = await esmock.strict(modulePath, { - '../../../../app/src/Features/Project/ProjectEntityHandler': - this.ProjectEntityHandler, - '../../../../app/src/Features/DocumentUpdater/DocumentUpdaterHandler': - this.DocumentUpdaterHandler, - '../../../../app/src/Features/Metadata/packageMapping': - this.packageMapping, - }) + vi.doMock( + '../../../../app/src/Features/Project/ProjectEntityHandler', + () => ({ + default: ctx.ProjectEntityHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/DocumentUpdater/DocumentUpdaterHandler', + () => ({ + default: ctx.DocumentUpdaterHandler, + }) + ) + + vi.doMock('../../../../app/src/Features/Metadata/packageMapping', () => ({ + default: ctx.packageMapping, + })) + + ctx.MetaHandler = (await import(modulePath)).default }) describe('getMetaForDoc', function () { - it('should extract all the labels and packages', async function () { - const result = await this.MetaHandler.promises.getMetaForDoc( - this.projectId, - this.docId + it('should extract all the labels and packages', async function (ctx) { + const result = await ctx.MetaHandler.promises.getMetaForDoc( + ctx.projectId, + ctx.docId ) expect(result).to.deep.equal({ labels: ['aaa', 'ccc', 'ddd', 'e,f,g', 'foo', 'lst:foo'], packages: { - foo: this.packageMapping.foo, - baz: this.packageMapping.baz, + foo: ctx.packageMapping.foo, + baz: ctx.packageMapping.baz, }, packageNames: ['foo', 'bar', 'baz'], }) - this.DocumentUpdaterHandler.promises.flushDocToMongo.should.be.calledWith( - this.projectId, - this.docId + ctx.DocumentUpdaterHandler.promises.flushDocToMongo.should.be.calledWith( + ctx.projectId, + ctx.docId ) - this.ProjectEntityHandler.promises.getDoc.should.be.calledWith( - this.projectId, - this.docId + ctx.ProjectEntityHandler.promises.getDoc.should.be.calledWith( + ctx.projectId, + ctx.docId ) }) }) describe('getAllMetaForProject', function () { - it('should extract all metadata', async function () { - this.ProjectEntityHandler.promises.getAllDocs = sinon.stub().resolves({ + it('should extract all metadata', async function (ctx) { + ctx.ProjectEntityHandler.promises.getAllDocs = sinon.stub().resolves({ doc_one: { _id: 'id_one', lines: ['one', 
'\\label{aaa} two', 'three'], @@ -142,8 +152,8 @@ describe('MetaHandler', function () { }, }) - const result = await this.MetaHandler.promises.getAllMetaForProject( - this.projectId + const result = await ctx.MetaHandler.promises.getAllMetaForProject( + ctx.projectId ) expect(result).to.deep.equal({ @@ -206,12 +216,12 @@ describe('MetaHandler', function () { }, }) - this.DocumentUpdaterHandler.promises.flushProjectToMongo.should.be.calledWith( - this.projectId + ctx.DocumentUpdaterHandler.promises.flushProjectToMongo.should.be.calledWith( + ctx.projectId ) - this.ProjectEntityHandler.promises.getAllDocs.should.be.calledWith( - this.projectId + ctx.ProjectEntityHandler.promises.getAllDocs.should.be.calledWith( + ctx.projectId ) }) }) diff --git a/services/web/test/unit/src/Notifications/NotificationsController.test.mjs b/services/web/test/unit/src/Notifications/NotificationsController.test.mjs new file mode 100644 index 0000000000..1bc5c51b31 --- /dev/null +++ b/services/web/test/unit/src/Notifications/NotificationsController.test.mjs @@ -0,0 +1,101 @@ +import { vi } from 'vitest' +import sinon from 'sinon' + +const modulePath = new URL( + '../../../../app/src/Features/Notifications/NotificationsController.mjs', + import.meta.url +).pathname + +describe('NotificationsController', function () { + const userId = '123nd3ijdks' + const notificationId = '123njdskj9jlk' + + beforeEach(async function (ctx) { + ctx.handler = { + getUserNotifications: sinon.stub().callsArgWith(1), + markAsRead: sinon.stub().callsArgWith(2), + promises: { + getUserNotifications: sinon.stub().callsArgWith(1), + }, + } + ctx.req = { + params: { + notificationId, + }, + session: { + user: { + _id: userId, + }, + }, + i18n: { + translate() {}, + }, + } + ctx.AuthenticationController = { + getLoggedInUserId: sinon.stub().returns(ctx.req.session.user._id), + } + + vi.doMock( + '../../../../app/src/Features/Notifications/NotificationsHandler', + () => ({ + default: ctx.handler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Authentication/AuthenticationController', + () => ({ + default: ctx.AuthenticationController, + }) + ) + + ctx.controller = (await import(modulePath)).default + }) + + it('should ask the handler for all unread notifications', function (ctx) { + return new Promise(resolve => { + const allNotifications = [{ _id: notificationId, user_id: userId }] + ctx.handler.getUserNotifications = sinon + .stub() + .callsArgWith(1, null, allNotifications) + ctx.controller.getAllUnreadNotifications(ctx.req, { + json: body => { + body.should.deep.equal(allNotifications) + ctx.handler.getUserNotifications.calledWith(userId).should.equal(true) + resolve() + }, + }) + }) + }) + + it('should send a delete request when a delete has been received to mark a notification', function (ctx) { + return new Promise(resolve => { + ctx.controller.markNotificationAsRead(ctx.req, { + sendStatus: () => { + ctx.handler.markAsRead + .calledWith(userId, notificationId) + .should.equal(true) + resolve() + }, + }) + }) + }) + + it('should get a notification by notification id', function (ctx) { + return new Promise(resolve => { + const notification = { _id: notificationId, user_id: userId } + ctx.handler.getUserNotifications = sinon + .stub() + .callsArgWith(1, null, [notification]) + ctx.controller.getNotification(ctx.req, { + json: body => { + body.should.deep.equal(notification) + resolve() + }, + status: () => ({ + end: () => {}, + }), + }) + }) + }) +}) diff --git 
a/services/web/test/unit/src/Notifications/NotificationsControllerTests.mjs b/services/web/test/unit/src/Notifications/NotificationsControllerTests.mjs deleted file mode 100644 index 0e22b228c5..0000000000 --- a/services/web/test/unit/src/Notifications/NotificationsControllerTests.mjs +++ /dev/null @@ -1,66 +0,0 @@ -import esmock from 'esmock' -import sinon from 'sinon' - -const modulePath = new URL( - '../../../../app/src/Features/Notifications/NotificationsController.mjs', - import.meta.url -).pathname - -describe('NotificationsController', function () { - const userId = '123nd3ijdks' - const notificationId = '123njdskj9jlk' - - beforeEach(async function () { - this.handler = { - getUserNotifications: sinon.stub().callsArgWith(1), - markAsRead: sinon.stub().callsArgWith(2), - } - this.req = { - params: { - notificationId, - }, - session: { - user: { - _id: userId, - }, - }, - i18n: { - translate() {}, - }, - } - this.AuthenticationController = { - getLoggedInUserId: sinon.stub().returns(this.req.session.user._id), - } - this.controller = await esmock.strict(modulePath, { - '../../../../app/src/Features/Notifications/NotificationsHandler': - this.handler, - '../../../../app/src/Features/Authentication/AuthenticationController': - this.AuthenticationController, - }) - }) - - it('should ask the handler for all unread notifications', function (done) { - const allNotifications = [{ _id: notificationId, user_id: userId }] - this.handler.getUserNotifications = sinon - .stub() - .callsArgWith(1, null, allNotifications) - this.controller.getAllUnreadNotifications(this.req, { - json: body => { - body.should.deep.equal(allNotifications) - this.handler.getUserNotifications.calledWith(userId).should.equal(true) - done() - }, - }) - }) - - it('should send a delete request when a delete has been received to mark a notification', function (done) { - this.controller.markNotificationAsRead(this.req, { - sendStatus: () => { - this.handler.markAsRead - .calledWith(userId, notificationId) - .should.equal(true) - done() - }, - }) - }) -}) diff --git a/services/web/test/unit/src/PasswordReset/PasswordResetController.test.mjs b/services/web/test/unit/src/PasswordReset/PasswordResetController.test.mjs new file mode 100644 index 0000000000..05bbfdb433 --- /dev/null +++ b/services/web/test/unit/src/PasswordReset/PasswordResetController.test.mjs @@ -0,0 +1,568 @@ +import { expect, vi } from 'vitest' +import sinon from 'sinon' +import MockResponse from '../helpers/MockResponse.js' + +const MODULE_PATH = new URL( + '../../../../app/src/Features/PasswordReset/PasswordResetController.mjs', + import.meta.url +).pathname + +describe('PasswordResetController', function () { + beforeEach(async function (ctx) { + ctx.email = 'bob@bob.com' + ctx.user_id = 'mock-user-id' + ctx.token = 'my security token that was emailed to me' + ctx.password = 'my new password' + ctx.req = { + body: { + email: ctx.email, + passwordResetToken: ctx.token, + password: ctx.password, + }, + i18n: { + translate() { + return '.' 
+ }, + }, + session: {}, + query: {}, + } + ctx.res = new MockResponse() + + ctx.settings = {} + ctx.PasswordResetHandler = { + generateAndEmailResetToken: sinon.stub(), + promises: { + generateAndEmailResetToken: sinon.stub(), + setNewUserPassword: sinon.stub().resolves({ + found: true, + reset: true, + userID: ctx.user_id, + mustReconfirm: true, + }), + getUserForPasswordResetToken: sinon + .stub() + .withArgs(ctx.token) + .resolves({ + user: { _id: ctx.user_id }, + remainingPeeks: 1, + }), + }, + } + ctx.UserSessionsManager = { + promises: { + removeSessionsFromRedis: sinon.stub().resolves(), + }, + } + ctx.UserUpdater = { + promises: { + removeReconfirmFlag: sinon.stub().resolves(), + }, + } + ctx.SplitTestHandler = { + promises: { + getAssignment: sinon.stub().resolves('default'), + }, + } + + vi.doMock('@overleaf/settings', () => ({ + default: ctx.settings, + })) + + vi.doMock( + '../../../../app/src/Features/PasswordReset/PasswordResetHandler', + () => ({ + default: ctx.PasswordResetHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Authentication/AuthenticationManager', + () => ({ + default: { + validatePassword: sinon.stub().returns(null), + }, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Authentication/AuthenticationController', + () => ({ + default: (ctx.AuthenticationController = { + getLoggedInUserId: sinon.stub(), + finishLogin: sinon.stub(), + setAuditInfo: sinon.stub(), + }), + }) + ) + + vi.doMock('../../../../app/src/Features/User/UserGetter', () => ({ + default: (ctx.UserGetter = { + promises: { + getUser: sinon.stub(), + }, + }), + })) + + vi.doMock('../../../../app/src/Features/User/UserSessionsManager', () => ({ + default: ctx.UserSessionsManager, + })) + + vi.doMock('../../../../app/src/Features/User/UserUpdater', () => ({ + default: ctx.UserUpdater, + })) + + vi.doMock( + '../../../../app/src/Features/SplitTests/SplitTestHandler', + () => ({ + default: ctx.SplitTestHandler, + }) + ) + + ctx.PasswordResetController = (await import(MODULE_PATH)).default + }) + + describe('requestReset', function () { + it('should tell the handler to process that email', function (ctx) { + return new Promise(resolve => { + ctx.PasswordResetHandler.promises.generateAndEmailResetToken.resolves( + 'primary' + ) + ctx.res.callback = () => { + ctx.res.statusCode.should.equal(200) + ctx.res.json.calledWith(sinon.match.has('message')).should.equal(true) + expect( + ctx.PasswordResetHandler.promises.generateAndEmailResetToken + .lastCall.args[0] + ).equal(ctx.email) + resolve() + } + ctx.PasswordResetController.requestReset(ctx.req, ctx.res) + }) + }) + + it('should send a 500 if there is an error', function (ctx) { + return new Promise(resolve => { + ctx.PasswordResetHandler.promises.generateAndEmailResetToken.rejects( + new Error('error') + ) + ctx.PasswordResetController.requestReset(ctx.req, ctx.res, error => { + expect(error).to.exist + resolve() + }) + }) + }) + + it("should send a 404 if the email doesn't exist", function (ctx) { + return new Promise(resolve => { + ctx.PasswordResetHandler.promises.generateAndEmailResetToken.resolves( + null + ) + ctx.res.callback = () => { + ctx.res.statusCode.should.equal(404) + ctx.res.json.calledWith(sinon.match.has('message')).should.equal(true) + resolve() + } + ctx.PasswordResetController.requestReset(ctx.req, ctx.res) + }) + }) + + it('should send a 404 if the email is registered as a secondard email', function (ctx) { + return new Promise(resolve => { + 
ctx.PasswordResetHandler.promises.generateAndEmailResetToken.resolves( + 'secondary' + ) + ctx.res.callback = () => { + ctx.res.statusCode.should.equal(404) + ctx.res.json.calledWith(sinon.match.has('message')).should.equal(true) + resolve() + } + ctx.PasswordResetController.requestReset(ctx.req, ctx.res) + }) + }) + + it('should normalize the email address', function (ctx) { + return new Promise(resolve => { + ctx.email = ' UPperCaseEMAILWithSpacesAround@example.Com ' + ctx.req.body.email = ctx.email + ctx.PasswordResetHandler.promises.generateAndEmailResetToken.resolves( + 'primary' + ) + ctx.res.callback = () => { + ctx.res.statusCode.should.equal(200) + ctx.res.json.calledWith(sinon.match.has('message')).should.equal(true) + resolve() + } + ctx.PasswordResetController.requestReset(ctx.req, ctx.res) + }) + }) + }) + + describe('setNewUserPassword', function () { + beforeEach(function (ctx) { + ctx.req.session.resetToken = ctx.token + }) + + it('should tell the user handler to reset the password', function (ctx) { + return new Promise(resolve => { + ctx.res.sendStatus = code => { + code.should.equal(200) + ctx.PasswordResetHandler.promises.setNewUserPassword + .calledWith(ctx.token, ctx.password) + .should.equal(true) + resolve() + } + ctx.PasswordResetController.setNewUserPassword(ctx.req, ctx.res) + }) + }) + + it('should preserve spaces in the password', function (ctx) { + return new Promise(resolve => { + ctx.password = ctx.req.body.password = ' oh! clever! spaces around! ' + ctx.res.sendStatus = code => { + code.should.equal(200) + ctx.PasswordResetHandler.promises.setNewUserPassword.should.have.been.calledWith( + ctx.token, + ctx.password + ) + resolve() + } + ctx.PasswordResetController.setNewUserPassword(ctx.req, ctx.res) + }) + }) + + it('should send 404 if the token was not found', function (ctx) { + return new Promise(resolve => { + ctx.PasswordResetHandler.promises.setNewUserPassword.resolves({ + found: false, + reset: false, + userId: ctx.user_id, + }) + ctx.res.status = code => { + code.should.equal(404) + return ctx.res + } + ctx.res.json = data => { + data.message.key.should.equal('token-expired') + resolve() + } + ctx.PasswordResetController.setNewUserPassword(ctx.req, ctx.res) + }) + }) + + it('should return 500 if not reset', function (ctx) { + return new Promise(resolve => { + ctx.PasswordResetHandler.promises.setNewUserPassword.resolves({ + found: true, + reset: false, + userId: ctx.user_id, + }) + ctx.res.status = code => { + code.should.equal(500) + return ctx.res + } + ctx.res.json = data => { + expect(data.message).to.exist + resolve() + } + ctx.PasswordResetController.setNewUserPassword(ctx.req, ctx.res) + }) + }) + + it('should return 400 (Bad Request) if there is no password', function (ctx) { + return new Promise(resolve => { + ctx.req.body.password = '' + ctx.res.status = code => { + code.should.equal(400) + return ctx.res + } + ctx.res.json = data => { + data.message.key.should.equal('invalid-password') + ctx.PasswordResetHandler.promises.setNewUserPassword.called.should.equal( + false + ) + resolve() + } + ctx.PasswordResetController.setNewUserPassword(ctx.req, ctx.res) + }) + }) + + it('should return 400 (Bad Request) if there is no passwordResetToken', function (ctx) { + return new Promise(resolve => { + ctx.req.body.passwordResetToken = '' + ctx.res.status = code => { + code.should.equal(400) + return ctx.res + } + ctx.res.json = data => { + data.message.key.should.equal('invalid-password') + 
ctx.PasswordResetHandler.promises.setNewUserPassword.called.should.equal( + false + ) + resolve() + } + ctx.PasswordResetController.setNewUserPassword(ctx.req, ctx.res) + }) + }) + + it('should return 400 (Bad Request) if the password is invalid', function (ctx) { + return new Promise(resolve => { + ctx.req.body.password = 'correct horse battery staple' + const err = new Error('bad') + err.name = 'InvalidPasswordError' + ctx.PasswordResetHandler.promises.setNewUserPassword.rejects(err) + ctx.res.status = code => { + code.should.equal(400) + return ctx.res + } + ctx.res.json = data => { + data.message.key.should.equal('invalid-password') + ctx.PasswordResetHandler.promises.setNewUserPassword.called.should.equal( + true + ) + resolve() + } + ctx.PasswordResetController.setNewUserPassword(ctx.req, ctx.res) + }) + }) + + it('should clear sessions', function (ctx) { + return new Promise(resolve => { + ctx.res.sendStatus = code => { + ctx.UserSessionsManager.promises.removeSessionsFromRedis.callCount.should.equal( + 1 + ) + resolve() + } + ctx.PasswordResetController.setNewUserPassword(ctx.req, ctx.res) + }) + }) + + it('should call removeReconfirmFlag if user.must_reconfirm', function (ctx) { + return new Promise(resolve => { + ctx.res.sendStatus = code => { + ctx.UserUpdater.promises.removeReconfirmFlag.callCount.should.equal(1) + resolve() + } + ctx.PasswordResetController.setNewUserPassword(ctx.req, ctx.res) + }) + }) + + describe('catch errors', function () { + it('should return 404 for NotFoundError', function (ctx) { + return new Promise(resolve => { + const anError = new Error('oops') + anError.name = 'NotFoundError' + ctx.PasswordResetHandler.promises.setNewUserPassword.rejects(anError) + ctx.res.status = code => { + code.should.equal(404) + return ctx.res + } + ctx.res.json = data => { + data.message.key.should.equal('token-expired') + resolve() + } + ctx.PasswordResetController.setNewUserPassword(ctx.req, ctx.res) + }) + }) + it('should return 400 for InvalidPasswordError', function (ctx) { + return new Promise(resolve => { + const anError = new Error('oops') + anError.name = 'InvalidPasswordError' + ctx.PasswordResetHandler.promises.setNewUserPassword.rejects(anError) + ctx.res.status = code => { + code.should.equal(400) + return ctx.res + } + ctx.res.json = data => { + data.message.key.should.equal('invalid-password') + resolve() + } + ctx.PasswordResetController.setNewUserPassword(ctx.req, ctx.res) + }) + }) + it('should return 500 for other errors', function (ctx) { + return new Promise(resolve => { + const anError = new Error('oops') + ctx.PasswordResetHandler.promises.setNewUserPassword.rejects(anError) + ctx.res.status = code => { + code.should.equal(500) + return ctx.res + } + ctx.res.json = data => { + expect(data.message).to.exist + resolve() + } + ctx.res.sendStatus = code => { + code.should.equal(500) + resolve() + } + ctx.PasswordResetController.setNewUserPassword(ctx.req, ctx.res) + }) + }) + }) + + describe('when doLoginAfterPasswordReset is set', function () { + beforeEach(function (ctx) { + ctx.user = { + _id: ctx.userId, + email: 'joe@example.com', + } + ctx.UserGetter.promises.getUser.resolves(ctx.user) + ctx.req.session.doLoginAfterPasswordReset = 'true' + }) + + it('should login user', function (ctx) { + return new Promise(resolve => { + ctx.AuthenticationController.finishLogin.callsFake((...args) => { + expect(args[0]).to.equal(ctx.user) + resolve() + }) + ctx.PasswordResetController.setNewUserPassword(ctx.req, ctx.res) + }) + }) + }) + }) + + 
describe('renderSetPasswordForm', function () { + describe('with token in query-string', function () { + beforeEach(function (ctx) { + ctx.req.query.passwordResetToken = ctx.token + }) + + it('should set session.resetToken and redirect', function (ctx) { + return new Promise(resolve => { + ctx.req.session.should.not.have.property('resetToken') + ctx.res.redirect = path => { + path.should.equal('/user/password/set') + ctx.req.session.resetToken.should.equal(ctx.token) + resolve() + } + ctx.PasswordResetController.renderSetPasswordForm(ctx.req, ctx.res) + }) + }) + }) + + describe('with expired token in query', function () { + beforeEach(function (ctx) { + ctx.req.query.passwordResetToken = ctx.token + ctx.PasswordResetHandler.promises.getUserForPasswordResetToken = sinon + .stub() + .withArgs(ctx.token) + .resolves({ user: { _id: ctx.user_id }, remainingPeeks: 0 }) + }) + + it('should redirect to the reset request page with an error message', function (ctx) { + return new Promise(resolve => { + ctx.res.redirect = path => { + path.should.equal('/user/password/reset?error=token_expired') + ctx.req.session.should.not.have.property('resetToken') + resolve() + } + ctx.res.render = (templatePath, options) => { + resolve('should not render') + } + ctx.PasswordResetController.renderSetPasswordForm(ctx.req, ctx.res) + }) + }) + }) + + describe('with token and email in query-string', function () { + beforeEach(function (ctx) { + ctx.req.query.passwordResetToken = ctx.token + ctx.req.query.email = 'foo@bar.com' + }) + + it('should set session.resetToken and redirect with email', function (ctx) { + return new Promise(resolve => { + ctx.req.session.should.not.have.property('resetToken') + ctx.res.redirect = path => { + path.should.equal('/user/password/set?email=foo%40bar.com') + ctx.req.session.resetToken.should.equal(ctx.token) + resolve() + } + ctx.PasswordResetController.renderSetPasswordForm(ctx.req, ctx.res) + }) + }) + }) + + describe('with token and invalid email in query-string', function () { + beforeEach(function (ctx) { + ctx.req.query.passwordResetToken = ctx.token + ctx.req.query.email = 'not-an-email' + }) + + it('should set session.resetToken and redirect without email', function (ctx) { + return new Promise(resolve => { + ctx.req.session.should.not.have.property('resetToken') + ctx.res.redirect = path => { + path.should.equal('/user/password/set') + ctx.req.session.resetToken.should.equal(ctx.token) + resolve() + } + ctx.PasswordResetController.renderSetPasswordForm(ctx.req, ctx.res) + }) + }) + }) + + describe('with token and non-string email in query-string', function () { + beforeEach(function (ctx) { + ctx.req.query.passwordResetToken = ctx.token + ctx.req.query.email = { foo: 'bar' } + }) + + it('should set session.resetToken and redirect without email', function (ctx) { + return new Promise(resolve => { + ctx.req.session.should.not.have.property('resetToken') + ctx.res.redirect = path => { + path.should.equal('/user/password/set') + ctx.req.session.resetToken.should.equal(ctx.token) + resolve() + } + ctx.PasswordResetController.renderSetPasswordForm(ctx.req, ctx.res) + }) + }) + }) + + describe('without a token in query-string', function () { + describe('with token in session', function () { + beforeEach(function (ctx) { + ctx.req.session.resetToken = ctx.token + }) + + it('should render the page, passing the reset token', function (ctx) { + return new Promise(resolve => { + ctx.res.render = (templatePath, options) => { + options.passwordResetToken.should.equal(ctx.token) + 
resolve() + } + ctx.PasswordResetController.renderSetPasswordForm(ctx.req, ctx.res) + }) + }) + + it('should clear the req.session.resetToken', function (ctx) { + return new Promise(resolve => { + ctx.res.render = (templatePath, options) => { + ctx.req.session.should.not.have.property('resetToken') + resolve() + } + ctx.PasswordResetController.renderSetPasswordForm(ctx.req, ctx.res) + }) + }) + }) + + describe('without a token in session', function () { + it('should redirect to the reset request page', function (ctx) { + return new Promise(resolve => { + ctx.res.redirect = path => { + path.should.equal('/user/password/reset') + ctx.req.session.should.not.have.property('resetToken') + resolve() + } + ctx.PasswordResetController.renderSetPasswordForm(ctx.req, ctx.res) + }) + }) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/PasswordReset/PasswordResetControllerTests.mjs b/services/web/test/unit/src/PasswordReset/PasswordResetControllerTests.mjs deleted file mode 100644 index 6df3c765b1..0000000000 --- a/services/web/test/unit/src/PasswordReset/PasswordResetControllerTests.mjs +++ /dev/null @@ -1,485 +0,0 @@ -import esmock from 'esmock' -import sinon from 'sinon' -import { expect } from 'chai' -import MockResponse from '../helpers/MockResponse.js' - -const MODULE_PATH = new URL( - '../../../../app/src/Features/PasswordReset/PasswordResetController.mjs', - import.meta.url -).pathname - -describe('PasswordResetController', function () { - beforeEach(async function () { - this.email = 'bob@bob.com' - this.user_id = 'mock-user-id' - this.token = 'my security token that was emailed to me' - this.password = 'my new password' - this.req = { - body: { - email: this.email, - passwordResetToken: this.token, - password: this.password, - }, - i18n: { - translate() { - return '.' 
- }, - }, - session: {}, - query: {}, - } - this.res = new MockResponse() - - this.settings = {} - this.PasswordResetHandler = { - generateAndEmailResetToken: sinon.stub(), - promises: { - generateAndEmailResetToken: sinon.stub(), - setNewUserPassword: sinon.stub().resolves({ - found: true, - reset: true, - userID: this.user_id, - mustReconfirm: true, - }), - getUserForPasswordResetToken: sinon - .stub() - .withArgs(this.token) - .resolves({ - user: { _id: this.user_id }, - remainingPeeks: 1, - }), - }, - } - this.UserSessionsManager = { - promises: { - removeSessionsFromRedis: sinon.stub().resolves(), - }, - } - this.UserUpdater = { - promises: { - removeReconfirmFlag: sinon.stub().resolves(), - }, - } - this.SplitTestHandler = { - promises: { - getAssignment: sinon.stub().resolves('default'), - }, - } - this.PasswordResetController = await esmock.strict(MODULE_PATH, { - '@overleaf/settings': this.settings, - '../../../../app/src/Features/PasswordReset/PasswordResetHandler': - this.PasswordResetHandler, - '../../../../app/src/Features/Authentication/AuthenticationManager': { - validatePassword: sinon.stub().returns(null), - }, - '../../../../app/src/Features/Authentication/AuthenticationController': - (this.AuthenticationController = { - getLoggedInUserId: sinon.stub(), - finishLogin: sinon.stub(), - setAuditInfo: sinon.stub(), - }), - '../../../../app/src/Features/User/UserGetter': (this.UserGetter = { - promises: { - getUser: sinon.stub(), - }, - }), - '../../../../app/src/Features/User/UserSessionsManager': - this.UserSessionsManager, - '../../../../app/src/Features/User/UserUpdater': this.UserUpdater, - '../../../../app/src/Features/SplitTests/SplitTestHandler': - this.SplitTestHandler, - }) - }) - - describe('requestReset', function () { - it('should tell the handler to process that email', function (done) { - this.PasswordResetHandler.promises.generateAndEmailResetToken.resolves( - 'primary' - ) - this.res.callback = () => { - this.res.statusCode.should.equal(200) - this.res.json.calledWith(sinon.match.has('message')).should.equal(true) - expect( - this.PasswordResetHandler.promises.generateAndEmailResetToken.lastCall - .args[0] - ).equal(this.email) - done() - } - this.PasswordResetController.requestReset(this.req, this.res) - }) - - it('should send a 500 if there is an error', function (done) { - this.PasswordResetHandler.promises.generateAndEmailResetToken.rejects( - new Error('error') - ) - this.PasswordResetController.requestReset(this.req, this.res, error => { - expect(error).to.exist - done() - }) - }) - - it("should send a 404 if the email doesn't exist", function (done) { - this.PasswordResetHandler.promises.generateAndEmailResetToken.resolves( - null - ) - this.res.callback = () => { - this.res.statusCode.should.equal(404) - this.res.json.calledWith(sinon.match.has('message')).should.equal(true) - done() - } - this.PasswordResetController.requestReset(this.req, this.res) - }) - - it('should send a 404 if the email is registered as a secondard email', function (done) { - this.PasswordResetHandler.promises.generateAndEmailResetToken.resolves( - 'secondary' - ) - this.res.callback = () => { - this.res.statusCode.should.equal(404) - this.res.json.calledWith(sinon.match.has('message')).should.equal(true) - done() - } - this.PasswordResetController.requestReset(this.req, this.res) - }) - - it('should normalize the email address', function (done) { - this.email = ' UPperCaseEMAILWithSpacesAround@example.Com ' - this.req.body.email = this.email - 
this.PasswordResetHandler.promises.generateAndEmailResetToken.resolves( - 'primary' - ) - this.res.callback = () => { - this.res.statusCode.should.equal(200) - this.res.json.calledWith(sinon.match.has('message')).should.equal(true) - done() - } - this.PasswordResetController.requestReset(this.req, this.res) - }) - }) - - describe('setNewUserPassword', function () { - beforeEach(function () { - this.req.session.resetToken = this.token - }) - - it('should tell the user handler to reset the password', function (done) { - this.res.sendStatus = code => { - code.should.equal(200) - this.PasswordResetHandler.promises.setNewUserPassword - .calledWith(this.token, this.password) - .should.equal(true) - done() - } - this.PasswordResetController.setNewUserPassword(this.req, this.res) - }) - - it('should preserve spaces in the password', function (done) { - this.password = this.req.body.password = ' oh! clever! spaces around! ' - this.res.sendStatus = code => { - code.should.equal(200) - this.PasswordResetHandler.promises.setNewUserPassword.should.have.been.calledWith( - this.token, - this.password - ) - done() - } - this.PasswordResetController.setNewUserPassword(this.req, this.res) - }) - - it('should send 404 if the token was not found', function (done) { - this.PasswordResetHandler.promises.setNewUserPassword.resolves({ - found: false, - reset: false, - userId: this.user_id, - }) - this.res.status = code => { - code.should.equal(404) - return this.res - } - this.res.json = data => { - data.message.key.should.equal('token-expired') - done() - } - this.PasswordResetController.setNewUserPassword(this.req, this.res) - }) - - it('should return 500 if not reset', function (done) { - this.PasswordResetHandler.promises.setNewUserPassword.resolves({ - found: true, - reset: false, - userId: this.user_id, - }) - this.res.status = code => { - code.should.equal(500) - return this.res - } - this.res.json = data => { - expect(data.message).to.exist - done() - } - this.PasswordResetController.setNewUserPassword(this.req, this.res) - }) - - it('should return 400 (Bad Request) if there is no password', function (done) { - this.req.body.password = '' - this.res.status = code => { - code.should.equal(400) - return this.res - } - this.res.json = data => { - data.message.key.should.equal('invalid-password') - this.PasswordResetHandler.promises.setNewUserPassword.called.should.equal( - false - ) - done() - } - this.PasswordResetController.setNewUserPassword(this.req, this.res) - }) - - it('should return 400 (Bad Request) if there is no passwordResetToken', function (done) { - this.req.body.passwordResetToken = '' - this.res.status = code => { - code.should.equal(400) - return this.res - } - this.res.json = data => { - data.message.key.should.equal('invalid-password') - this.PasswordResetHandler.promises.setNewUserPassword.called.should.equal( - false - ) - done() - } - this.PasswordResetController.setNewUserPassword(this.req, this.res) - }) - - it('should return 400 (Bad Request) if the password is invalid', function (done) { - this.req.body.password = 'correct horse battery staple' - const err = new Error('bad') - err.name = 'InvalidPasswordError' - this.PasswordResetHandler.promises.setNewUserPassword.rejects(err) - this.res.status = code => { - code.should.equal(400) - return this.res - } - this.res.json = data => { - data.message.key.should.equal('invalid-password') - this.PasswordResetHandler.promises.setNewUserPassword.called.should.equal( - true - ) - done() - } - 
this.PasswordResetController.setNewUserPassword(this.req, this.res) - }) - - it('should clear sessions', function (done) { - this.res.sendStatus = code => { - this.UserSessionsManager.promises.removeSessionsFromRedis.callCount.should.equal( - 1 - ) - done() - } - this.PasswordResetController.setNewUserPassword(this.req, this.res) - }) - - it('should call removeReconfirmFlag if user.must_reconfirm', function (done) { - this.res.sendStatus = code => { - this.UserUpdater.promises.removeReconfirmFlag.callCount.should.equal(1) - done() - } - this.PasswordResetController.setNewUserPassword(this.req, this.res) - }) - - describe('catch errors', function () { - it('should return 404 for NotFoundError', function (done) { - const anError = new Error('oops') - anError.name = 'NotFoundError' - this.PasswordResetHandler.promises.setNewUserPassword.rejects(anError) - this.res.status = code => { - code.should.equal(404) - return this.res - } - this.res.json = data => { - data.message.key.should.equal('token-expired') - done() - } - this.PasswordResetController.setNewUserPassword(this.req, this.res) - }) - it('should return 400 for InvalidPasswordError', function (done) { - const anError = new Error('oops') - anError.name = 'InvalidPasswordError' - this.PasswordResetHandler.promises.setNewUserPassword.rejects(anError) - this.res.status = code => { - code.should.equal(400) - return this.res - } - this.res.json = data => { - data.message.key.should.equal('invalid-password') - done() - } - this.PasswordResetController.setNewUserPassword(this.req, this.res) - }) - it('should return 500 for other errors', function (done) { - const anError = new Error('oops') - this.PasswordResetHandler.promises.setNewUserPassword.rejects(anError) - this.res.status = code => { - code.should.equal(500) - return this.res - } - this.res.json = data => { - expect(data.message).to.exist - done() - } - this.res.sendStatus = code => { - code.should.equal(500) - done() - } - this.PasswordResetController.setNewUserPassword(this.req, this.res) - }) - }) - - describe('when doLoginAfterPasswordReset is set', function () { - beforeEach(function () { - this.user = { - _id: this.userId, - email: 'joe@example.com', - } - this.UserGetter.promises.getUser.resolves(this.user) - this.req.session.doLoginAfterPasswordReset = 'true' - }) - - it('should login user', function (done) { - this.AuthenticationController.finishLogin.callsFake((...args) => { - expect(args[0]).to.equal(this.user) - done() - }) - this.PasswordResetController.setNewUserPassword(this.req, this.res) - }) - }) - }) - - describe('renderSetPasswordForm', function () { - describe('with token in query-string', function () { - beforeEach(function () { - this.req.query.passwordResetToken = this.token - }) - - it('should set session.resetToken and redirect', function (done) { - this.req.session.should.not.have.property('resetToken') - this.res.redirect = path => { - path.should.equal('/user/password/set') - this.req.session.resetToken.should.equal(this.token) - done() - } - this.PasswordResetController.renderSetPasswordForm(this.req, this.res) - }) - }) - - describe('with expired token in query', function () { - beforeEach(function () { - this.req.query.passwordResetToken = this.token - this.PasswordResetHandler.promises.getUserForPasswordResetToken = sinon - .stub() - .withArgs(this.token) - .resolves({ user: { _id: this.user_id }, remainingPeeks: 0 }) - }) - - it('should redirect to the reset request page with an error message', function (done) { - this.res.redirect = path => { - 
path.should.equal('/user/password/reset?error=token_expired') - this.req.session.should.not.have.property('resetToken') - done() - } - this.res.render = (templatePath, options) => { - done('should not render') - } - this.PasswordResetController.renderSetPasswordForm(this.req, this.res) - }) - }) - - describe('with token and email in query-string', function () { - beforeEach(function () { - this.req.query.passwordResetToken = this.token - this.req.query.email = 'foo@bar.com' - }) - - it('should set session.resetToken and redirect with email', function (done) { - this.req.session.should.not.have.property('resetToken') - this.res.redirect = path => { - path.should.equal('/user/password/set?email=foo%40bar.com') - this.req.session.resetToken.should.equal(this.token) - done() - } - this.PasswordResetController.renderSetPasswordForm(this.req, this.res) - }) - }) - - describe('with token and invalid email in query-string', function () { - beforeEach(function () { - this.req.query.passwordResetToken = this.token - this.req.query.email = 'not-an-email' - }) - - it('should set session.resetToken and redirect without email', function (done) { - this.req.session.should.not.have.property('resetToken') - this.res.redirect = path => { - path.should.equal('/user/password/set') - this.req.session.resetToken.should.equal(this.token) - done() - } - this.PasswordResetController.renderSetPasswordForm(this.req, this.res) - }) - }) - - describe('with token and non-string email in query-string', function () { - beforeEach(function () { - this.req.query.passwordResetToken = this.token - this.req.query.email = { foo: 'bar' } - }) - - it('should set session.resetToken and redirect without email', function (done) { - this.req.session.should.not.have.property('resetToken') - this.res.redirect = path => { - path.should.equal('/user/password/set') - this.req.session.resetToken.should.equal(this.token) - done() - } - this.PasswordResetController.renderSetPasswordForm(this.req, this.res) - }) - }) - - describe('without a token in query-string', function () { - describe('with token in session', function () { - beforeEach(function () { - this.req.session.resetToken = this.token - }) - - it('should render the page, passing the reset token', function (done) { - this.res.render = (templatePath, options) => { - options.passwordResetToken.should.equal(this.token) - done() - } - this.PasswordResetController.renderSetPasswordForm(this.req, this.res) - }) - - it('should clear the req.session.resetToken', function (done) { - this.res.render = (templatePath, options) => { - this.req.session.should.not.have.property('resetToken') - done() - } - this.PasswordResetController.renderSetPasswordForm(this.req, this.res) - }) - }) - - describe('without a token in session', function () { - it('should redirect to the reset request page', function (done) { - this.res.redirect = path => { - path.should.equal('/user/password/reset') - this.req.session.should.not.have.property('resetToken') - done() - } - this.PasswordResetController.renderSetPasswordForm(this.req, this.res) - }) - }) - }) - }) -}) diff --git a/services/web/test/unit/src/PasswordReset/PasswordResetHandler.test.mjs b/services/web/test/unit/src/PasswordReset/PasswordResetHandler.test.mjs new file mode 100644 index 0000000000..aab46ae2bf --- /dev/null +++ b/services/web/test/unit/src/PasswordReset/PasswordResetHandler.test.mjs @@ -0,0 +1,616 @@ +import { expect, vi } from 'vitest' +import sinon from 'sinon' +const modulePath = new URL( + 
'../../../../app/src/Features/PasswordReset/PasswordResetHandler', + import.meta.url +).pathname + +describe('PasswordResetHandler', function () { + beforeEach(async function (ctx) { + ctx.settings = { siteUrl: 'https://www.overleaf.com' } + ctx.OneTimeTokenHandler = { + promises: { + getNewToken: sinon.stub(), + peekValueFromToken: sinon.stub(), + }, + peekValueFromToken: sinon.stub(), + expireToken: sinon.stub(), + } + ctx.UserGetter = { + getUserByMainEmail: sinon.stub(), + getUser: sinon.stub(), + promises: { + getUserByAnyEmail: sinon.stub(), + getUserByMainEmail: sinon.stub(), + }, + } + ctx.EmailHandler = { promises: { sendEmail: sinon.stub() } } + ctx.AuthenticationManager = { + setUserPasswordInV2: sinon.stub(), + promises: { + setUserPassword: sinon.stub().resolves(), + }, + } + + vi.doMock('../../../../app/src/Features/User/UserAuditLogHandler', () => ({ + default: (ctx.UserAuditLogHandler = { + promises: { + addEntry: sinon.stub().resolves(), + }, + }), + })) + + vi.doMock('../../../../app/src/Features/User/UserGetter', () => ({ + default: ctx.UserGetter, + })) + + vi.doMock( + '../../../../app/src/Features/Security/OneTimeTokenHandler', + () => ({ + default: ctx.OneTimeTokenHandler, + }) + ) + + vi.doMock('../../../../app/src/Features/Email/EmailHandler', () => ({ + default: ctx.EmailHandler, + })) + + vi.doMock( + '../../../../app/src/Features/Authentication/AuthenticationManager', + () => ({ + default: ctx.AuthenticationManager, + }) + ) + + vi.doMock('@overleaf/settings', () => ({ + default: ctx.settings, + })) + + vi.doMock( + '../../../../app/src/Features/Authorization/PermissionsManager', + () => ({ + default: (ctx.PermissionsManager = { + promises: { + assertUserPermissions: sinon.stub(), + }, + }), + }) + ) + + ctx.PasswordResetHandler = (await import(modulePath)).default + ctx.token = '12312321i' + ctx.user_id = 'user_id_here' + ctx.user = { email: (ctx.email = 'bob@bob.com'), _id: ctx.user_id } + ctx.password = 'my great secret password' + ctx.callback = sinon.stub() + // this should not have any effect now + ctx.settings.overleaf = true + }) + + afterEach(function (ctx) { + ctx.settings.overleaf = false + }) + + describe('generateAndEmailResetToken', function () { + it('should check the user exists', function (ctx) { + ctx.UserGetter.promises.getUserByAnyEmail.resolves() + ctx.PasswordResetHandler.generateAndEmailResetToken( + ctx.user.email, + ctx.callback + ) + ctx.UserGetter.promises.getUserByAnyEmail.should.have.been.calledWith( + ctx.user.email + ) + }) + + it('should send the email with the token', function (ctx) { + return new Promise(resolve => { + ctx.UserGetter.promises.getUserByAnyEmail.resolves(ctx.user) + ctx.OneTimeTokenHandler.promises.getNewToken.resolves(ctx.token) + ctx.EmailHandler.promises.sendEmail.resolves() + ctx.PasswordResetHandler.generateAndEmailResetToken( + ctx.user.email, + (err, status) => { + expect(err).to.not.exist + ctx.EmailHandler.promises.sendEmail.called.should.equal(true) + status.should.equal('primary') + const args = ctx.EmailHandler.promises.sendEmail.args[0] + args[0].should.equal('passwordResetRequested') + args[1].setNewPasswordUrl.should.equal( + `${ctx.settings.siteUrl}/user/password/set?passwordResetToken=${ + ctx.token + }&email=${encodeURIComponent(ctx.user.email)}` + ) + resolve() + } + ) + }) + }) + + it('should return errors from getUserByAnyEmail', function (ctx) { + return new Promise(resolve => { + const err = new Error('oops') + ctx.UserGetter.promises.getUserByAnyEmail.rejects(err) + 
ctx.PasswordResetHandler.generateAndEmailResetToken( + ctx.user.email, + error => { + expect(error).to.equal(err) + resolve() + } + ) + }) + }) + + describe('when the email exists', function () { + let result + beforeEach(async function (ctx) { + ctx.UserGetter.promises.getUserByAnyEmail.resolves(ctx.user) + ctx.OneTimeTokenHandler.promises.getNewToken.resolves(ctx.token) + ctx.EmailHandler.promises.sendEmail.resolves() + result = + await ctx.PasswordResetHandler.promises.generateAndEmailResetToken( + ctx.email + ) + }) + + it('should set the password token data to the user id and email', function (ctx) { + ctx.OneTimeTokenHandler.promises.getNewToken.should.have.been.calledWith( + 'password', + { + email: ctx.email, + user_id: ctx.user._id, + } + ) + }) + + it('should send an email with the token', function (ctx) { + ctx.EmailHandler.promises.sendEmail.called.should.equal(true) + const args = ctx.EmailHandler.promises.sendEmail.args[0] + args[0].should.equal('passwordResetRequested') + args[1].setNewPasswordUrl.should.equal( + `${ctx.settings.siteUrl}/user/password/set?passwordResetToken=${ + ctx.token + }&email=${encodeURIComponent(ctx.user.email)}` + ) + }) + + it('should return status == primary', async function () { + expect(result).to.equal('primary') + }) + }) + + describe("when the email doesn't exist", function () { + let result + beforeEach(async function (ctx) { + ctx.UserGetter.promises.getUserByAnyEmail.resolves(null) + result = + await ctx.PasswordResetHandler.promises.generateAndEmailResetToken( + ctx.email + ) + }) + + it('should not set the password token data', function (ctx) { + ctx.OneTimeTokenHandler.promises.getNewToken.called.should.equal(false) + }) + + it('should not send an email with the token', function (ctx) { + ctx.EmailHandler.promises.sendEmail.called.should.equal(false) + }) + + it('should return status == null', function () { + expect(result).to.equal(null) + }) + }) + + describe('when the email is a secondary email', function () { + let result + beforeEach(async function (ctx) { + ctx.UserGetter.promises.getUserByAnyEmail.resolves(ctx.user) + result = + await ctx.PasswordResetHandler.promises.generateAndEmailResetToken( + 'secondary@email.com' + ) + }) + + it('should not set the password token data', function (ctx) { + ctx.OneTimeTokenHandler.promises.getNewToken.called.should.equal(false) + }) + + it('should not send an email with the token', function (ctx) { + ctx.EmailHandler.promises.sendEmail.called.should.equal(false) + }) + + it('should return status == secondary', function () { + expect(result).to.equal('secondary') + }) + }) + }) + + describe('setNewUserPassword', function () { + beforeEach(function (ctx) { + ctx.auditLog = { ip: '0:0:0:0' } + }) + describe('when no data is found', function () { + beforeEach(function (ctx) { + ctx.OneTimeTokenHandler.promises.peekValueFromToken.resolves(null) + }) + + it('should return found == false and reset == false', function (ctx) { + ctx.PasswordResetHandler.setNewUserPassword( + ctx.token, + ctx.password, + ctx.auditLog, + (error, result) => { + expect(error).to.not.exist + expect(result).to.deep.equal({ + found: false, + reset: false, + userId: null, + }) + } + ) + }) + }) + + describe('when the token has a user_id and email', function () { + beforeEach(function (ctx) { + ctx.OneTimeTokenHandler.promises.peekValueFromToken.resolves({ + data: { + user_id: ctx.user._id, + email: ctx.email, + }, + }) + ctx.AuthenticationManager.promises.setUserPassword + .withArgs(ctx.user, ctx.password) + .resolves(true) + 
ctx.OneTimeTokenHandler.expireToken = sinon.stub().callsArgWith(2, null) + }) + + describe('when no user is found with this email', function () { + beforeEach(function (ctx) { + ctx.UserGetter.getUserByMainEmail + .withArgs(ctx.email) + .yields(null, null) + }) + + it('should return found == false and reset == false', function (ctx) { + return new Promise(resolve => { + ctx.PasswordResetHandler.setNewUserPassword( + ctx.token, + ctx.password, + ctx.auditLog, + (err, result) => { + const { found, reset } = result + expect(err).to.not.exist + expect(found).to.be.false + expect(reset).to.be.false + expect(ctx.OneTimeTokenHandler.expireToken.callCount).to.equal( + 0 + ) + resolve() + } + ) + }) + }) + }) + + describe("when the email and user don't match", function () { + beforeEach(function (ctx) { + ctx.UserGetter.getUserByMainEmail + .withArgs(ctx.email) + .yields(null, { _id: 'not-the-same', email: ctx.email }) + ctx.OneTimeTokenHandler.expireToken.callsArgWith(2, null) + }) + + it('should return found == false and reset == false', function (ctx) { + return new Promise(resolve => { + ctx.PasswordResetHandler.setNewUserPassword( + ctx.token, + ctx.password, + ctx.auditLog, + (err, result) => { + const { found, reset } = result + expect(err).to.not.exist + expect(found).to.be.false + expect(reset).to.be.false + expect(ctx.OneTimeTokenHandler.expireToken.callCount).to.equal( + 0 + ) + resolve() + } + ) + }) + }) + }) + + describe('when the email and user match', function () { + describe('success', function () { + beforeEach(function (ctx) { + ctx.UserGetter.promises.getUserByMainEmail.resolves(ctx.user) + ctx.OneTimeTokenHandler.expireToken = sinon + .stub() + .callsArgWith(2, null) + }) + + it('should update the user audit log', function (ctx) { + return new Promise(resolve => { + ctx.PasswordResetHandler.setNewUserPassword( + ctx.token, + ctx.password, + ctx.auditLog, + (error, result) => { + sinon.assert.calledWith( + ctx.UserAuditLogHandler.promises.addEntry, + ctx.user_id, + 'reset-password', + undefined, + ctx.auditLog.ip, + { token: ctx.token.substring(0, 10) } + ) + expect(error).to.not.exist + resolve() + } + ) + }) + }) + + it('should return reset == true and the user id', function (ctx) { + return new Promise(resolve => { + ctx.PasswordResetHandler.setNewUserPassword( + ctx.token, + ctx.password, + ctx.auditLog, + (err, result) => { + const { reset, userId } = result + expect(err).to.not.exist + expect(reset).to.be.true + expect(userId).to.equal(ctx.user._id) + resolve() + } + ) + }) + }) + + it('should expire the token', function (ctx) { + return new Promise(resolve => { + ctx.PasswordResetHandler.setNewUserPassword( + ctx.token, + ctx.password, + ctx.auditLog, + (_err, _result) => { + expect(ctx.OneTimeTokenHandler.expireToken.called).to.equal( + true + ) + resolve() + } + ) + }) + }) + + describe('when logged in', function () { + beforeEach(function (ctx) { + ctx.auditLog.initiatorId = ctx.user_id + }) + it('should update the user audit log with initiatorId', function (ctx) { + return new Promise(resolve => { + ctx.PasswordResetHandler.setNewUserPassword( + ctx.token, + ctx.password, + ctx.auditLog, + (error, result) => { + expect(error).to.not.exist + sinon.assert.calledWith( + ctx.UserAuditLogHandler.promises.addEntry, + ctx.user_id, + 'reset-password', + ctx.user_id, + ctx.auditLog.ip, + { token: ctx.token.substring(0, 10) } + ) + resolve() + } + ) + }) + }) + }) + }) + + describe('errors', function () { + describe('via setUserPassword', function () { + beforeEach(function 
(ctx) { + ctx.PasswordResetHandler.promises.getUserForPasswordResetToken = + sinon.stub().withArgs(ctx.token).resolves({ user: ctx.user }) + ctx.AuthenticationManager.promises.setUserPassword + .withArgs(ctx.user, ctx.password) + .rejects() + }) + it('should return the error', function (ctx) { + return new Promise(resolve => { + ctx.PasswordResetHandler.setNewUserPassword( + ctx.token, + ctx.password, + ctx.auditLog, + (error, _result) => { + expect(error).to.exist + expect( + ctx.UserAuditLogHandler.promises.addEntry.callCount + ).to.equal(1) + resolve() + } + ) + }) + }) + }) + + describe('via UserAuditLogHandler', function () { + beforeEach(function (ctx) { + ctx.PasswordResetHandler.promises.getUserForPasswordResetToken = + sinon.stub().withArgs(ctx.token).resolves({ user: ctx.user }) + ctx.UserAuditLogHandler.promises.addEntry.rejects( + new Error('oops') + ) + }) + it('should return the error', function (ctx) { + return new Promise(resolve => { + ctx.PasswordResetHandler.setNewUserPassword( + ctx.token, + ctx.password, + ctx.auditLog, + (error, _result) => { + expect(error).to.exist + expect( + ctx.UserAuditLogHandler.promises.addEntry.callCount + ).to.equal(1) + expect(ctx.AuthenticationManager.promises.setUserPassword) + .to.not.have.been.called + resolve() + } + ) + }) + }) + }) + }) + }) + }) + + describe('when the token has a v1_user_id and email', function () { + beforeEach(function (ctx) { + ctx.user.overleaf = { id: 184 } + ctx.OneTimeTokenHandler.promises.peekValueFromToken.resolves({ + data: { + v1_user_id: ctx.user.overleaf.id, + email: ctx.email, + }, + }) + ctx.AuthenticationManager.promises.setUserPassword + .withArgs(ctx.user, ctx.password) + .resolves(true) + ctx.OneTimeTokenHandler.expireToken = sinon.stub().callsArgWith(2, null) + }) + + describe('when no user is reset with this email', function () { + beforeEach(function (ctx) { + ctx.UserGetter.getUserByMainEmail + .withArgs(ctx.email) + .yields(null, null) + }) + + it('should return reset == false', function (ctx) { + return new Promise(resolve => { + ctx.PasswordResetHandler.setNewUserPassword( + ctx.token, + ctx.password, + ctx.auditLog, + (err, result) => { + const { reset } = result + expect(err).to.not.exist + expect(reset).to.be.false + expect(ctx.OneTimeTokenHandler.expireToken.called).to.equal( + false + ) + resolve() + } + ) + }) + }) + }) + + describe("when the email and user don't match", function () { + beforeEach(function (ctx) { + ctx.UserGetter.getUserByMainEmail.withArgs(ctx.email).yields(null, { + _id: ctx.user._id, + email: ctx.email, + overleaf: { id: 'not-the-same' }, + }) + }) + + it('should return reset == false', function (ctx) { + return new Promise(resolve => { + ctx.PasswordResetHandler.setNewUserPassword( + ctx.token, + ctx.password, + ctx.auditLog, + (err, result) => { + const { reset } = result + expect(err).to.not.exist + expect(reset).to.be.false + expect(ctx.OneTimeTokenHandler.expireToken.called).to.equal( + false + ) + resolve() + } + ) + }) + }) + }) + + describe('when the email and user match', function () { + beforeEach(function (ctx) { + ctx.UserGetter.promises.getUserByMainEmail.resolves(ctx.user) + }) + + it('should return reset == true and the user id', function (ctx) { + return new Promise(resolve => { + ctx.PasswordResetHandler.setNewUserPassword( + ctx.token, + ctx.password, + ctx.auditLog, + (err, result) => { + const { reset, userId } = result + expect(err).to.not.exist + expect(reset).to.be.true + expect(userId).to.equal(ctx.user._id) + 
expect(ctx.OneTimeTokenHandler.expireToken.called).to.equal( + true + ) + resolve() + } + ) + }) + }) + }) + }) + + describe('getUserForPasswordResetToken', function () { + beforeEach(function (ctx) { + ctx.OneTimeTokenHandler.promises.peekValueFromToken.resolves({ + data: { + user_id: ctx.user._id, + email: ctx.email, + }, + remainingPeeks: 1, + }) + + ctx.UserGetter.promises.getUserByMainEmail.resolves({ + _id: ctx.user._id, + email: ctx.email, + }) + }) + + it('should return errors from user permissions', async function (ctx) { + let error + const err = new Error('nope') + ctx.PermissionsManager.promises.assertUserPermissions.rejects(err) + try { + await ctx.PasswordResetHandler.promises.getUserForPasswordResetToken( + 'abc123' + ) + } catch (e) { + error = e + } + expect(error).to.deep.equal(err) + }) + + it('returns user when user has permissions and remaining peeks', async function (ctx) { + const result = + await ctx.PasswordResetHandler.promises.getUserForPasswordResetToken( + 'abc123' + ) + expect(result).to.deep.equal({ + user: { _id: ctx.user._id, email: ctx.email }, + remainingPeeks: 1, + }) + }) + }) +}) diff --git a/services/web/test/unit/src/PasswordReset/PasswordResetHandlerTests.mjs b/services/web/test/unit/src/PasswordReset/PasswordResetHandlerTests.mjs deleted file mode 100644 index b99cc527e2..0000000000 --- a/services/web/test/unit/src/PasswordReset/PasswordResetHandlerTests.mjs +++ /dev/null @@ -1,563 +0,0 @@ -import esmock from 'esmock' -import sinon from 'sinon' -import { expect } from 'chai' -const modulePath = new URL( - '../../../../app/src/Features/PasswordReset/PasswordResetHandler', - import.meta.url -).pathname - -describe('PasswordResetHandler', function () { - beforeEach(async function () { - this.settings = { siteUrl: 'https://www.overleaf.com' } - this.OneTimeTokenHandler = { - promises: { - getNewToken: sinon.stub(), - peekValueFromToken: sinon.stub(), - }, - peekValueFromToken: sinon.stub(), - expireToken: sinon.stub(), - } - this.UserGetter = { - getUserByMainEmail: sinon.stub(), - getUser: sinon.stub(), - promises: { - getUserByAnyEmail: sinon.stub(), - getUserByMainEmail: sinon.stub(), - }, - } - this.EmailHandler = { promises: { sendEmail: sinon.stub() } } - this.AuthenticationManager = { - setUserPasswordInV2: sinon.stub(), - promises: { - setUserPassword: sinon.stub().resolves(), - }, - } - this.PasswordResetHandler = await esmock.strict(modulePath, { - '../../../../app/src/Features/User/UserAuditLogHandler': - (this.UserAuditLogHandler = { - promises: { - addEntry: sinon.stub().resolves(), - }, - }), - '../../../../app/src/Features/User/UserGetter': this.UserGetter, - '../../../../app/src/Features/Security/OneTimeTokenHandler': - this.OneTimeTokenHandler, - '../../../../app/src/Features/Email/EmailHandler': this.EmailHandler, - '../../../../app/src/Features/Authentication/AuthenticationManager': - this.AuthenticationManager, - '@overleaf/settings': this.settings, - '../../../../app/src/Features/Authorization/PermissionsManager': - (this.PermissionsManager = { - promises: { - assertUserPermissions: sinon.stub(), - }, - }), - }) - this.token = '12312321i' - this.user_id = 'user_id_here' - this.user = { email: (this.email = 'bob@bob.com'), _id: this.user_id } - this.password = 'my great secret password' - this.callback = sinon.stub() - // this should not have any effect now - this.settings.overleaf = true - }) - - afterEach(function () { - this.settings.overleaf = false - }) - - describe('generateAndEmailResetToken', function () { - 
it('should check the user exists', function () { - this.UserGetter.promises.getUserByAnyEmail.resolves() - this.PasswordResetHandler.generateAndEmailResetToken( - this.user.email, - this.callback - ) - this.UserGetter.promises.getUserByAnyEmail.should.have.been.calledWith( - this.user.email - ) - }) - - it('should send the email with the token', function (done) { - this.UserGetter.promises.getUserByAnyEmail.resolves(this.user) - this.OneTimeTokenHandler.promises.getNewToken.resolves(this.token) - this.EmailHandler.promises.sendEmail.resolves() - this.PasswordResetHandler.generateAndEmailResetToken( - this.user.email, - (err, status) => { - expect(err).to.not.exist - this.EmailHandler.promises.sendEmail.called.should.equal(true) - status.should.equal('primary') - const args = this.EmailHandler.promises.sendEmail.args[0] - args[0].should.equal('passwordResetRequested') - args[1].setNewPasswordUrl.should.equal( - `${this.settings.siteUrl}/user/password/set?passwordResetToken=${ - this.token - }&email=${encodeURIComponent(this.user.email)}` - ) - done() - } - ) - }) - - it('should return errors from getUserByAnyEmail', function (done) { - const err = new Error('oops') - this.UserGetter.promises.getUserByAnyEmail.rejects(err) - this.PasswordResetHandler.generateAndEmailResetToken( - this.user.email, - err => { - expect(err).to.equal(err) - done() - } - ) - }) - - describe('when the email exists', function () { - let result - beforeEach(async function () { - this.UserGetter.promises.getUserByAnyEmail.resolves(this.user) - this.OneTimeTokenHandler.promises.getNewToken.resolves(this.token) - this.EmailHandler.promises.sendEmail.resolves() - result = - await this.PasswordResetHandler.promises.generateAndEmailResetToken( - this.email - ) - }) - - it('should set the password token data to the user id and email', function () { - this.OneTimeTokenHandler.promises.getNewToken.should.have.been.calledWith( - 'password', - { - email: this.email, - user_id: this.user._id, - } - ) - }) - - it('should send an email with the token', function () { - this.EmailHandler.promises.sendEmail.called.should.equal(true) - const args = this.EmailHandler.promises.sendEmail.args[0] - args[0].should.equal('passwordResetRequested') - args[1].setNewPasswordUrl.should.equal( - `${this.settings.siteUrl}/user/password/set?passwordResetToken=${ - this.token - }&email=${encodeURIComponent(this.user.email)}` - ) - }) - - it('should return status == true', async function () { - expect(result).to.equal('primary') - }) - }) - - describe("when the email doesn't exist", function () { - let result - beforeEach(async function () { - this.UserGetter.promises.getUserByAnyEmail.resolves(null) - result = - await this.PasswordResetHandler.promises.generateAndEmailResetToken( - this.email - ) - }) - - it('should not set the password token data', function () { - this.OneTimeTokenHandler.promises.getNewToken.called.should.equal(false) - }) - - it('should send an email with the token', function () { - this.EmailHandler.promises.sendEmail.called.should.equal(false) - }) - - it('should return status == null', function () { - expect(result).to.equal(null) - }) - }) - - describe('when the email is a secondary email', function () { - let result - beforeEach(async function () { - this.UserGetter.promises.getUserByAnyEmail.resolves(this.user) - result = - await this.PasswordResetHandler.promises.generateAndEmailResetToken( - 'secondary@email.com' - ) - }) - - it('should not set the password token data', function () { - 
this.OneTimeTokenHandler.promises.getNewToken.called.should.equal(false) - }) - - it('should not send an email with the token', function () { - this.EmailHandler.promises.sendEmail.called.should.equal(false) - }) - - it('should return status == secondary', function () { - expect(result).to.equal('secondary') - }) - }) - }) - - describe('setNewUserPassword', function () { - beforeEach(function () { - this.auditLog = { ip: '0:0:0:0' } - }) - describe('when no data is found', function () { - beforeEach(function () { - this.OneTimeTokenHandler.promises.peekValueFromToken.resolves(null) - }) - - it('should return found == false and reset == false', function () { - this.PasswordResetHandler.setNewUserPassword( - this.token, - this.password, - this.auditLog, - (error, result) => { - expect(error).to.not.exist - expect(result).to.deep.equal({ - found: false, - reset: false, - userId: null, - }) - } - ) - }) - }) - - describe('when the token has a user_id and email', function () { - beforeEach(function () { - this.OneTimeTokenHandler.promises.peekValueFromToken.resolves({ - data: { - user_id: this.user._id, - email: this.email, - }, - }) - this.AuthenticationManager.promises.setUserPassword - .withArgs(this.user, this.password) - .resolves(true) - this.OneTimeTokenHandler.expireToken = sinon - .stub() - .callsArgWith(2, null) - }) - - describe('when no user is found with this email', function () { - beforeEach(function () { - this.UserGetter.getUserByMainEmail - .withArgs(this.email) - .yields(null, null) - }) - - it('should return found == false and reset == false', function (done) { - this.PasswordResetHandler.setNewUserPassword( - this.token, - this.password, - this.auditLog, - (err, result) => { - const { found, reset } = result - expect(err).to.not.exist - expect(found).to.be.false - expect(reset).to.be.false - expect(this.OneTimeTokenHandler.expireToken.callCount).to.equal(0) - done() - } - ) - }) - }) - - describe("when the email and user don't match", function () { - beforeEach(function () { - this.UserGetter.getUserByMainEmail - .withArgs(this.email) - .yields(null, { _id: 'not-the-same', email: this.email }) - this.OneTimeTokenHandler.expireToken.callsArgWith(2, null) - }) - - it('should return found == false and reset == false', function (done) { - this.PasswordResetHandler.setNewUserPassword( - this.token, - this.password, - this.auditLog, - (err, result) => { - const { found, reset } = result - expect(err).to.not.exist - expect(found).to.be.false - expect(reset).to.be.false - expect(this.OneTimeTokenHandler.expireToken.callCount).to.equal(0) - done() - } - ) - }) - }) - - describe('when the email and user match', function () { - describe('success', function () { - beforeEach(function () { - this.UserGetter.promises.getUserByMainEmail.resolves(this.user) - this.OneTimeTokenHandler.expireToken = sinon - .stub() - .callsArgWith(2, null) - }) - - it('should update the user audit log', function (done) { - this.PasswordResetHandler.setNewUserPassword( - this.token, - this.password, - this.auditLog, - (error, result) => { - sinon.assert.calledWith( - this.UserAuditLogHandler.promises.addEntry, - this.user_id, - 'reset-password', - undefined, - this.auditLog.ip, - { token: this.token.substring(0, 10) } - ) - expect(error).to.not.exist - done() - } - ) - }) - - it('should return reset == true and the user id', function (done) { - this.PasswordResetHandler.setNewUserPassword( - this.token, - this.password, - this.auditLog, - (err, result) => { - const { reset, userId } = result - 
expect(err).to.not.exist - expect(reset).to.be.true - expect(userId).to.equal(this.user._id) - done() - } - ) - }) - - it('should expire the token', function (done) { - this.PasswordResetHandler.setNewUserPassword( - this.token, - this.password, - this.auditLog, - (_err, _result) => { - expect(this.OneTimeTokenHandler.expireToken.called).to.equal( - true - ) - done() - } - ) - }) - - describe('when logged in', function () { - beforeEach(function () { - this.auditLog.initiatorId = this.user_id - }) - it('should update the user audit log with initiatorId', function (done) { - this.PasswordResetHandler.setNewUserPassword( - this.token, - this.password, - this.auditLog, - (error, result) => { - expect(error).to.not.exist - sinon.assert.calledWith( - this.UserAuditLogHandler.promises.addEntry, - this.user_id, - 'reset-password', - this.user_id, - this.auditLog.ip, - { token: this.token.substring(0, 10) } - ) - done() - } - ) - }) - }) - }) - - describe('errors', function () { - describe('via setUserPassword', function () { - beforeEach(function () { - this.PasswordResetHandler.promises.getUserForPasswordResetToken = - sinon.stub().withArgs(this.token).resolves({ user: this.user }) - this.AuthenticationManager.promises.setUserPassword - .withArgs(this.user, this.password) - .rejects() - }) - it('should return the error', function (done) { - this.PasswordResetHandler.setNewUserPassword( - this.token, - this.password, - this.auditLog, - (error, _result) => { - expect(error).to.exist - expect( - this.UserAuditLogHandler.promises.addEntry.callCount - ).to.equal(1) - done() - } - ) - }) - }) - - describe('via UserAuditLogHandler', function () { - beforeEach(function () { - this.PasswordResetHandler.promises.getUserForPasswordResetToken = - sinon.stub().withArgs(this.token).resolves({ user: this.user }) - this.UserAuditLogHandler.promises.addEntry.rejects( - new Error('oops') - ) - }) - it('should return the error', function (done) { - this.PasswordResetHandler.setNewUserPassword( - this.token, - this.password, - this.auditLog, - (error, _result) => { - expect(error).to.exist - expect( - this.UserAuditLogHandler.promises.addEntry.callCount - ).to.equal(1) - expect(this.AuthenticationManager.promises.setUserPassword).to - .not.have.been.called - done() - } - ) - }) - }) - }) - }) - }) - - describe('when the token has a v1_user_id and email', function () { - beforeEach(function () { - this.user.overleaf = { id: 184 } - this.OneTimeTokenHandler.promises.peekValueFromToken.resolves({ - data: { - v1_user_id: this.user.overleaf.id, - email: this.email, - }, - }) - this.AuthenticationManager.promises.setUserPassword - .withArgs(this.user, this.password) - .resolves(true) - this.OneTimeTokenHandler.expireToken = sinon - .stub() - .callsArgWith(2, null) - }) - - describe('when no user is reset with this email', function () { - beforeEach(function () { - this.UserGetter.getUserByMainEmail - .withArgs(this.email) - .yields(null, null) - }) - - it('should return reset == false', function (done) { - this.PasswordResetHandler.setNewUserPassword( - this.token, - this.password, - this.auditLog, - (err, result) => { - const { reset } = result - expect(err).to.not.exist - expect(reset).to.be.false - expect(this.OneTimeTokenHandler.expireToken.called).to.equal( - false - ) - done() - } - ) - }) - }) - - describe("when the email and user don't match", function () { - beforeEach(function () { - this.UserGetter.getUserByMainEmail.withArgs(this.email).yields(null, { - _id: this.user._id, - email: this.email, - overleaf: { 
id: 'not-the-same' }, - }) - }) - - it('should return reset == false', function (done) { - this.PasswordResetHandler.setNewUserPassword( - this.token, - this.password, - this.auditLog, - (err, result) => { - const { reset } = result - expect(err).to.not.exist - expect(reset).to.be.false - expect(this.OneTimeTokenHandler.expireToken.called).to.equal( - false - ) - done() - } - ) - }) - }) - - describe('when the email and user match', function () { - beforeEach(function () { - this.UserGetter.promises.getUserByMainEmail.resolves(this.user) - }) - - it('should return reset == true and the user id', function (done) { - this.PasswordResetHandler.setNewUserPassword( - this.token, - this.password, - this.auditLog, - (err, result) => { - const { reset, userId } = result - expect(err).to.not.exist - expect(reset).to.be.true - expect(userId).to.equal(this.user._id) - expect(this.OneTimeTokenHandler.expireToken.called).to.equal(true) - done() - } - ) - }) - }) - }) - }) - - describe('getUserForPasswordResetToken', function () { - beforeEach(function () { - this.OneTimeTokenHandler.promises.peekValueFromToken.resolves({ - data: { - user_id: this.user._id, - email: this.email, - }, - remainingPeeks: 1, - }) - - this.UserGetter.promises.getUserByMainEmail.resolves({ - _id: this.user._id, - email: this.email, - }) - }) - - it('should returns errors from user permissions', async function () { - let error - const err = new Error('nope') - this.PermissionsManager.promises.assertUserPermissions.rejects(err) - try { - await this.PasswordResetHandler.promises.getUserForPasswordResetToken( - 'abc123' - ) - } catch (e) { - error = e - } - expect(error).to.deep.equal(error) - }) - - it('returns user when user has permissions and remaining peaks', async function () { - const result = - await this.PasswordResetHandler.promises.getUserForPasswordResetToken( - 'abc123' - ) - expect(result).to.deep.equal({ - user: { _id: this.user._id, email: this.email }, - remainingPeeks: 1, - }) - }) - }) -}) diff --git a/services/web/test/unit/src/Project/DocLinesComparitorTests.mjs b/services/web/test/unit/src/Project/DocLinesComparitor.test.mjs similarity index 68% rename from services/web/test/unit/src/Project/DocLinesComparitorTests.mjs rename to services/web/test/unit/src/Project/DocLinesComparitor.test.mjs index 4f1f3b4f5f..55c4187f83 100644 --- a/services/web/test/unit/src/Project/DocLinesComparitorTests.mjs +++ b/services/web/test/unit/src/Project/DocLinesComparitor.test.mjs @@ -1,16 +1,14 @@ -import esmock from 'esmock' - const modulePath = '../../../../app/src/Features/Project/DocLinesComparitor.mjs' describe('doc lines comparitor', function () { - beforeEach(async function () { - this.comparitor = await esmock.strict(modulePath, {}) + beforeEach(async function (ctx) { + ctx.comparitor = (await import(modulePath)).default }) - it('should return true when the lines are the same', function () { + it('should return true when the lines are the same', function (ctx) { const lines1 = ['hello', 'world'] const lines2 = ['hello', 'world'] - const result = this.comparitor.areSame(lines1, lines2) + const result = ctx.comparitor.areSame(lines1, lines2) result.should.equal(true) }) ;[ @@ -23,58 +21,58 @@ describe('doc lines comparitor', function () { lines2: ['hello', 'wrld'], }, ].forEach(({ lines1, lines2 }) => { - it('should return false when the lines are different', function () { - const result = this.comparitor.areSame(lines1, lines2) + it('should return false when the lines are different', function (ctx) { + const result = 
ctx.comparitor.areSame(lines1, lines2) result.should.equal(false) }) }) - it('should return true when the lines are same', function () { + it('should return true when the lines are same', function (ctx) { const lines1 = ['hello', 'world'] const lines2 = ['hello', 'world'] - const result = this.comparitor.areSame(lines1, lines2) + const result = ctx.comparitor.areSame(lines1, lines2) result.should.equal(true) }) - it('should return false if the doc lines are different in length', function () { + it('should return false if the doc lines are different in length', function (ctx) { const lines1 = ['hello', 'world'] const lines2 = ['hello', 'world', 'please'] - const result = this.comparitor.areSame(lines1, lines2) + const result = ctx.comparitor.areSame(lines1, lines2) result.should.equal(false) }) - it('should return false if the first array is undefined', function () { + it('should return false if the first array is undefined', function (ctx) { const lines1 = undefined const lines2 = ['hello', 'world'] - const result = this.comparitor.areSame(lines1, lines2) + const result = ctx.comparitor.areSame(lines1, lines2) result.should.equal(false) }) - it('should return false if the second array is undefined', function () { + it('should return false if the second array is undefined', function (ctx) { const lines1 = ['hello'] const lines2 = undefined - const result = this.comparitor.areSame(lines1, lines2) + const result = ctx.comparitor.areSame(lines1, lines2) result.should.equal(false) }) - it('should return false if the second array is not an array', function () { + it('should return false if the second array is not an array', function (ctx) { const lines1 = ['hello'] const lines2 = '' - const result = this.comparitor.areSame(lines1, lines2) + const result = ctx.comparitor.areSame(lines1, lines2) result.should.equal(false) }) - it('should return true when comparing equal orchard docs', function () { + it('should return true when comparing equal orchard docs', function (ctx) { const lines1 = [{ text: 'hello world' }] const lines2 = [{ text: 'hello world' }] - const result = this.comparitor.areSame(lines1, lines2) + const result = ctx.comparitor.areSame(lines1, lines2) result.should.equal(true) }) - it('should return false when comparing different orchard docs', function () { + it('should return false when comparing different orchard docs', function (ctx) { const lines1 = [{ text: 'goodbye world' }] const lines2 = [{ text: 'hello world' }] - const result = this.comparitor.areSame(lines1, lines2) + const result = ctx.comparitor.areSame(lines1, lines2) result.should.equal(false) }) }) diff --git a/services/web/test/unit/src/Project/ProjectApiController.test.mjs b/services/web/test/unit/src/Project/ProjectApiController.test.mjs new file mode 100644 index 0000000000..c73f327cd2 --- /dev/null +++ b/services/web/test/unit/src/Project/ProjectApiController.test.mjs @@ -0,0 +1,57 @@ +import { vi } from 'vitest' +import sinon from 'sinon' + +const modulePath = '../../../../app/src/Features/Project/ProjectApiController' + +describe('Project api controller', function () { + beforeEach(async function (ctx) { + ctx.ProjectDetailsHandler = { getDetails: sinon.stub() } + + vi.doMock( + '../../../../app/src/Features/Project/ProjectDetailsHandler', + () => ({ + default: ctx.ProjectDetailsHandler, + }) + ) + + ctx.controller = (await import(modulePath)).default + ctx.project_id = '321l3j1kjkjl' + ctx.req = { + params: { + project_id: ctx.project_id, + }, + session: { + destroy: sinon.stub(), + }, + } + ctx.res = {} + 
ctx.next = sinon.stub() + return (ctx.projDetails = { name: 'something' }) + }) + + describe('getProjectDetails', function () { + it('should ask the project details handler for proj details', function (ctx) { + return new Promise(resolve => { + ctx.ProjectDetailsHandler.getDetails.callsArgWith( + 1, + null, + ctx.projDetails + ) + ctx.res.json = data => { + ctx.ProjectDetailsHandler.getDetails + .calledWith(ctx.project_id) + .should.equal(true) + data.should.deep.equal(ctx.projDetails) + return resolve() + } + return ctx.controller.getProjectDetails(ctx.req, ctx.res) + }) + }) + + it('should send a 500 if there is an error', function (ctx) { + ctx.ProjectDetailsHandler.getDetails.callsArgWith(1, 'error') + ctx.controller.getProjectDetails(ctx.req, ctx.res, ctx.next) + return ctx.next.calledWith('error').should.equal(true) + }) + }) +}) diff --git a/services/web/test/unit/src/Project/ProjectApiControllerTests.mjs b/services/web/test/unit/src/Project/ProjectApiControllerTests.mjs deleted file mode 100644 index bda54a932c..0000000000 --- a/services/web/test/unit/src/Project/ProjectApiControllerTests.mjs +++ /dev/null @@ -1,57 +0,0 @@ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -import esmock from 'esmock' -import sinon from 'sinon' - -const modulePath = '../../../../app/src/Features/Project/ProjectApiController' - -describe('Project api controller', function () { - beforeEach(async function () { - this.ProjectDetailsHandler = { getDetails: sinon.stub() } - this.controller = await esmock.strict(modulePath, { - '../../../../app/src/Features/Project/ProjectDetailsHandler': - this.ProjectDetailsHandler, - }) - this.project_id = '321l3j1kjkjl' - this.req = { - params: { - project_id: this.project_id, - }, - session: { - destroy: sinon.stub(), - }, - } - this.res = {} - this.next = sinon.stub() - return (this.projDetails = { name: 'something' }) - }) - - describe('getProjectDetails', function () { - it('should ask the project details handler for proj details', function (done) { - this.ProjectDetailsHandler.getDetails.callsArgWith( - 1, - null, - this.projDetails - ) - this.res.json = data => { - this.ProjectDetailsHandler.getDetails - .calledWith(this.project_id) - .should.equal(true) - data.should.deep.equal(this.projDetails) - return done() - } - return this.controller.getProjectDetails(this.req, this.res) - }) - - it('should send a 500 if there is an error', function () { - this.ProjectDetailsHandler.getDetails.callsArgWith(1, 'error') - this.controller.getProjectDetails(this.req, this.res, this.next) - return this.next.calledWith('error').should.equal(true) - }) - }) -}) diff --git a/services/web/test/unit/src/Project/ProjectControllerTests.js b/services/web/test/unit/src/Project/ProjectControllerTests.js index 46427171da..0acd900b90 100644 --- a/services/web/test/unit/src/Project/ProjectControllerTests.js +++ b/services/web/test/unit/src/Project/ProjectControllerTests.js @@ -201,9 +201,6 @@ describe('ProjectController', function () { getCurrentAffiliations: sinon.stub().resolves([]), }, } - this.SubscriptionViewModelBuilder = { - getBestSubscription: sinon.stub().yields(null, { type: 'free' }), - } this.SurveyHandler = { getSurvey: sinon.stub().yields(null, {}), } @@ -303,6 +300,7 @@ describe('ProjectController', function () { translate() 
{}, }, ip: '192.170.18.1', + capabilitySet: new Set(['chat']), } this.res = { locals: { @@ -1088,34 +1086,12 @@ describe('ProjectController', function () { this.ProjectController.loadEditor(this.req, this.res) }) - describe('chatEnabled flag', function () { - it('should be set to false when the feature is disabled', function (done) { + describe('capabilitySet', function () { + it('should be passed as an array when loading the editor', function (done) { this.Features.hasFeature = sinon.stub().withArgs('chat').returns(false) this.res.render = (pageName, opts) => { - expect(opts.chatEnabled).to.be.false - done() - } - this.ProjectController.loadEditor(this.req, this.res) - }) - - it('should be set to false when the feature is enabled but the capability is not available', function (done) { - this.Features.hasFeature = sinon.stub().withArgs('chat').returns(false) - this.req.capabilitySet = new Set() - - this.res.render = (pageName, opts) => { - expect(opts.chatEnabled).to.be.false - done() - } - this.ProjectController.loadEditor(this.req, this.res) - }) - - it('should be set to true when the feature is enabled and the capability is available', function (done) { - this.Features.hasFeature = sinon.stub().withArgs('chat').returns(true) - this.req.capabilitySet = new Set(['chat']) - - this.res.render = (pageName, opts) => { - expect(opts.chatEnabled).to.be.true + expect(opts.capabilities).to.deep.equal(['chat']) done() } this.ProjectController.loadEditor(this.req, this.res) diff --git a/services/web/test/unit/src/Project/ProjectDeleterTests.js b/services/web/test/unit/src/Project/ProjectDeleterTests.js index 9e05a0f1a0..20f8cf2ead 100644 --- a/services/web/test/unit/src/Project/ProjectDeleterTests.js +++ b/services/web/test/unit/src/Project/ProjectDeleterTests.js @@ -99,10 +99,6 @@ describe('ProjectDeleter', function () { } this.db = { - deletedFiles: { - indexExists: sinon.stub().resolves(false), - deleteMany: sinon.stub(), - }, projects: { insertOne: sinon.stub().resolves(), }, diff --git a/services/web/test/unit/src/Project/ProjectEditorHandlerTests.js b/services/web/test/unit/src/Project/ProjectEditorHandlerTests.js index 0fb5b5fce4..8456fe2227 100644 --- a/services/web/test/unit/src/Project/ProjectEditorHandlerTests.js +++ b/services/web/test/unit/src/Project/ProjectEditorHandlerTests.js @@ -8,6 +8,7 @@ describe('ProjectEditorHandler', function () { beforeEach(function () { this.project = { _id: 'project-id', + owner_ref: 'owner-id', name: 'Project Name', rootDoc_id: 'file-id', publicAccesLevel: 'private', @@ -43,16 +44,19 @@ describe('ProjectEditorHandler', function () { }, ], } + this.ownerMember = { + user: (this.owner = { + _id: 'owner-id', + first_name: 'Owner', + last_name: 'Overleaf', + email: 'owner@overleaf.com', + features: { + compileTimeout: 240, + }, + }), + privilegeLevel: 'owner', + } this.members = [ - { - user: (this.owner = { - _id: 'owner-id', - first_name: 'Owner', - last_name: 'Overleaf', - email: 'owner@overleaf.com', - }), - privilegeLevel: 'owner', - }, { user: { _id: 'read-only-id', @@ -96,8 +100,10 @@ describe('ProjectEditorHandler', function () { beforeEach(function () { this.result = this.handler.buildProjectModelView( this.project, + this.ownerMember, this.members, - this.invites + this.invites, + false ) }) @@ -206,6 +212,93 @@ describe('ProjectEditorHandler', function () { expect(invite.token).not.to.exist } }) + + it('should have the correct features', function () { + expect(this.result.features.compileTimeout).to.equal(240) + }) + }) + + describe('with a 
restricted user', function () { + beforeEach(function () { + this.result = this.handler.buildProjectModelView( + this.project, + this.ownerMember, + [], + [], + true + ) + }) + + it('should include the id', function () { + expect(this.result._id).to.exist + this.result._id.should.equal('project-id') + }) + + it('should include the name', function () { + expect(this.result.name).to.exist + this.result.name.should.equal('Project Name') + }) + + it('should include the root doc id', function () { + expect(this.result.rootDoc_id).to.exist + this.result.rootDoc_id.should.equal('file-id') + }) + + it('should include the public access level', function () { + expect(this.result.publicAccesLevel).to.exist + this.result.publicAccesLevel.should.equal('private') + }) + + it('should hide the owner', function () { + expect(this.result.owner).to.deep.equal({ _id: 'owner-id' }) + }) + + it('should hide members', function () { + this.result.members.length.should.equal(0) + }) + + it('should include folders in the project', function () { + this.result.rootFolder[0]._id.should.equal('root-folder-id') + this.result.rootFolder[0].name.should.equal('') + + this.result.rootFolder[0].folders[0]._id.should.equal('sub-folder-id') + this.result.rootFolder[0].folders[0].name.should.equal('folder') + }) + + it('should not duplicate folder contents', function () { + this.result.rootFolder[0].docs.length.should.equal(0) + this.result.rootFolder[0].fileRefs.length.should.equal(0) + }) + + it('should include files in the project', function () { + this.result.rootFolder[0].folders[0].fileRefs[0]._id.should.equal( + 'file-id' + ) + this.result.rootFolder[0].folders[0].fileRefs[0].name.should.equal( + 'image.png' + ) + this.result.rootFolder[0].folders[0].fileRefs[0].created.should.equal( + this.created + ) + expect(this.result.rootFolder[0].folders[0].fileRefs[0].size).not.to + .exist + }) + + it('should include docs in the project but not the lines', function () { + this.result.rootFolder[0].folders[0].docs[0]._id.should.equal('doc-id') + this.result.rootFolder[0].folders[0].docs[0].name.should.equal( + 'main.tex' + ) + expect(this.result.rootFolder[0].folders[0].docs[0].lines).not.to.exist + }) + + it('should hide invites', function () { + expect(this.result.invites).to.have.length(0) + }) + + it('should have the correct features', function () { + expect(this.result.features.compileTimeout).to.equal(240) + }) }) describe('deletedByExternalDataSource', function () { @@ -213,8 +306,10 @@ describe('ProjectEditorHandler', function () { delete this.project.deletedByExternalDataSource const result = this.handler.buildProjectModelView( this.project, + this.ownerMember, this.members, - [] + [], + false ) result.deletedByExternalDataSource.should.equal(false) }) @@ -222,8 +317,10 @@ describe('ProjectEditorHandler', function () { it('should set the deletedByExternalDataSource flag to false when it is false', function () { const result = this.handler.buildProjectModelView( this.project, + this.ownerMember, this.members, - [] + [], + false ) result.deletedByExternalDataSource.should.equal(false) }) @@ -232,8 +329,10 @@ describe('ProjectEditorHandler', function () { this.project.deletedByExternalDataSource = true const result = this.handler.buildProjectModelView( this.project, + this.ownerMember, this.members, - [] + [], + false ) result.deletedByExternalDataSource.should.equal(true) }) @@ -249,8 +348,10 @@ describe('ProjectEditorHandler', function () { } this.result = this.handler.buildProjectModelView( this.project, + 
this.ownerMember, this.members, - [] + [], + false ) }) @@ -278,8 +379,10 @@ describe('ProjectEditorHandler', function () { } this.result = this.handler.buildProjectModelView( this.project, + this.ownerMember, this.members, - [] + [], + false ) }) it('should not emit trackChangesState', function () { @@ -302,8 +405,10 @@ describe('ProjectEditorHandler', function () { this.project.track_changes = dbEntry this.result = this.handler.buildProjectModelView( this.project, + this.ownerMember, this.members, - [] + [], + false ) }) it(`should set trackChangesState=${expected}`, function () { @@ -322,66 +427,4 @@ describe('ProjectEditorHandler', function () { }) }) }) - - describe('buildOwnerAndMembersViews', function () { - beforeEach(function () { - this.owner.features = { - versioning: true, - collaborators: 3, - compileGroup: 'priority', - compileTimeout: 22, - } - this.result = this.handler.buildOwnerAndMembersViews(this.members) - }) - - it('should produce an object with the right keys', function () { - expect(this.result).to.have.all.keys([ - 'owner', - 'ownerFeatures', - 'members', - ]) - }) - - it('should separate the owner from the members', function () { - this.result.members.length.should.equal(this.members.length - 1) - expect(this.result.owner._id).to.equal(this.owner._id) - expect(this.result.owner.email).to.equal(this.owner.email) - expect( - this.result.members.filter(m => m._id === this.owner._id).length - ).to.equal(0) - }) - - it('should extract the ownerFeatures from the owner object', function () { - expect(this.result.ownerFeatures).to.deep.equal(this.owner.features) - }) - - describe('when there is no owner', function () { - beforeEach(function () { - // remove the owner from members list - this.membersWithoutOwner = this.members.filter( - m => m.user._id !== this.owner._id - ) - this.result = this.handler.buildOwnerAndMembersViews( - this.membersWithoutOwner - ) - }) - - it('should produce an object with the right keys', function () { - expect(this.result).to.have.all.keys([ - 'owner', - 'ownerFeatures', - 'members', - ]) - }) - - it('should not separate out an owner', function () { - this.result.members.length.should.equal(this.membersWithoutOwner.length) - expect(this.result.owner).to.equal(null) - }) - - it('should not extract the ownerFeatures from the owner object', function () { - expect(this.result.ownerFeatures).to.equal(null) - }) - }) - }) }) diff --git a/services/web/test/unit/src/Project/ProjectEntityMongoUpdateHandlerTests.js b/services/web/test/unit/src/Project/ProjectEntityMongoUpdateHandlerTests.js index b1b29c5145..ce6fa4ccc6 100644 --- a/services/web/test/unit/src/Project/ProjectEntityMongoUpdateHandlerTests.js +++ b/services/web/test/unit/src/Project/ProjectEntityMongoUpdateHandlerTests.js @@ -4,7 +4,6 @@ const tk = require('timekeeper') const Errors = require('../../../../app/src/Features/Errors/Errors') const { ObjectId } = require('mongodb-legacy') const SandboxedModule = require('sandboxed-module') -const { DeletedFile } = require('../helpers/models/DeletedFile') const { Project } = require('../helpers/models/Project') const MODULE_PATH = @@ -77,7 +76,6 @@ describe('ProjectEntityMongoUpdateHandler', function () { } this.FolderModel = sinon.stub() - this.DeletedFileMock = sinon.mock(DeletedFile) this.ProjectMock = sinon.mock(Project) this.ProjectEntityHandler = { getAllEntitiesFromProject: sinon.stub(), @@ -197,7 +195,6 @@ describe('ProjectEntityMongoUpdateHandler', function () { '../Cooldown/CooldownManager': this.CooldownManager, 
'../../models/Folder': { Folder: this.FolderModel }, '../../infrastructure/LockManager': this.LockManager, - '../../models/DeletedFile': { DeletedFile }, '../../models/Project': { Project }, './ProjectEntityHandler': this.ProjectEntityHandler, './ProjectLocator': this.ProjectLocator, @@ -208,7 +205,6 @@ describe('ProjectEntityMongoUpdateHandler', function () { }) afterEach(function () { - this.DeletedFileMock.restore() this.ProjectMock.restore() tk.reset() }) @@ -374,17 +370,6 @@ describe('ProjectEntityMongoUpdateHandler', function () { linkedFileData: { some: 'data' }, hash: 'some-hash', } - // Add a deleted file record - this.DeletedFileMock.expects('create') - .withArgs({ - projectId: this.project._id, - _id: this.file._id, - name: this.file.name, - linkedFileData: this.file.linkedFileData, - hash: this.file.hash, - deletedAt: sinon.match.date, - }) - .resolves() // Update the file in place this.ProjectMock.expects('findOneAndUpdate') .withArgs( @@ -421,7 +406,6 @@ describe('ProjectEntityMongoUpdateHandler', function () { }) it('updates the database', function () { - this.DeletedFileMock.verify() this.ProjectMock.verify() }) }) @@ -1059,29 +1043,6 @@ describe('ProjectEntityMongoUpdateHandler', function () { }) }) - describe('_insertDeletedFileReference', function () { - beforeEach(async function () { - this.DeletedFileMock.expects('create') - .withArgs({ - projectId: this.project._id, - _id: this.file._id, - name: this.file.name, - linkedFileData: this.file.linkedFileData, - hash: this.file.hash, - deletedAt: sinon.match.date, - }) - .resolves() - await this.subject.promises._insertDeletedFileReference( - this.project._id, - this.file - ) - }) - - it('should update the database', function () { - this.DeletedFileMock.verify() - }) - }) - describe('createNewFolderStructure', function () { beforeEach(function () { this.mockRootFolder = 'MOCK_ROOT_FOLDER' diff --git a/services/web/test/unit/src/Project/ProjectEntityUpdateHandlerTests.js b/services/web/test/unit/src/Project/ProjectEntityUpdateHandlerTests.js index 6cfe01e206..72c5080d62 100644 --- a/services/web/test/unit/src/Project/ProjectEntityUpdateHandlerTests.js +++ b/services/web/test/unit/src/Project/ProjectEntityUpdateHandlerTests.js @@ -133,7 +133,6 @@ describe('ProjectEntityUpdateHandler', function () { addFolder: sinon.stub(), _confirmFolder: sinon.stub(), _putElement: sinon.stub(), - _insertDeletedFileReference: sinon.stub(), replaceFileWithNew: sinon.stub(), mkdirp: sinon.stub(), moveEntity: sinon.stub(), @@ -2572,7 +2571,6 @@ describe('ProjectEntityUpdateHandler', function () { this.ProjectEntityUpdateHandler.promises.unsetRootDoc = sinon .stub() .resolves() - this.ProjectEntityMongoUpdateHandler.promises._insertDeletedFileReference.resolves() }) describe('a file', function () { @@ -2592,12 +2590,6 @@ describe('ProjectEntityUpdateHandler', function () { ) }) - it('should insert the file into the deletedFiles collection', function () { - this.ProjectEntityMongoUpdateHandler.promises._insertDeletedFileReference - .calledWith(this.project._id, this.entity) - .should.equal(true) - }) - it('should not delete the file from FileStoreHandler', function () { this.FileStoreHandler.promises.deleteFile .calledWith(projectId, this.entityId) @@ -2696,7 +2688,6 @@ describe('ProjectEntityUpdateHandler', function () { } this.ProjectEntityUpdateHandler._cleanUpDoc = sinon.stub().resolves() - this.ProjectEntityUpdateHandler._cleanUpFile = sinon.stub().resolves() const path = '/folder' this.newProject = 'new-project' this.subtreeListing = @@ 
-2711,17 +2702,6 @@ describe('ProjectEntityUpdateHandler', function () { ) }) - it('should clean up all sub files', function () { - this.ProjectEntityUpdateHandler._cleanUpFile.should.have.been.calledWith( - this.project, - this.file1 - ) - this.ProjectEntityUpdateHandler._cleanUpFile.should.have.been.calledWith( - this.project, - this.file2 - ) - }) - it('should clean up all sub docs', function () { this.ProjectEntityUpdateHandler._cleanUpDoc .calledWith( diff --git a/services/web/test/unit/src/Project/ProjectListController.test.mjs b/services/web/test/unit/src/Project/ProjectListController.test.mjs new file mode 100644 index 0000000000..ae1bc72210 --- /dev/null +++ b/services/web/test/unit/src/Project/ProjectListController.test.mjs @@ -0,0 +1,865 @@ +import { expect, vi } from 'vitest' +import sinon from 'sinon' +import mongodb from 'mongodb-legacy' +import Errors from '../../../../app/src/Features/Errors/Errors.js' + +const ObjectId = mongodb.ObjectId + +const MODULE_PATH = new URL( + '../../../../app/src/Features/Project/ProjectListController', + import.meta.url +).pathname + +describe('ProjectListController', function () { + beforeEach(async function (ctx) { + ctx.project_id = new ObjectId('abcdefabcdefabcdefabcdef') + + ctx.user = { + _id: new ObjectId('123456123456123456123456'), + email: 'test@overleaf.com', + first_name: 'bjkdsjfk', + features: {}, + emails: [{ email: 'test@overleaf.com' }], + lastLoginIp: '111.111.111.112', + } + ctx.users = { + 'user-1': { + first_name: 'James', + }, + 'user-2': { + first_name: 'Henry', + }, + } + ctx.users[ctx.user._id] = ctx.user // Owner + ctx.usersArr = Object.entries(ctx.users).map(([key, value]) => ({ + _id: key, + ...value, + })) + ctx.tags = [ + { name: 1, project_ids: ['1', '2', '3'] }, + { name: 2, project_ids: ['a', '1'] }, + { name: 3, project_ids: ['a', 'b', 'c', 'd'] }, + ] + ctx.notifications = [ + { + _id: '1', + user_id: '2', + templateKey: '3', + messageOpts: '4', + key: '5', + }, + ] + ctx.settings = { + siteUrl: 'https://overleaf.com', + } + ctx.TagsHandler = { + promises: { + getAllTags: sinon.stub().resolves(ctx.tags), + }, + } + ctx.NotificationsHandler = { + promises: { + getUserNotifications: sinon.stub().resolves(ctx.notifications), + }, + } + ctx.UserModel = { + findById: sinon.stub().resolves(ctx.user), + } + ctx.UserPrimaryEmailCheckHandler = { + requiresPrimaryEmailCheck: sinon.stub().returns(false), + } + ctx.ProjectGetter = { + promises: { + findAllUsersProjects: sinon.stub(), + }, + } + ctx.ProjectHelper = { + isArchived: sinon.stub(), + isTrashed: sinon.stub(), + } + ctx.SessionManager = { + getLoggedInUserId: sinon.stub().returns(ctx.user._id), + } + ctx.UserController = { + logout: sinon.stub(), + } + ctx.UserGetter = { + promises: { + getUsers: sinon.stub().resolves(ctx.usersArr), + getUserFullEmails: sinon.stub().resolves([]), + }, + } + ctx.Features = { + hasFeature: sinon.stub(), + } + ctx.Metrics = { + inc: sinon.stub(), + } + ctx.SplitTestHandler = { + promises: { + getAssignment: sinon.stub().resolves({ variant: 'default' }), + hasUserBeenAssignedToVariant: sinon.stub().resolves(false), + }, + } + ctx.SplitTestSessionHandler = { + promises: { + sessionMaintenance: sinon.stub().resolves(), + }, + } + ctx.SubscriptionViewModelBuilder = { + promises: { + getUsersSubscriptionDetails: sinon.stub().resolves({ + bestSubscription: { type: 'free' }, + individualSubscription: null, + memberGroupSubscriptions: [], + }), + }, + } + ctx.SurveyHandler = { + promises: { + getSurvey: sinon.stub().resolves({}), + }, + } 
+ ctx.NotificationBuilder = { + promises: { + ipMatcherAffiliation: sinon.stub().returns({ create: sinon.stub() }), + }, + } + ctx.GeoIpLookup = { + promises: { + getCurrencyCode: sinon.stub().resolves({ + countryCode: 'US', + currencyCode: 'USD', + }), + }, + } + ctx.TutorialHandler = { + getInactiveTutorials: sinon.stub().returns([]), + } + + ctx.Modules = { + promises: { + hooks: { + fire: sinon.stub().resolves([]), + }, + }, + } + + vi.doMock('mongodb-legacy', () => ({ + default: { ObjectId }, + })) + + vi.doMock('@overleaf/settings', () => ({ + default: ctx.settings, + })) + + vi.doMock('@overleaf/metrics', () => ({ + default: ctx.Metrics, + })) + + vi.doMock( + '../../../../app/src/Features/SplitTests/SplitTestHandler', + () => ({ + default: ctx.SplitTestHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/SplitTests/SplitTestSessionHandler', + () => ({ + default: ctx.SplitTestSessionHandler, + }) + ) + + vi.doMock('../../../../app/src/Features/User/UserController', () => ({ + default: ctx.UserController, + })) + + vi.doMock('../../../../app/src/Features/Project/ProjectHelper', () => ({ + default: ctx.ProjectHelper, + })) + + vi.doMock('../../../../app/src/Features/Tags/TagsHandler', () => ({ + default: ctx.TagsHandler, + })) + + vi.doMock( + '../../../../app/src/Features/Notifications/NotificationsHandler', + () => ({ + default: ctx.NotificationsHandler, + }) + ) + + vi.doMock('../../../../app/src/models/User', () => ({ + User: ctx.UserModel, + })) + + vi.doMock('../../../../app/src/Features/Project/ProjectGetter', () => ({ + default: ctx.ProjectGetter, + })) + + vi.doMock( + '../../../../app/src/Features/Authentication/SessionManager', + () => ({ + default: ctx.SessionManager, + }) + ) + + vi.doMock('../../../../app/src/infrastructure/Features', () => ({ + default: ctx.Features, + })) + + vi.doMock('../../../../app/src/Features/User/UserGetter', () => ({ + default: ctx.UserGetter, + })) + + vi.doMock( + '../../../../app/src/Features/Subscription/SubscriptionViewModelBuilder', + () => ({ + default: ctx.SubscriptionViewModelBuilder, + }) + ) + + vi.doMock('../../../../app/src/infrastructure/Modules', () => ({ + default: ctx.Modules, + })) + + vi.doMock('../../../../app/src/Features/Survey/SurveyHandler', () => ({ + default: ctx.SurveyHandler, + })) + + vi.doMock( + '../../../../app/src/Features/User/UserPrimaryEmailCheckHandler', + () => ({ + default: ctx.UserPrimaryEmailCheckHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Notifications/NotificationsBuilder', + () => ({ + default: ctx.NotificationBuilder, + }) + ) + + vi.doMock('../../../../app/src/infrastructure/GeoIpLookup', () => ({ + default: ctx.GeoIpLookup, + })) + + vi.doMock('../../../../app/src/Features/Tutorial/TutorialHandler', () => ({ + default: ctx.TutorialHandler, + })) + + ctx.ProjectListController = (await import(MODULE_PATH)).default + + ctx.req = { + query: {}, + params: { + Project_id: ctx.project_id, + }, + headers: {}, + session: { + user: ctx.user, + }, + body: {}, + i18n: { + translate() {}, + }, + } + ctx.res = {} + }) + + describe('projectListPage', function () { + beforeEach(function (ctx) { + ctx.projects = [ + { _id: 1, lastUpdated: 1, owner_ref: 'user-1' }, + { + _id: 2, + lastUpdated: 2, + owner_ref: 'user-2', + lastUpdatedBy: 'user-1', + }, + ] + ctx.readAndWrite = [{ _id: 5, lastUpdated: 5, owner_ref: 'user-1' }] + ctx.readOnly = [{ _id: 3, lastUpdated: 3, owner_ref: 'user-1' }] + ctx.tokenReadAndWrite = [{ _id: 6, lastUpdated: 5, owner_ref: 'user-4' }] + ctx.tokenReadOnly 
= [{ _id: 7, lastUpdated: 4, owner_ref: 'user-5' }] + ctx.review = [{ _id: 8, lastUpdated: 4, owner_ref: 'user-6' }] + ctx.allProjects = { + owned: ctx.projects, + readAndWrite: ctx.readAndWrite, + readOnly: ctx.readOnly, + tokenReadAndWrite: ctx.tokenReadAndWrite, + tokenReadOnly: ctx.tokenReadOnly, + review: ctx.review, + } + + ctx.ProjectGetter.promises.findAllUsersProjects.resolves(ctx.allProjects) + }) + + it('should render the project/list-react page', function (ctx) { + return new Promise(resolve => { + ctx.res.render = (pageName, opts) => { + pageName.should.equal('project/list-react') + resolve() + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + }) + + it('should invoke the session maintenance', function (ctx) { + return new Promise(resolve => { + ctx.Features.hasFeature.withArgs('saas').returns(true) + ctx.res.render = () => { + ctx.SplitTestSessionHandler.promises.sessionMaintenance.should.have.been.calledWith( + ctx.req, + ctx.user + ) + resolve() + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + }) + + it('should send the tags', function (ctx) { + return new Promise(resolve => { + ctx.res.render = (pageName, opts) => { + opts.tags.length.should.equal(ctx.tags.length) + resolve() + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + }) + + it('should create trigger ip matcher notifications', function (ctx) { + return new Promise(resolve => { + ctx.settings.overleaf = true + ctx.req.ip = '111.111.111.111' + ctx.res.render = (pageName, opts) => { + ctx.NotificationBuilder.promises.ipMatcherAffiliation.called.should.equal( + true + ) + resolve() + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + }) + + it('should send the projects', function (ctx) { + return new Promise(resolve => { + ctx.res.render = (pageName, opts) => { + opts.prefetchedProjectsBlob.projects.length.should.equal( + ctx.projects.length + + ctx.readAndWrite.length + + ctx.readOnly.length + + ctx.tokenReadAndWrite.length + + ctx.tokenReadOnly.length + + ctx.review.length + ) + resolve() + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + }) + + it('should send the user', function (ctx) { + return new Promise(resolve => { + ctx.res.render = (pageName, opts) => { + opts.user.should.deep.equal(ctx.user) + resolve() + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + }) + + it('should inject the users', function (ctx) { + return new Promise(resolve => { + ctx.res.render = (pageName, opts) => { + const projects = opts.prefetchedProjectsBlob.projects + + projects + .filter(p => p.id === '1')[0] + .owner.firstName.should.equal( + ctx.users[ctx.projects.filter(p => p._id === 1)[0].owner_ref] + .first_name + ) + projects + .filter(p => p.id === '2')[0] + .owner.firstName.should.equal( + ctx.users[ctx.projects.filter(p => p._id === 2)[0].owner_ref] + .first_name + ) + projects + .filter(p => p.id === '2')[0] + .lastUpdatedBy.firstName.should.equal( + ctx.users[ctx.projects.filter(p => p._id === 2)[0].lastUpdatedBy] + .first_name + ) + resolve() + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + }) + + it("should send the user's best subscription when saas feature present", function (ctx) { + return new Promise(resolve => { + ctx.Features.hasFeature.withArgs('saas').returns(true) + ctx.res.render = (pageName, opts) => { + expect(opts.usersBestSubscription).to.deep.include({ type: 'free' }) + resolve() + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + }) + 
+ it('should not return a best subscription without saas feature', function (ctx) { + return new Promise(resolve => { + ctx.Features.hasFeature.withArgs('saas').returns(false) + ctx.res.render = (pageName, opts) => { + expect(opts.usersBestSubscription).to.be.undefined + resolve() + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + }) + + it('should show INR Banner for Indian users with free account', function (ctx) { + return new Promise(resolve => { + // usersBestSubscription is only available when saas feature is present + ctx.Features.hasFeature.withArgs('saas').returns(true) + ctx.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails.resolves( + { + bestSubscription: { + type: 'free', + }, + } + ) + ctx.GeoIpLookup.promises.getCurrencyCode.resolves({ + countryCode: 'IN', + }) + ctx.res.render = (pageName, opts) => { + expect(opts.showInrGeoBanner).to.be.true + resolve() + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + }) + + it('should not show INR Banner for Indian users with premium account', function (ctx) { + return new Promise(resolve => { + // usersBestSubscription is only available when saas feature is present + ctx.Features.hasFeature.withArgs('saas').returns(true) + ctx.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails.resolves( + { + bestSubscription: { + type: 'individual', + }, + } + ) + ctx.GeoIpLookup.promises.getCurrencyCode.resolves({ + countryCode: 'IN', + }) + ctx.res.render = (pageName, opts) => { + expect(opts.showInrGeoBanner).to.be.false + resolve() + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + }) + + describe('With Institution SSO feature', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.institutionEmail = 'test@overleaf.com' + ctx.institutionName = 'Overleaf' + ctx.Features.hasFeature.withArgs('saml').returns(true) + ctx.Features.hasFeature.withArgs('affiliations').returns(true) + ctx.Features.hasFeature.withArgs('saas').returns(true) + resolve() + }) + }) + it('should show institution SSO available notification for confirmed domains', function (ctx) { + ctx.UserGetter.promises.getUserFullEmails.resolves([ + { + email: 'test@overleaf.com', + affiliation: { + institution: { + id: 1, + confirmed: true, + name: 'Overleaf', + ssoBeta: false, + ssoEnabled: true, + }, + }, + }, + ]) + ctx.res.render = (pageName, opts) => { + expect(opts.notificationsInstitution).to.deep.include({ + email: ctx.institutionEmail, + institutionId: 1, + institutionName: ctx.institutionName, + templateKey: 'notification_institution_sso_available', + }) + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + it('should show a linked notification', function (ctx) { + ctx.req.session.saml = { + institutionEmail: ctx.institutionEmail, + linked: { + hasEntitlement: false, + universityName: ctx.institutionName, + }, + } + ctx.res.render = (pageName, opts) => { + expect(opts.notificationsInstitution).to.deep.include({ + email: ctx.institutionEmail, + institutionName: ctx.institutionName, + templateKey: 'notification_institution_sso_linked', + }) + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + it('should show a linked another email notification', function (ctx) { + // when they request to link an email but the institution returns + // a different email + ctx.res.render = (pageName, opts) => { + expect(opts.notificationsInstitution).to.deep.include({ + institutionEmail: ctx.institutionEmail, + requestedEmail: 
'requested@overleaf.com', + templateKey: 'notification_institution_sso_non_canonical', + }) + } + ctx.req.session.saml = { + emailNonCanonical: ctx.institutionEmail, + institutionEmail: ctx.institutionEmail, + requestedEmail: 'requested@overleaf.com', + linked: { + hasEntitlement: false, + universityName: ctx.institutionName, + }, + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + + it('should show a notification when intent was to register via SSO but account existed', function (ctx) { + ctx.res.render = (pageName, opts) => { + expect(opts.notificationsInstitution).to.deep.include({ + email: ctx.institutionEmail, + templateKey: 'notification_institution_sso_already_registered', + }) + } + ctx.req.session.saml = { + institutionEmail: ctx.institutionEmail, + linked: { + hasEntitlement: false, + universityName: 'Overleaf', + }, + registerIntercept: { + id: 1, + name: 'Example University', + }, + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + + it('should not show a register notification if the flow was abandoned', function (ctx) { + // could initially start to register with an SSO email and then + // abandon flow and login with an existing non-institution SSO email + ctx.res.render = (pageName, opts) => { + expect(opts.notificationsInstitution).to.deep.not.include({ + email: 'test@overleaf.com', + templateKey: 'notification_institution_sso_already_registered', + }) + } + ctx.req.session.saml = { + registerIntercept: { + id: 1, + name: 'Example University', + }, + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + + it('should show error notification', function (ctx) { + ctx.res.render = (pageName, opts) => { + expect(opts.notificationsInstitution.length).to.equal(1) + expect(opts.notificationsInstitution[0].templateKey).to.equal( + 'notification_institution_sso_error' + ) + expect(opts.notificationsInstitution[0].error).to.be.instanceof( + Errors.SAMLAlreadyLinkedError + ) + } + ctx.req.session.saml = { + institutionEmail: ctx.institutionEmail, + error: new Errors.SAMLAlreadyLinkedError(), + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + + describe('for an unconfirmed domain for an SSO institution', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.UserGetter.promises.getUserFullEmails.resolves([ + { + email: 'test@overleaf-uncofirmed.com', + affiliation: { + institution: { + id: 1, + confirmed: false, + name: 'Overleaf', + ssoBeta: false, + ssoEnabled: true, + }, + }, + }, + ]) + resolve() + }) + }) + it('should not show institution SSO available notification', function (ctx) { + ctx.res.render = (pageName, opts) => { + expect(opts.notificationsInstitution.length).to.equal(0) + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + }) + describe('when linking/logging in initiated on institution side', function () { + it('should not show a linked another email notification', function (ctx) { + // this is only used when initated on Overleaf, + // because we keep track of the requested email they tried to link + ctx.res.render = (pageName, opts) => { + expect(opts.notificationsInstitution).to.not.deep.include({ + institutionEmail: ctx.institutionEmail, + requestedEmail: undefined, + templateKey: 'notification_institution_sso_non_canonical', + }) + } + ctx.req.session.saml = { + emailNonCanonical: ctx.institutionEmail, + institutionEmail: ctx.institutionEmail, + linked: { + hasEntitlement: false, + universityName: ctx.institutionName, + }, + } + 
ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + }) + describe('Institution with SSO beta testable', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.UserGetter.promises.getUserFullEmails.resolves([ + { + email: 'beta@beta.com', + affiliation: { + institution: { + id: 2, + confirmed: true, + name: 'Beta University', + ssoBeta: true, + ssoEnabled: false, + }, + }, + }, + ]) + resolve() + }) + }) + it('should show institution SSO available notification when on a beta testing session', function (ctx) { + ctx.req.session.samlBeta = true + ctx.res.render = (pageName, opts) => { + expect(opts.notificationsInstitution).to.deep.include({ + email: 'beta@beta.com', + institutionId: 2, + institutionName: 'Beta University', + templateKey: 'notification_institution_sso_available', + }) + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + it('should not show institution SSO available notification when not on a beta testing session', function (ctx) { + ctx.req.session.samlBeta = false + ctx.res.render = (pageName, opts) => { + expect(opts.notificationsInstitution).to.deep.not.include({ + email: 'test@overleaf.com', + institutionId: 1, + institutionName: 'Overleaf', + templateKey: 'notification_institution_sso_available', + }) + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + }) + }) + + describe('Without Institution SSO feature', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.Features.hasFeature.withArgs('saml').returns(false) + resolve() + }) + }) + it('should not show institution sso available notification', function (ctx) { + ctx.res.render = (pageName, opts) => { + expect(opts.notificationsInstitution).to.deep.not.include({ + email: 'test@overleaf.com', + institutionId: 1, + institutionName: 'Overleaf', + templateKey: 'notification_institution_sso_available', + }) + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + }) + + describe('enterprise banner', function () { + beforeEach(function (ctx) { + ctx.Features.hasFeature.withArgs('saas').returns(true) + ctx.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails.resolves( + { memberGroupSubscriptions: [] } + ) + ctx.UserGetter.promises.getUserFullEmails.resolves([ + { + email: 'test@test-domain.com', + }, + ]) + }) + + describe('normal enterprise banner', function () { + it('shows banner', function (ctx) { + ctx.res.render = (pageName, opts) => { + expect(opts.showGroupsAndEnterpriseBanner).to.be.true + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + + it('does not show banner if user is part of any affiliation', function (ctx) { + ctx.UserGetter.promises.getUserFullEmails.resolves([ + { + email: 'test@overleaf.com', + affiliation: { + licence: 'pro_plus', + institution: { + id: 1, + confirmed: true, + name: 'Overleaf', + ssoBeta: false, + ssoEnabled: true, + }, + }, + }, + ]) + + ctx.res.render = (pageName, opts) => { + expect(opts.showGroupsAndEnterpriseBanner).to.be.false + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + + it('does not show banner if user is part of any group subscription', function (ctx) { + ctx.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails.resolves( + { memberGroupSubscriptions: [{}] } + ) + + ctx.res.render = (pageName, opts) => { + expect(opts.showGroupsAndEnterpriseBanner).to.be.false + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + + it('have a banner variant of "FOMO" or 
"on-premise"', function (ctx) { + ctx.res.render = (pageName, opts) => { + expect(opts.groupsAndEnterpriseBannerVariant).to.be.oneOf([ + 'FOMO', + 'on-premise', + ]) + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + }) + + describe('US government enterprise banner', function () { + it('does not show enterprise banner if US government enterprise banner is shown', function (ctx) { + const emails = [ + { + email: 'test@test.mil', + confirmedAt: new Date('2024-01-01'), + }, + ] + + ctx.UserGetter.promises.getUserFullEmails.resolves(emails) + ctx.Modules.promises.hooks.fire + .withArgs('getUSGovBanner', emails, false, []) + .resolves([ + { + showUSGovBanner: true, + usGovBannerVariant: 'variant', + }, + ]) + ctx.res.render = (pageName, opts) => { + expect(opts.showGroupsAndEnterpriseBanner).to.be.false + expect(opts.showUSGovBanner).to.be.true + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + }) + }) + }) + + describe('projectListReactPage with duplicate projects', function () { + beforeEach(function (ctx) { + ctx.projects = [ + { _id: 1, lastUpdated: 1, owner_ref: 'user-1' }, + { _id: 2, lastUpdated: 2, owner_ref: 'user-2' }, + ] + ctx.readAndWrite = [{ _id: 5, lastUpdated: 5, owner_ref: 'user-1' }] + ctx.readOnly = [{ _id: 3, lastUpdated: 3, owner_ref: 'user-1' }] + ctx.tokenReadAndWrite = [{ _id: 6, lastUpdated: 5, owner_ref: 'user-4' }] + ctx.tokenReadOnly = [ + { _id: 6, lastUpdated: 5, owner_ref: 'user-4' }, // Also in tokenReadAndWrite + { _id: 7, lastUpdated: 4, owner_ref: 'user-5' }, + ] + ctx.review = [{ _id: 8, lastUpdated: 5, owner_ref: 'user-6' }] + ctx.allProjects = { + owned: ctx.projects, + readAndWrite: ctx.readAndWrite, + readOnly: ctx.readOnly, + tokenReadAndWrite: ctx.tokenReadAndWrite, + tokenReadOnly: ctx.tokenReadOnly, + review: ctx.review, + } + + ctx.ProjectGetter.promises.findAllUsersProjects.resolves(ctx.allProjects) + }) + + it('should render the project/list-react page', function (ctx) { + return new Promise(resolve => { + ctx.res.render = (pageName, opts) => { + pageName.should.equal('project/list-react') + resolve() + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + }) + + it('should omit one of the projects', function (ctx) { + return new Promise(resolve => { + ctx.res.render = (pageName, opts) => { + opts.prefetchedProjectsBlob.projects.length.should.equal( + ctx.projects.length + + ctx.readAndWrite.length + + ctx.readOnly.length + + ctx.tokenReadAndWrite.length + + ctx.tokenReadOnly.length + + ctx.review.length - + 1 + ) + resolve() + } + ctx.ProjectListController.projectListPage(ctx.req, ctx.res) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/Project/ProjectListControllerTests.mjs b/services/web/test/unit/src/Project/ProjectListControllerTests.mjs deleted file mode 100644 index 827d16b737..0000000000 --- a/services/web/test/unit/src/Project/ProjectListControllerTests.mjs +++ /dev/null @@ -1,762 +0,0 @@ -import esmock from 'esmock' -import sinon from 'sinon' -import { expect } from 'chai' -import mongodb from 'mongodb-legacy' -import Errors from '../../../../app/src/Features/Errors/Errors.js' - -const ObjectId = mongodb.ObjectId - -const MODULE_PATH = new URL( - '../../../../app/src/Features/Project/ProjectListController', - import.meta.url -).pathname - -describe('ProjectListController', function () { - beforeEach(async function () { - this.project_id = new ObjectId('abcdefabcdefabcdefabcdef') - - this.user = { - _id: new ObjectId('123456123456123456123456'), - email: 
'test@overleaf.com', - first_name: 'bjkdsjfk', - features: {}, - emails: [{ email: 'test@overleaf.com' }], - lastLoginIp: '111.111.111.112', - } - this.users = { - 'user-1': { - first_name: 'James', - }, - 'user-2': { - first_name: 'Henry', - }, - } - this.users[this.user._id] = this.user // Owner - this.usersArr = Object.entries(this.users).map(([key, value]) => ({ - _id: key, - ...value, - })) - this.tags = [ - { name: 1, project_ids: ['1', '2', '3'] }, - { name: 2, project_ids: ['a', '1'] }, - { name: 3, project_ids: ['a', 'b', 'c', 'd'] }, - ] - this.notifications = [ - { - _id: '1', - user_id: '2', - templateKey: '3', - messageOpts: '4', - key: '5', - }, - ] - this.settings = { - siteUrl: 'https://overleaf.com', - } - this.TagsHandler = { - promises: { - getAllTags: sinon.stub().resolves(this.tags), - }, - } - this.NotificationsHandler = { - promises: { - getUserNotifications: sinon.stub().resolves(this.notifications), - }, - } - this.UserModel = { - findById: sinon.stub().resolves(this.user), - } - this.UserPrimaryEmailCheckHandler = { - requiresPrimaryEmailCheck: sinon.stub().returns(false), - } - this.ProjectGetter = { - promises: { - findAllUsersProjects: sinon.stub(), - }, - } - this.ProjectHelper = { - isArchived: sinon.stub(), - isTrashed: sinon.stub(), - } - this.SessionManager = { - getLoggedInUserId: sinon.stub().returns(this.user._id), - } - this.UserController = { - logout: sinon.stub(), - } - this.UserGetter = { - promises: { - getUsers: sinon.stub().resolves(this.usersArr), - getUserFullEmails: sinon.stub().resolves([]), - }, - } - this.Features = { - hasFeature: sinon.stub(), - } - this.Metrics = { - inc: sinon.stub(), - } - this.SplitTestHandler = { - promises: { - getAssignment: sinon.stub().resolves({ variant: 'default' }), - }, - } - this.SplitTestSessionHandler = { - promises: { - sessionMaintenance: sinon.stub().resolves(), - }, - } - this.SubscriptionViewModelBuilder = { - promises: { - getUsersSubscriptionDetails: sinon.stub().resolves({ - bestSubscription: { type: 'free' }, - individualSubscription: null, - memberGroupSubscriptions: [], - }), - }, - } - this.SurveyHandler = { - promises: { - getSurvey: sinon.stub().resolves({}), - }, - } - this.NotificationBuilder = { - promises: { - ipMatcherAffiliation: sinon.stub().returns({ create: sinon.stub() }), - }, - } - this.GeoIpLookup = { - promises: { - getCurrencyCode: sinon.stub().resolves({ - countryCode: 'US', - currencyCode: 'USD', - }), - }, - } - this.TutorialHandler = { - getInactiveTutorials: sinon.stub().returns([]), - } - - this.Modules = { - promises: { - hooks: { - fire: sinon.stub().resolves([]), - }, - }, - } - - this.ProjectListController = await esmock.strict(MODULE_PATH, { - 'mongodb-legacy': { ObjectId }, - '@overleaf/settings': this.settings, - '@overleaf/metrics': this.Metrics, - '../../../../app/src/Features/SplitTests/SplitTestHandler': - this.SplitTestHandler, - '../../../../app/src/Features/SplitTests/SplitTestSessionHandler': - this.SplitTestSessionHandler, - '../../../../app/src/Features/User/UserController': this.UserController, - '../../../../app/src/Features/Project/ProjectHelper': this.ProjectHelper, - '../../../../app/src/Features/Tags/TagsHandler': this.TagsHandler, - '../../../../app/src/Features/Notifications/NotificationsHandler': - this.NotificationsHandler, - '../../../../app/src/models/User': { User: this.UserModel }, - '../../../../app/src/Features/Project/ProjectGetter': this.ProjectGetter, - '../../../../app/src/Features/Authentication/SessionManager': - 
this.SessionManager, - '../../../../app/src/infrastructure/Features': this.Features, - '../../../../app/src/Features/User/UserGetter': this.UserGetter, - '../../../../app/src/Features/Subscription/SubscriptionViewModelBuilder': - this.SubscriptionViewModelBuilder, - '../../../../app/src/infrastructure/Modules': this.Modules, - '../../../../app/src/Features/Survey/SurveyHandler': this.SurveyHandler, - '../../../../app/src/Features/User/UserPrimaryEmailCheckHandler': - this.UserPrimaryEmailCheckHandler, - '../../../../app/src/Features/Notifications/NotificationsBuilder': - this.NotificationBuilder, - '../../../../app/src/infrastructure/GeoIpLookup': this.GeoIpLookup, - '../../../../app/src/Features/Tutorial/TutorialHandler': - this.TutorialHandler, - }) - - this.req = { - query: {}, - params: { - Project_id: this.project_id, - }, - headers: {}, - session: { - user: this.user, - }, - body: {}, - i18n: { - translate() {}, - }, - } - this.res = {} - }) - - describe('projectListPage', function () { - beforeEach(function () { - this.projects = [ - { _id: 1, lastUpdated: 1, owner_ref: 'user-1' }, - { - _id: 2, - lastUpdated: 2, - owner_ref: 'user-2', - lastUpdatedBy: 'user-1', - }, - ] - this.readAndWrite = [{ _id: 5, lastUpdated: 5, owner_ref: 'user-1' }] - this.readOnly = [{ _id: 3, lastUpdated: 3, owner_ref: 'user-1' }] - this.tokenReadAndWrite = [{ _id: 6, lastUpdated: 5, owner_ref: 'user-4' }] - this.tokenReadOnly = [{ _id: 7, lastUpdated: 4, owner_ref: 'user-5' }] - this.review = [{ _id: 8, lastUpdated: 4, owner_ref: 'user-6' }] - this.allProjects = { - owned: this.projects, - readAndWrite: this.readAndWrite, - readOnly: this.readOnly, - tokenReadAndWrite: this.tokenReadAndWrite, - tokenReadOnly: this.tokenReadOnly, - review: this.review, - } - - this.ProjectGetter.promises.findAllUsersProjects.resolves( - this.allProjects - ) - }) - - it('should render the project/list-react page', function (done) { - this.res.render = (pageName, opts) => { - pageName.should.equal('project/list-react') - done() - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - - it('should invoke the session maintenance', function (done) { - this.Features.hasFeature.withArgs('saas').returns(true) - this.res.render = () => { - this.SplitTestSessionHandler.promises.sessionMaintenance.should.have.been.calledWith( - this.req, - this.user - ) - done() - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - - it('should send the tags', function (done) { - this.res.render = (pageName, opts) => { - opts.tags.length.should.equal(this.tags.length) - done() - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - - it('should create trigger ip matcher notifications', function (done) { - this.settings.overleaf = true - this.req.ip = '111.111.111.111' - this.res.render = (pageName, opts) => { - this.NotificationBuilder.promises.ipMatcherAffiliation.called.should.equal( - true - ) - done() - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - - it('should send the projects', function (done) { - this.res.render = (pageName, opts) => { - opts.prefetchedProjectsBlob.projects.length.should.equal( - this.projects.length + - this.readAndWrite.length + - this.readOnly.length + - this.tokenReadAndWrite.length + - this.tokenReadOnly.length + - this.review.length - ) - done() - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - - it('should send the user', function (done) { - this.res.render = (pageName, opts) => { - 
opts.user.should.deep.equal(this.user) - done() - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - - it('should inject the users', function (done) { - this.res.render = (pageName, opts) => { - const projects = opts.prefetchedProjectsBlob.projects - - projects - .filter(p => p.id === '1')[0] - .owner.firstName.should.equal( - this.users[this.projects.filter(p => p._id === 1)[0].owner_ref] - .first_name - ) - projects - .filter(p => p.id === '2')[0] - .owner.firstName.should.equal( - this.users[this.projects.filter(p => p._id === 2)[0].owner_ref] - .first_name - ) - projects - .filter(p => p.id === '2')[0] - .lastUpdatedBy.firstName.should.equal( - this.users[this.projects.filter(p => p._id === 2)[0].lastUpdatedBy] - .first_name - ) - done() - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - - it("should send the user's best subscription when saas feature present", function (done) { - this.Features.hasFeature.withArgs('saas').returns(true) - this.res.render = (pageName, opts) => { - expect(opts.usersBestSubscription).to.deep.include({ type: 'free' }) - done() - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - - it('should not return a best subscription without saas feature', function (done) { - this.Features.hasFeature.withArgs('saas').returns(false) - this.res.render = (pageName, opts) => { - expect(opts.usersBestSubscription).to.be.undefined - done() - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - - it('should show INR Banner for Indian users with free account', function (done) { - // usersBestSubscription is only available when saas feature is present - this.Features.hasFeature.withArgs('saas').returns(true) - this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails.resolves( - { - bestSubscription: { - type: 'free', - }, - } - ) - this.GeoIpLookup.promises.getCurrencyCode.resolves({ - countryCode: 'IN', - }) - this.res.render = (pageName, opts) => { - expect(opts.showInrGeoBanner).to.be.true - done() - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - - it('should not show INR Banner for Indian users with premium account', function (done) { - // usersBestSubscription is only available when saas feature is present - this.Features.hasFeature.withArgs('saas').returns(true) - this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails.resolves( - { - bestSubscription: { - type: 'individual', - }, - } - ) - this.GeoIpLookup.promises.getCurrencyCode.resolves({ - countryCode: 'IN', - }) - this.res.render = (pageName, opts) => { - expect(opts.showInrGeoBanner).to.be.false - done() - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - - describe('With Institution SSO feature', function () { - beforeEach(function (done) { - this.institutionEmail = 'test@overleaf.com' - this.institutionName = 'Overleaf' - this.Features.hasFeature.withArgs('saml').returns(true) - this.Features.hasFeature.withArgs('affiliations').returns(true) - this.Features.hasFeature.withArgs('saas').returns(true) - done() - }) - it('should show institution SSO available notification for confirmed domains', function () { - this.UserGetter.promises.getUserFullEmails.resolves([ - { - email: 'test@overleaf.com', - affiliation: { - institution: { - id: 1, - confirmed: true, - name: 'Overleaf', - ssoBeta: false, - ssoEnabled: true, - }, - }, - }, - ]) - this.res.render = (pageName, opts) => { - expect(opts.notificationsInstitution).to.deep.include({ - email: 
this.institutionEmail, - institutionId: 1, - institutionName: this.institutionName, - templateKey: 'notification_institution_sso_available', - }) - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - it('should show a linked notification', function () { - this.req.session.saml = { - institutionEmail: this.institutionEmail, - linked: { - hasEntitlement: false, - universityName: this.institutionName, - }, - } - this.res.render = (pageName, opts) => { - expect(opts.notificationsInstitution).to.deep.include({ - email: this.institutionEmail, - institutionName: this.institutionName, - templateKey: 'notification_institution_sso_linked', - }) - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - it('should show a linked another email notification', function () { - // when they request to link an email but the institution returns - // a different email - this.res.render = (pageName, opts) => { - expect(opts.notificationsInstitution).to.deep.include({ - institutionEmail: this.institutionEmail, - requestedEmail: 'requested@overleaf.com', - templateKey: 'notification_institution_sso_non_canonical', - }) - } - this.req.session.saml = { - emailNonCanonical: this.institutionEmail, - institutionEmail: this.institutionEmail, - requestedEmail: 'requested@overleaf.com', - linked: { - hasEntitlement: false, - universityName: this.institutionName, - }, - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - - it('should show a notification when intent was to register via SSO but account existed', function () { - this.res.render = (pageName, opts) => { - expect(opts.notificationsInstitution).to.deep.include({ - email: this.institutionEmail, - templateKey: 'notification_institution_sso_already_registered', - }) - } - this.req.session.saml = { - institutionEmail: this.institutionEmail, - linked: { - hasEntitlement: false, - universityName: 'Overleaf', - }, - registerIntercept: { - id: 1, - name: 'Example University', - }, - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - - it('should not show a register notification if the flow was abandoned', function () { - // could initially start to register with an SSO email and then - // abandon flow and login with an existing non-institution SSO email - this.res.render = (pageName, opts) => { - expect(opts.notificationsInstitution).to.deep.not.include({ - email: 'test@overleaf.com', - templateKey: 'notification_institution_sso_already_registered', - }) - } - this.req.session.saml = { - registerIntercept: { - id: 1, - name: 'Example University', - }, - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - - it('should show error notification', function () { - this.res.render = (pageName, opts) => { - expect(opts.notificationsInstitution.length).to.equal(1) - expect(opts.notificationsInstitution[0].templateKey).to.equal( - 'notification_institution_sso_error' - ) - expect(opts.notificationsInstitution[0].error).to.be.instanceof( - Errors.SAMLAlreadyLinkedError - ) - } - this.req.session.saml = { - institutionEmail: this.institutionEmail, - error: new Errors.SAMLAlreadyLinkedError(), - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - - describe('for an unconfirmed domain for an SSO institution', function () { - beforeEach(function (done) { - this.UserGetter.promises.getUserFullEmails.resolves([ - { - email: 'test@overleaf-uncofirmed.com', - affiliation: { - institution: { - id: 1, - confirmed: false, - name: 'Overleaf', - ssoBeta: false, - 
ssoEnabled: true, - }, - }, - }, - ]) - done() - }) - it('should not show institution SSO available notification', function () { - this.res.render = (pageName, opts) => { - expect(opts.notificationsInstitution.length).to.equal(0) - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - }) - describe('when linking/logging in initiated on institution side', function () { - it('should not show a linked another email notification', function () { - // this is only used when initated on Overleaf, - // because we keep track of the requested email they tried to link - this.res.render = (pageName, opts) => { - expect(opts.notificationsInstitution).to.not.deep.include({ - institutionEmail: this.institutionEmail, - requestedEmail: undefined, - templateKey: 'notification_institution_sso_non_canonical', - }) - } - this.req.session.saml = { - emailNonCanonical: this.institutionEmail, - institutionEmail: this.institutionEmail, - linked: { - hasEntitlement: false, - universityName: this.institutionName, - }, - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - }) - describe('Institution with SSO beta testable', function () { - beforeEach(function (done) { - this.UserGetter.promises.getUserFullEmails.resolves([ - { - email: 'beta@beta.com', - affiliation: { - institution: { - id: 2, - confirmed: true, - name: 'Beta University', - ssoBeta: true, - ssoEnabled: false, - }, - }, - }, - ]) - done() - }) - it('should show institution SSO available notification when on a beta testing session', function () { - this.req.session.samlBeta = true - this.res.render = (pageName, opts) => { - expect(opts.notificationsInstitution).to.deep.include({ - email: 'beta@beta.com', - institutionId: 2, - institutionName: 'Beta University', - templateKey: 'notification_institution_sso_available', - }) - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - it('should not show institution SSO available notification when not on a beta testing session', function () { - this.req.session.samlBeta = false - this.res.render = (pageName, opts) => { - expect(opts.notificationsInstitution).to.deep.not.include({ - email: 'test@overleaf.com', - institutionId: 1, - institutionName: 'Overleaf', - templateKey: 'notification_institution_sso_available', - }) - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - }) - }) - - describe('Without Institution SSO feature', function () { - beforeEach(function (done) { - this.Features.hasFeature.withArgs('saml').returns(false) - done() - }) - it('should not show institution sso available notification', function () { - this.res.render = (pageName, opts) => { - expect(opts.notificationsInstitution).to.deep.not.include({ - email: 'test@overleaf.com', - institutionId: 1, - institutionName: 'Overleaf', - templateKey: 'notification_institution_sso_available', - }) - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - }) - - describe('enterprise banner', function () { - beforeEach(function (done) { - this.Features.hasFeature.withArgs('saas').returns(true) - this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails.resolves( - { memberGroupSubscriptions: [] } - ) - this.UserGetter.promises.getUserFullEmails.resolves([ - { - email: 'test@test-domain.com', - }, - ]) - - done() - }) - - describe('normal enterprise banner', function () { - it('shows banner', function () { - this.res.render = (pageName, opts) => { - expect(opts.showGroupsAndEnterpriseBanner).to.be.true - } - 
this.ProjectListController.projectListPage(this.req, this.res) - }) - - it('does not show banner if user is part of any affiliation', function () { - this.UserGetter.promises.getUserFullEmails.resolves([ - { - email: 'test@overleaf.com', - affiliation: { - licence: 'pro_plus', - institution: { - id: 1, - confirmed: true, - name: 'Overleaf', - ssoBeta: false, - ssoEnabled: true, - }, - }, - }, - ]) - - this.res.render = (pageName, opts) => { - expect(opts.showGroupsAndEnterpriseBanner).to.be.false - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - - it('does not show banner if user is part of any group subscription', function () { - this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails.resolves( - { memberGroupSubscriptions: [{}] } - ) - - this.res.render = (pageName, opts) => { - expect(opts.showGroupsAndEnterpriseBanner).to.be.false - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - - it('have a banner variant of "FOMO" or "on-premise"', function () { - this.res.render = (pageName, opts) => { - expect(opts.groupsAndEnterpriseBannerVariant).to.be.oneOf([ - 'FOMO', - 'on-premise', - ]) - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - }) - - describe('US government enterprise banner', function () { - it('does not show enterprise banner if US government enterprise banner is shown', function () { - const emails = [ - { - email: 'test@test.mil', - confirmedAt: new Date('2024-01-01'), - }, - ] - - this.UserGetter.promises.getUserFullEmails.resolves(emails) - this.Modules.promises.hooks.fire - .withArgs('getUSGovBanner', emails, false, []) - .resolves([ - { - showUSGovBanner: true, - usGovBannerVariant: 'variant', - }, - ]) - this.res.render = (pageName, opts) => { - expect(opts.showGroupsAndEnterpriseBanner).to.be.false - expect(opts.showUSGovBanner).to.be.true - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - }) - }) - }) - - describe('projectListReactPage with duplicate projects', function () { - beforeEach(function () { - this.projects = [ - { _id: 1, lastUpdated: 1, owner_ref: 'user-1' }, - { _id: 2, lastUpdated: 2, owner_ref: 'user-2' }, - ] - this.readAndWrite = [{ _id: 5, lastUpdated: 5, owner_ref: 'user-1' }] - this.readOnly = [{ _id: 3, lastUpdated: 3, owner_ref: 'user-1' }] - this.tokenReadAndWrite = [{ _id: 6, lastUpdated: 5, owner_ref: 'user-4' }] - this.tokenReadOnly = [ - { _id: 6, lastUpdated: 5, owner_ref: 'user-4' }, // Also in tokenReadAndWrite - { _id: 7, lastUpdated: 4, owner_ref: 'user-5' }, - ] - this.review = [{ _id: 8, lastUpdated: 5, owner_ref: 'user-6' }] - this.allProjects = { - owned: this.projects, - readAndWrite: this.readAndWrite, - readOnly: this.readOnly, - tokenReadAndWrite: this.tokenReadAndWrite, - tokenReadOnly: this.tokenReadOnly, - review: this.review, - } - - this.ProjectGetter.promises.findAllUsersProjects.resolves( - this.allProjects - ) - }) - - it('should render the project/list-react page', function (done) { - this.res.render = (pageName, opts) => { - pageName.should.equal('project/list-react') - done() - } - this.ProjectListController.projectListPage(this.req, this.res) - }) - - it('should omit one of the projects', function (done) { - this.res.render = (pageName, opts) => { - opts.prefetchedProjectsBlob.projects.length.should.equal( - this.projects.length + - this.readAndWrite.length + - this.readOnly.length + - this.tokenReadAndWrite.length + - this.tokenReadOnly.length + - this.review.length - - 1 - ) - done() - } - 
this.ProjectListController.projectListPage(this.req, this.res) - }) - }) -}) diff --git a/services/web/test/unit/src/Referal/ReferalConnect.test.mjs b/services/web/test/unit/src/Referal/ReferalConnect.test.mjs new file mode 100644 index 0000000000..33e6c6816e --- /dev/null +++ b/services/web/test/unit/src/Referal/ReferalConnect.test.mjs @@ -0,0 +1,153 @@ +const modulePath = new URL( + '../../../../app/src/Features/Referal/ReferalConnect.mjs', + import.meta.url +).pathname + +describe('Referal connect middle wear', function () { + beforeEach(async function (ctx) { + ctx.connect = (await import(modulePath)).default + }) + + it('should take a referal query string and put it on the session if it exists', function (ctx) { + return new Promise(resolve => { + const req = { + query: { referal: '12345' }, + session: {}, + } + ctx.connect.use(req, {}, () => { + req.session.referal_id.should.equal(req.query.referal) + resolve() + }) + }) + }) + + it('should not change the referal_id on the session if not in query', function (ctx) { + return new Promise(resolve => { + const req = { + query: {}, + session: { referal_id: 'same' }, + } + ctx.connect.use(req, {}, () => { + req.session.referal_id.should.equal('same') + resolve() + }) + }) + }) + + it('should take a facebook referal query string and put it on the session if it exists', function (ctx) { + return new Promise(resolve => { + const req = { + query: { fb_ref: '12345' }, + session: {}, + } + ctx.connect.use(req, {}, () => { + req.session.referal_id.should.equal(req.query.fb_ref) + resolve() + }) + }) + }) + + it('should map the facebook medium into the session', function (ctx) { + return new Promise(resolve => { + const req = { + query: { rm: 'fb' }, + session: {}, + } + ctx.connect.use(req, {}, () => { + req.session.referal_medium.should.equal('facebook') + resolve() + }) + }) + }) + + it('should map the twitter medium into the session', function (ctx) { + return new Promise(resolve => { + const req = { + query: { rm: 't' }, + session: {}, + } + ctx.connect.use(req, {}, () => { + req.session.referal_medium.should.equal('twitter') + resolve() + }) + }) + }) + + it('should map the google plus medium into the session', function (ctx) { + return new Promise(resolve => { + const req = { + query: { rm: 'gp' }, + session: {}, + } + ctx.connect.use(req, {}, () => { + req.session.referal_medium.should.equal('google_plus') + resolve() + }) + }) + }) + + it('should map the email medium into the session', function (ctx) { + return new Promise(resolve => { + const req = { + query: { rm: 'e' }, + session: {}, + } + ctx.connect.use(req, {}, () => { + req.session.referal_medium.should.equal('email') + resolve() + }) + }) + }) + + it('should map the direct medium into the session', function (ctx) { + return new Promise(resolve => { + const req = { + query: { rm: 'd' }, + session: {}, + } + ctx.connect.use(req, {}, () => { + req.session.referal_medium.should.equal('direct') + resolve() + }) + }) + }) + + it('should map the bonus source into the session', function (ctx) { + return new Promise(resolve => { + const req = { + query: { rs: 'b' }, + session: {}, + } + ctx.connect.use(req, {}, () => { + req.session.referal_source.should.equal('bonus') + resolve() + }) + }) + }) + + it('should map the public share source into the session', function (ctx) { + return new Promise(resolve => { + const req = { + query: { rs: 'ps' }, + session: {}, + } + ctx.connect.use(req, {}, () => { + req.session.referal_source.should.equal('public_share') + resolve() + }) + }) + }) + + 
it('should map the collaborator invite into the session', function (ctx) { + return new Promise(resolve => { + const req = { + query: { rs: 'ci' }, + session: {}, + } + ctx.connect.use(req, {}, () => { + req.session.referal_source.should.equal('collaborator_invite') + resolve() + }) + }) + }) +}) diff --git a/services/web/test/unit/src/Referal/ReferalConnectTests.mjs b/services/web/test/unit/src/Referal/ReferalConnectTests.mjs deleted file mode 100644 index c6e56c3c6a..0000000000 --- a/services/web/test/unit/src/Referal/ReferalConnectTests.mjs +++ /dev/null @@ -1,132 +0,0 @@ -import esmock from 'esmock' -const modulePath = new URL( - '../../../../app/src/Features/Referal/ReferalConnect.mjs', - import.meta.url -).pathname - -describe('Referal connect middle wear', function () { - beforeEach(async function () { - this.connect = await esmock.strict(modulePath, {}) - }) - - it('should take a referal query string and put it on the session if it exists', function (done) { - const req = { - query: { referal: '12345' }, - session: {}, - } - this.connect.use(req, {}, () => { - req.session.referal_id.should.equal(req.query.referal) - done() - }) - }) - - it('should not change the referal_id on the session if not in query', function (done) { - const req = { - query: {}, - session: { referal_id: 'same' }, - } - this.connect.use(req, {}, () => { - req.session.referal_id.should.equal('same') - done() - }) - }) - - it('should take a facebook referal query string and put it on the session if it exists', function (done) { - const req = { - query: { fb_ref: '12345' }, - session: {}, - } - this.connect.use(req, {}, () => { - req.session.referal_id.should.equal(req.query.fb_ref) - done() - }) - }) - - it('should map the facebook medium into the session', function (done) { - const req = { - query: { rm: 'fb' }, - session: {}, - } - this.connect.use(req, {}, () => { - req.session.referal_medium.should.equal('facebook') - done() - }) - }) - - it('should map the twitter medium into the session', function (done) { - const req = { - query: { rm: 't' }, - session: {}, - } - this.connect.use(req, {}, () => { - req.session.referal_medium.should.equal('twitter') - done() - }) - }) - - it('should map the google plus medium into the session', function (done) { - const req = { - query: { rm: 'gp' }, - session: {}, - } - this.connect.use(req, {}, () => { - req.session.referal_medium.should.equal('google_plus') - done() - }) - }) - - it('should map the email medium into the session', function (done) { - const req = { - query: { rm: 'e' }, - session: {}, - } - this.connect.use(req, {}, () => { - req.session.referal_medium.should.equal('email') - done() - }) - }) - - it('should map the direct medium into the session', function (done) { - const req = { - query: { rm: 'd' }, - session: {}, - } - this.connect.use(req, {}, () => { - req.session.referal_medium.should.equal('direct') - done() - }) - }) - - it('should map the bonus source into the session', function (done) { - const req = { - query: { rs: 'b' }, - session: {}, - } - this.connect.use(req, {}, () => { - req.session.referal_source.should.equal('bonus') - done() - }) - }) - - it('should map the public share source into the session', function (done) { - const req = { - query: { rs: 'ps' }, - session: {}, - } - this.connect.use(req, {}, () => { - req.session.referal_source.should.equal('public_share') - done() - }) - }) - - it('should map the collaborator invite into the session', function (done) { - const req = { - query: { rs: 'ci' }, - session: {}, - } - 
this.connect.use(req, {}, () => { - req.session.referal_source.should.equal('collaborator_invite') - done() - }) - }) -}) diff --git a/services/web/test/unit/src/Referal/ReferalController.test.mjs b/services/web/test/unit/src/Referal/ReferalController.test.mjs new file mode 100644 index 0000000000..383902946f --- /dev/null +++ b/services/web/test/unit/src/Referal/ReferalController.test.mjs @@ -0,0 +1,7 @@ +const modulePath = '../../../../app/src/Features/Referal/ReferalController.js' + +describe.todo('Referal controller', function () { + beforeEach(async function (ctx) { + ctx.controller = (await import(modulePath)).default + }) +}) diff --git a/services/web/test/unit/src/Referal/ReferalControllerTests.mjs b/services/web/test/unit/src/Referal/ReferalControllerTests.mjs deleted file mode 100644 index 523fd23728..0000000000 --- a/services/web/test/unit/src/Referal/ReferalControllerTests.mjs +++ /dev/null @@ -1,11 +0,0 @@ -import esmock from 'esmock' -const modulePath = new URL( - '../../../../app/src/Features/Referal/ReferalController.js', - import.meta.url -).pathname - -describe('Referal controller', function () { - beforeEach(async function () { - this.controller = await esmock.strict(modulePath, {}) - }) -}) diff --git a/services/web/test/unit/src/Referal/ReferalHandlerTests.mjs b/services/web/test/unit/src/Referal/ReferalHandler.test.mjs similarity index 63% rename from services/web/test/unit/src/Referal/ReferalHandlerTests.mjs rename to services/web/test/unit/src/Referal/ReferalHandler.test.mjs index 6fd58a6569..5c042f2ef9 100644 --- a/services/web/test/unit/src/Referal/ReferalHandlerTests.mjs +++ b/services/web/test/unit/src/Referal/ReferalHandler.test.mjs @@ -1,88 +1,85 @@ -import esmock from 'esmock' -import { expect } from 'chai' +import { expect, vi } from 'vitest' import sinon from 'sinon' -const modulePath = new URL( - '../../../../app/src/Features/Referal/ReferalHandler.mjs', - import.meta.url -).pathname +const modulePath = '../../../../app/src/Features/Referal/ReferalHandler.mjs' describe('Referal handler', function () { - beforeEach(async function () { - this.User = { + beforeEach(async function (ctx) { + ctx.User = { findById: sinon.stub().returns({ exec: sinon.stub(), }), } - this.handler = await esmock.strict(modulePath, { - '../../../../app/src/models/User': { - User: this.User, - }, - }) - this.user_id = '12313' + + vi.doMock('../../../../app/src/models/User', () => ({ + User: ctx.User, + })) + + ctx.handler = (await import(modulePath)).default + ctx.user_id = '12313' }) describe('getting refered user_ids', function () { - it('should get the user from mongo and return the refered users array', async function () { + it('should get the user from mongo and return the refered users array', async function (ctx) { const user = { refered_users: ['1234', '312312', '3213129'], refered_user_count: 3, } - this.User.findById.returns({ + ctx.User.findById.returns({ exec: sinon.stub().resolves(user), }) const { referedUsers: passedReferedUserIds, referedUserCount: passedReferedUserCount, - } = await this.handler.promises.getReferedUsers(this.user_id) + } = await ctx.handler.promises.getReferedUsers(ctx.user_id) passedReferedUserIds.should.deep.equal(user.refered_users) passedReferedUserCount.should.equal(3) }) - it('should return an empty array if it is not set', async function () { + it('should return an empty array if it is not set', async function (ctx) { const user = {} - this.User.findById.returns({ + ctx.User.findById.returns({ exec: sinon.stub().resolves(user), }) const { 
referedUsers: passedReferedUserIds } = - await this.handler.promises.getReferedUsers(this.user_id) + await ctx.handler.promises.getReferedUsers(ctx.user_id) passedReferedUserIds.length.should.equal(0) }) - it('should return a zero count if neither it or the array are set', async function () { + it('should return a zero count if neither it or the array are set', async function (ctx) { const user = {} - this.User.findById.returns({ + ctx.User.findById.returns({ exec: sinon.stub().resolves(user), }) const { referedUserCount: passedReferedUserCount } = - await this.handler.promises.getReferedUsers(this.user_id) + await ctx.handler.promises.getReferedUsers(ctx.user_id) passedReferedUserCount.should.equal(0) }) - it('should return the array length if count is not set', async function () { + it('should return the array length if count is not set', async function (ctx) { const user = { refered_users: ['1234', '312312', '3213129'] } - this.User.findById.returns({ + ctx.User.findById.returns({ exec: sinon.stub().resolves(user), }) const { referedUserCount: passedReferedUserCount } = - await this.handler.promises.getReferedUsers(this.user_id) + await ctx.handler.promises.getReferedUsers(ctx.user_id) passedReferedUserCount.should.equal(3) }) - it('should error if finding the user fails', async function () { - this.User.findById.returns({ + it('should error if finding the user fails', async function (ctx) { + ctx.User.findById.returns({ exec: sinon.stub().rejects(new Error('user not found')), }) expect( - this.handler.promises.getReferedUsers(this.user_id) + ctx.handler.promises.getReferedUsers(ctx.user_id) ).to.be.rejectedWith('user not found') }) }) diff --git a/services/web/test/unit/src/References/ReferencesController.test.mjs b/services/web/test/unit/src/References/ReferencesController.test.mjs new file mode 100644 index 0000000000..679e835840 --- /dev/null +++ b/services/web/test/unit/src/References/ReferencesController.test.mjs @@ -0,0 +1,213 @@ +import { vi } from 'vitest' +import sinon from 'sinon' +import MockRequest from '../helpers/MockRequest.js' +import MockResponse from '../helpers/MockResponse.js' +const modulePath = + '../../../../app/src/Features/References/ReferencesController' + +describe('ReferencesController', function () { + beforeEach(async function (ctx) { + ctx.projectId = '2222' + + vi.doMock('@overleaf/settings', () => ({ + default: (ctx.settings = { + apis: { web: { url: 'http://some.url' } }, + }), + })) + + vi.doMock( + '../../../../app/src/Features/References/ReferencesHandler', + () => ({ + default: (ctx.ReferencesHandler = { + index: sinon.stub(), + indexAll: sinon.stub(), + }), + }) + ) + + vi.doMock( + '../../../../app/src/Features/Editor/EditorRealTimeController', + () => ({ + default: (ctx.EditorRealTimeController = { + emitToRoom: sinon.stub(), + }), + }) + ) + + ctx.controller = (await import(modulePath)).default + ctx.req = new MockRequest() + ctx.req.params.Project_id = ctx.projectId + ctx.req.body = { + docIds: (ctx.docIds = ['aaa', 'bbb']), + shouldBroadcast: false, + } + ctx.res = new MockResponse() + ctx.res.json = sinon.stub() + ctx.res.sendStatus = sinon.stub() + ctx.next = sinon.stub() + ctx.fakeResponseData = { + projectId: ctx.projectId, + keys: ['one', 'two', 'three'], + } + }) + + describe('indexAll', function () { + beforeEach(function (ctx) { + ctx.req.body = { shouldBroadcast: false } + ctx.ReferencesHandler.indexAll.callsArgWith(1, null, ctx.fakeResponseData) + ctx.call = callback => { + ctx.controller.indexAll(ctx.req, ctx.res, ctx.next) + 
return callback() + } + }) + + it('should not produce an error', function (ctx) { + return new Promise(resolve => { + ctx.call(() => { + ctx.res.sendStatus.callCount.should.equal(0) + ctx.res.sendStatus.calledWith(500).should.equal(false) + ctx.res.sendStatus.calledWith(400).should.equal(false) + resolve() + }) + }) + }) + + it('should return data', function (ctx) { + return new Promise(resolve => { + ctx.call(() => { + ctx.res.json.callCount.should.equal(1) + ctx.res.json.calledWith(ctx.fakeResponseData).should.equal(true) + resolve() + }) + }) + }) + + it('should call ReferencesHandler.indexAll', function (ctx) { + return new Promise(resolve => { + ctx.call(() => { + ctx.ReferencesHandler.indexAll.callCount.should.equal(1) + ctx.ReferencesHandler.indexAll + .calledWith(ctx.projectId) + .should.equal(true) + resolve() + }) + }) + }) + + describe('when shouldBroadcast is true', function () { + beforeEach(function (ctx) { + ctx.ReferencesHandler.index.callsArgWith(2, null, ctx.fakeResponseData) + ctx.req.body.shouldBroadcast = true + }) + + it('should call EditorRealTimeController.emitToRoom', function (ctx) { + return new Promise(resolve => { + ctx.call(() => { + ctx.EditorRealTimeController.emitToRoom.callCount.should.equal(1) + resolve() + }) + }) + }) + + it('should not produce an error', function (ctx) { + return new Promise(resolve => { + ctx.call(() => { + ctx.res.sendStatus.callCount.should.equal(0) + ctx.res.sendStatus.calledWith(500).should.equal(false) + ctx.res.sendStatus.calledWith(400).should.equal(false) + resolve() + }) + }) + }) + + it('should still return data', function (ctx) { + return new Promise(resolve => { + ctx.call(() => { + ctx.res.json.callCount.should.equal(1) + ctx.res.json.calledWith(ctx.fakeResponseData).should.equal(true) + resolve() + }) + }) + }) + }) + + describe('when shouldBroadcast is false', function () { + beforeEach(function (ctx) { + ctx.ReferencesHandler.index.callsArgWith(2, null, ctx.fakeResponseData) + ctx.req.body.shouldBroadcast = false + }) + + it('should not call EditorRealTimeController.emitToRoom', function (ctx) { + return new Promise(resolve => { + ctx.call(() => { + ctx.EditorRealTimeController.emitToRoom.callCount.should.equal(0) + resolve() + }) + }) + }) + + it('should not produce an error', function (ctx) { + return new Promise(resolve => { + ctx.call(() => { + ctx.res.sendStatus.callCount.should.equal(0) + ctx.res.sendStatus.calledWith(500).should.equal(false) + ctx.res.sendStatus.calledWith(400).should.equal(false) + resolve() + }) + }) + }) + + it('should still return data', function (ctx) { + return new Promise(resolve => { + ctx.call(() => { + ctx.res.json.callCount.should.equal(1) + ctx.res.json.calledWith(ctx.fakeResponseData).should.equal(true) + resolve() + }) + }) + }) + }) + }) + + describe('there is no data', function () { + beforeEach(function (ctx) { + ctx.ReferencesHandler.indexAll.callsArgWith(1) + ctx.call = callback => { + ctx.controller.indexAll(ctx.req, ctx.res, ctx.next) + callback() + } + }) + + it('should not call EditorRealTimeController.emitToRoom', function (ctx) { + return new Promise(resolve => { + ctx.call(() => { + ctx.EditorRealTimeController.emitToRoom.callCount.should.equal(0) + resolve() + }) + }) + }) + + it('should not produce an error', function (ctx) { + return new Promise(resolve => { + ctx.call(() => { + ctx.res.sendStatus.callCount.should.equal(0) + ctx.res.sendStatus.calledWith(500).should.equal(false) + ctx.res.sendStatus.calledWith(400).should.equal(false) + resolve() + }) + }) + }) + + 
it('should send a response with an empty keys list', function (ctx) { + return new Promise(resolve => { + ctx.call(() => { + ctx.res.json.called.should.equal(true) + ctx.res.json + .calledWith({ projectId: ctx.projectId, keys: [] }) + .should.equal(true) + resolve() + }) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/References/ReferencesControllerTests.mjs b/services/web/test/unit/src/References/ReferencesControllerTests.mjs deleted file mode 100644 index fca2acea12..0000000000 --- a/services/web/test/unit/src/References/ReferencesControllerTests.mjs +++ /dev/null @@ -1,188 +0,0 @@ -import esmock from 'esmock' -import sinon from 'sinon' -import MockRequest from '../helpers/MockRequest.js' -import MockResponse from '../helpers/MockResponse.js' -const modulePath = - '../../../../app/src/Features/References/ReferencesController' - -describe('ReferencesController', function () { - beforeEach(async function () { - this.projectId = '2222' - this.controller = await esmock.strict(modulePath, { - '@overleaf/settings': (this.settings = { - apis: { web: { url: 'http://some.url' } }, - }), - '../../../../app/src/Features/References/ReferencesHandler': - (this.ReferencesHandler = { - index: sinon.stub(), - indexAll: sinon.stub(), - }), - '../../../../app/src/Features/Editor/EditorRealTimeController': - (this.EditorRealTimeController = { - emitToRoom: sinon.stub(), - }), - }) - this.req = new MockRequest() - this.req.params.Project_id = this.projectId - this.req.body = { - docIds: (this.docIds = ['aaa', 'bbb']), - shouldBroadcast: false, - } - this.res = new MockResponse() - this.res.json = sinon.stub() - this.res.sendStatus = sinon.stub() - this.next = sinon.stub() - this.fakeResponseData = { - projectId: this.projectId, - keys: ['one', 'two', 'three'], - } - }) - - describe('indexAll', function () { - beforeEach(function () { - this.req.body = { shouldBroadcast: false } - this.ReferencesHandler.indexAll.callsArgWith( - 1, - null, - this.fakeResponseData - ) - this.call = callback => { - this.controller.indexAll(this.req, this.res, this.next) - return callback() - } - }) - - it('should not produce an error', function (done) { - this.call(() => { - this.res.sendStatus.callCount.should.equal(0) - this.res.sendStatus.calledWith(500).should.equal(false) - this.res.sendStatus.calledWith(400).should.equal(false) - done() - }) - }) - - it('should return data', function (done) { - this.call(() => { - this.res.json.callCount.should.equal(1) - this.res.json.calledWith(this.fakeResponseData).should.equal(true) - done() - }) - }) - - it('should call ReferencesHandler.indexAll', function (done) { - this.call(() => { - this.ReferencesHandler.indexAll.callCount.should.equal(1) - this.ReferencesHandler.indexAll - .calledWith(this.projectId) - .should.equal(true) - done() - }) - }) - - describe('when shouldBroadcast is true', function () { - beforeEach(function () { - this.ReferencesHandler.index.callsArgWith( - 2, - null, - this.fakeResponseData - ) - this.req.body.shouldBroadcast = true - }) - - it('should call EditorRealTimeController.emitToRoom', function (done) { - this.call(() => { - this.EditorRealTimeController.emitToRoom.callCount.should.equal(1) - done() - }) - }) - - it('should not produce an error', function (done) { - this.call(() => { - this.res.sendStatus.callCount.should.equal(0) - this.res.sendStatus.calledWith(500).should.equal(false) - this.res.sendStatus.calledWith(400).should.equal(false) - done() - }) - }) - - it('should still return data', function (done) { - this.call(() => { - 
this.res.json.callCount.should.equal(1) - this.res.json.calledWith(this.fakeResponseData).should.equal(true) - done() - }) - }) - }) - - describe('when shouldBroadcast is false', function () { - beforeEach(function () { - this.ReferencesHandler.index.callsArgWith( - 2, - null, - this.fakeResponseData - ) - this.req.body.shouldBroadcast = false - }) - - it('should not call EditorRealTimeController.emitToRoom', function (done) { - this.call(() => { - this.EditorRealTimeController.emitToRoom.callCount.should.equal(0) - done() - }) - }) - - it('should not produce an error', function (done) { - this.call(() => { - this.res.sendStatus.callCount.should.equal(0) - this.res.sendStatus.calledWith(500).should.equal(false) - this.res.sendStatus.calledWith(400).should.equal(false) - done() - }) - }) - - it('should still return data', function (done) { - this.call(() => { - this.res.json.callCount.should.equal(1) - this.res.json.calledWith(this.fakeResponseData).should.equal(true) - done() - }) - }) - }) - }) - - describe('there is no data', function () { - beforeEach(function () { - this.ReferencesHandler.indexAll.callsArgWith(1) - this.call = callback => { - this.controller.indexAll(this.req, this.res, this.next) - callback() - } - }) - - it('should not call EditorRealTimeController.emitToRoom', function (done) { - this.call(() => { - this.EditorRealTimeController.emitToRoom.callCount.should.equal(0) - done() - }) - }) - - it('should not produce an error', function (done) { - this.call(() => { - this.res.sendStatus.callCount.should.equal(0) - this.res.sendStatus.calledWith(500).should.equal(false) - this.res.sendStatus.calledWith(400).should.equal(false) - done() - }) - }) - - it('should send a response with an empty keys list', function (done) { - this.call(() => { - this.res.json.called.should.equal(true) - this.res.json - .calledWith({ projectId: this.projectId, keys: [] }) - .should.equal(true) - done() - }) - }) - }) -}) diff --git a/services/web/test/unit/src/References/ReferencesHandler.test.mjs b/services/web/test/unit/src/References/ReferencesHandler.test.mjs new file mode 100644 index 0000000000..92666e6bcc --- /dev/null +++ b/services/web/test/unit/src/References/ReferencesHandler.test.mjs @@ -0,0 +1,445 @@ +import { expect, vi } from 'vitest' +import sinon from 'sinon' +import Errors from '../../../../app/src/Features/Errors/Errors.js' +const modulePath = + '../../../../app/src/Features/References/ReferencesHandler.mjs' + +vi.mock('../../../../app/src/Features/Errors/Errors.js', () => + vi.importActual('../../../../app/src/Features/Errors/Errors.js') +) + +describe('ReferencesHandler', function () { + beforeEach(async function (ctx) { + ctx.projectId = '222' + ctx.historyId = 42 + ctx.fakeProject = { + _id: ctx.projectId, + owner_ref: (ctx.fakeOwner = { + _id: 'some_owner', + features: { + references: false, + }, + }), + rootFolder: [ + { + docs: [ + { name: 'one.bib', _id: 'aaa' }, + { name: 'two.txt', _id: 'bbb' }, + ], + folders: [ + { + docs: [{ name: 'three.bib', _id: 'ccc' }], + fileRefs: [ + { name: 'four.bib', _id: 'fff' }, + { name: 'five.bib', _id: 'ggg', hash: 'hash' }, + ], + folders: [], + }, + ], + }, + ], + overleaf: { history: { id: ctx.historyId } }, + } + ctx.docIds = ['aaa', 'ccc'] + + vi.doMock('@overleaf/settings', () => ({ + default: (ctx.settings = { + apis: { + references: { url: 'http://some.url/references' }, + docstore: { url: 'http://some.url/docstore' }, + filestore: { url: 'http://some.url/filestore' }, + project_history: { url: 'http://project-history.local' 
}, + }, + enableProjectHistoryBlobs: true, + }), + })) + + vi.doMock('request', () => ({ + default: (ctx.request = { + get: sinon.stub(), + post: sinon.stub(), + }), + })) + + vi.doMock('../../../../app/src/Features/Project/ProjectGetter', () => ({ + default: (ctx.ProjectGetter = { + getProject: sinon.stub().callsArgWith(2, null, ctx.fakeProject), + }), + })) + + vi.doMock('../../../../app/src/Features/User/UserGetter', () => ({ + default: (ctx.UserGetter = { + getUser: sinon.stub(), + }), + })) + + vi.doMock( + '../../../../app/src/Features/DocumentUpdater/DocumentUpdaterHandler', + () => ({ + default: (ctx.DocumentUpdaterHandler = { + flushDocToMongo: sinon.stub().callsArgWith(2, null), + }), + }) + ) + + vi.doMock('../../../../app/src/infrastructure/Features', () => ({ + default: (ctx.Features = { + hasFeature: sinon.stub().returns(true), + }), + })) + + ctx.handler = (await import(modulePath)).default + ctx.fakeResponseData = { + projectId: ctx.projectId, + keys: ['k1', 'k2'], + } + }) + + describe('indexAll', function () { + beforeEach(function (ctx) { + sinon.stub(ctx.handler, '_findBibDocIds').returns(['aaa', 'ccc']) + sinon + .stub(ctx.handler, '_findBibFileRefs') + .returns([{ _id: 'fff' }, { _id: 'ggg', hash: 'hash' }]) + sinon.stub(ctx.handler, '_isFullIndex').callsArgWith(1, null, true) + ctx.request.post.callsArgWith( + 1, + null, + { statusCode: 200 }, + ctx.fakeResponseData + ) + return (ctx.call = callback => { + return ctx.handler.indexAll(ctx.projectId, callback) + }) + }) + + it('should call _findBibDocIds', function (ctx) { + return new Promise(resolve => { + return ctx.call((err, data) => { + expect(err).to.be.null + ctx.handler._findBibDocIds.callCount.should.equal(1) + ctx.handler._findBibDocIds + .calledWith(ctx.fakeProject) + .should.equal(true) + return resolve() + }) + }) + }) + + it('should call _findBibFileRefs', function (ctx) { + return new Promise(resolve => { + return ctx.call((err, data) => { + expect(err).to.be.null + ctx.handler._findBibDocIds.callCount.should.equal(1) + ctx.handler._findBibDocIds + .calledWith(ctx.fakeProject) + .should.equal(true) + return resolve() + }) + }) + }) + + it('should call DocumentUpdaterHandler.flushDocToMongo', function (ctx) { + return new Promise(resolve => { + return ctx.call((err, data) => { + expect(err).to.be.null + ctx.DocumentUpdaterHandler.flushDocToMongo.callCount.should.equal(2) + return resolve() + }) + }) + }) + + it('should make a request to references service', function (ctx) { + return new Promise(resolve => { + return ctx.call((err, data) => { + expect(err).to.be.null + ctx.request.post.callCount.should.equal(1) + const arg = ctx.request.post.firstCall.args[0] + expect(arg.json).to.have.all.keys( + 'docUrls', + 'sourceURLs', + 'fullIndex' + ) + expect(arg.json.docUrls.length).to.equal(4) + expect(arg.json.docUrls).to.deep.equal([ + `${ctx.settings.apis.docstore.url}/project/${ctx.projectId}/doc/aaa/raw`, + `${ctx.settings.apis.docstore.url}/project/${ctx.projectId}/doc/ccc/raw`, + `${ctx.settings.apis.filestore.url}/project/${ctx.projectId}/file/fff?from=bibFileUrls`, + `${ctx.settings.apis.filestore.url}/project/${ctx.projectId}/file/ggg?from=bibFileUrls`, + ]) + expect(arg.json.sourceURLs.length).to.equal(4) + expect(arg.json.sourceURLs).to.deep.equal([ + { + url: `${ctx.settings.apis.docstore.url}/project/${ctx.projectId}/doc/aaa/raw`, + }, + { + url: `${ctx.settings.apis.docstore.url}/project/${ctx.projectId}/doc/ccc/raw`, + }, + { + url: 
`${ctx.settings.apis.filestore.url}/project/${ctx.projectId}/file/fff?from=bibFileUrls`, + }, + { + url: `${ctx.settings.apis.project_history.url}/project/${ctx.historyId}/blob/hash`, + fallbackURL: `${ctx.settings.apis.filestore.url}/project/${ctx.projectId}/file/ggg?from=bibFileUrls`, + }, + ]) + expect(arg.json.fullIndex).to.equal(true) + return resolve() + }) + }) + }) + + it('should not produce an error', function (ctx) { + return new Promise(resolve => { + return ctx.call((err, data) => { + expect(err).to.equal(null) + return resolve() + }) + }) + }) + + it('should return data', function (ctx) { + return new Promise(resolve => { + return ctx.call((err, data) => { + expect(err).to.be.null + expect(data).to.not.equal(null) + expect(data).to.not.equal(undefined) + expect(data).to.equal(ctx.fakeResponseData) + return resolve() + }) + }) + }) + + describe('when ProjectGetter.getProject produces an error', function () { + beforeEach(function (ctx) { + ctx.ProjectGetter.getProject.callsArgWith(2, new Error('woops')) + }) + + it('should produce an error', function (ctx) { + return new Promise(resolve => { + ctx.call((err, data) => { + expect(err).to.not.equal(null) + expect(err).to.be.instanceof(Error) + expect(data).to.equal(undefined) + resolve() + }) + }) + }) + + it('should not send request', function (ctx) { + return new Promise(resolve => { + ctx.call(() => { + ctx.request.post.callCount.should.equal(0) + resolve() + }) + }) + }) + }) + + describe('when ProjectGetter.getProject returns null', function () { + beforeEach(function (ctx) { + ctx.ProjectGetter.getProject.callsArgWith(2, null) + }) + + it('should produce an error', function (ctx) { + return new Promise(resolve => { + ctx.call((err, data) => { + expect(err).to.not.equal(null) + expect(err).to.be.instanceof(Errors.NotFoundError) + expect(data).to.equal(undefined) + resolve() + }) + }) + }) + + it('should not send request', function (ctx) { + return new Promise(resolve => { + ctx.call(() => { + ctx.request.post.callCount.should.equal(0) + resolve() + }) + }) + }) + }) + + describe('when _isFullIndex produces an error', function () { + beforeEach(function (ctx) { + ctx.ProjectGetter.getProject.callsArgWith(2, null, ctx.fakeProject) + ctx.handler._isFullIndex.callsArgWith(1, new Error('woops')) + }) + + it('should produce an error', function (ctx) { + return new Promise(resolve => { + ctx.call((err, data) => { + expect(err).to.not.equal(null) + expect(err).to.be.instanceof(Error) + expect(data).to.equal(undefined) + resolve() + }) + }) + }) + + it('should not send request', function (ctx) { + return new Promise(resolve => { + ctx.call(() => { + ctx.request.post.callCount.should.equal(0) + resolve() + }) + }) + }) + }) + + describe('when flushDocToMongo produces an error', function () { + beforeEach(function (ctx) { + ctx.ProjectGetter.getProject.callsArgWith(2, null, ctx.fakeProject) + ctx.handler._isFullIndex.callsArgWith(1, false) + ctx.DocumentUpdaterHandler.flushDocToMongo.callsArgWith( + 2, + new Error('woops') + ) + }) + + it('should produce an error', function (ctx) { + return new Promise(resolve => { + ctx.call((err, data) => { + expect(err).to.not.equal(null) + expect(err).to.be.instanceof(Error) + expect(data).to.equal(undefined) + resolve() + }) + }) + }) + + it('should not send request', function (ctx) { + return new Promise(resolve => { + ctx.call(() => { + ctx.request.post.callCount.should.equal(0) + resolve() + }) + }) + }) + }) + }) + + describe('_findBibDocIds', function () { + beforeEach(function (ctx) { + 
ctx.fakeProject = { + rootFolder: [ + { + docs: [ + { name: 'one.bib', _id: 'aaa' }, + { name: 'two.txt', _id: 'bbb' }, + ], + folders: [ + { docs: [{ name: 'three.bib', _id: 'ccc' }], folders: [] }, + ], + }, + ], + } + ctx.expectedIds = ['aaa', 'ccc'] + }) + + it('should select the correct docIds', function (ctx) { + const result = ctx.handler._findBibDocIds(ctx.fakeProject) + expect(result).to.deep.equal(ctx.expectedIds) + }) + + it('should not error with a non array of folders from dirty data', function (ctx) { + ctx.fakeProject.rootFolder[0].folders[0].folders = {} + const result = ctx.handler._findBibDocIds(ctx.fakeProject) + expect(result).to.deep.equal(ctx.expectedIds) + }) + }) + + describe('_findBibFileRefs', function () { + beforeEach(function (ctx) { + ctx.fakeProject = { + rootFolder: [ + { + docs: [ + { name: 'one.bib', _id: 'aaa' }, + { name: 'two.txt', _id: 'bbb' }, + ], + fileRefs: [{ name: 'other.bib', _id: 'ddd' }], + folders: [ + { + docs: [{ name: 'three.bib', _id: 'ccc' }], + fileRefs: [{ name: 'four.bib', _id: 'ghg' }], + folders: [], + }, + ], + }, + ], + } + ctx.expectedIds = [ + ctx.fakeProject.rootFolder[0].fileRefs[0], + ctx.fakeProject.rootFolder[0].folders[0].fileRefs[0], + ] + }) + + it('should select the correct docIds', function (ctx) { + const result = ctx.handler._findBibFileRefs(ctx.fakeProject) + expect(result).to.deep.equal(ctx.expectedIds) + }) + }) + + describe('_isFullIndex', function () { + beforeEach(function (ctx) { + ctx.fakeProject = { owner_ref: (ctx.owner_ref = 'owner-ref-123') } + ctx.owner = { + features: { + references: false, + }, + } + ctx.UserGetter.getUser = sinon.stub() + ctx.UserGetter.getUser + .withArgs(ctx.owner_ref, { features: true }) + .yields(null, ctx.owner) + ctx.call = callback => { + ctx.handler._isFullIndex(ctx.fakeProject, callback) + } + }) + + describe('with references feature on', function () { + beforeEach(function (ctx) { + ctx.owner.features.references = true + }) + + it('should return true', function (ctx) { + ctx.call((err, isFullIndex) => { + expect(err).to.equal(null) + expect(isFullIndex).to.equal(true) + }) + }) + }) + + describe('with references feature off', function () { + beforeEach(function (ctx) { + ctx.owner.features.references = false + }) + + it('should return false', function (ctx) { + ctx.call((err, isFullIndex) => { + expect(err).to.equal(null) + expect(isFullIndex).to.equal(false) + }) + }) + }) + + describe('with referencesSearch', function () { + beforeEach(function (ctx) { + ctx.owner.features = { + referencesSearch: true, + references: false, + } + }) + + it('should return true', function (ctx) { + ctx.call((err, isFullIndex) => { + expect(err).to.equal(null) + expect(isFullIndex).to.equal(true) + }) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/References/ReferencesHandlerTests.mjs b/services/web/test/unit/src/References/ReferencesHandlerTests.mjs deleted file mode 100644 index 57570dcf12..0000000000 --- a/services/web/test/unit/src/References/ReferencesHandlerTests.mjs +++ /dev/null @@ -1,399 +0,0 @@ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -import esmock from 'esmock' - -import { expect } from 'chai' -import sinon from 'sinon' -import Errors from '../../../../app/src/Features/Errors/Errors.js' -const modulePath = - '../../../../app/src/Features/References/ReferencesHandler.mjs' - -describe('ReferencesHandler', function () { - beforeEach(async function () { - this.projectId = '222' - this.historyId = 42 - this.fakeProject = { - _id: this.projectId, - owner_ref: (this.fakeOwner = { - _id: 'some_owner', - features: { - references: false, - }, - }), - rootFolder: [ - { - docs: [ - { name: 'one.bib', _id: 'aaa' }, - { name: 'two.txt', _id: 'bbb' }, - ], - folders: [ - { - docs: [{ name: 'three.bib', _id: 'ccc' }], - fileRefs: [ - { name: 'four.bib', _id: 'fff' }, - { name: 'five.bib', _id: 'ggg', hash: 'hash' }, - ], - folders: [], - }, - ], - }, - ], - overleaf: { history: { id: this.historyId } }, - } - this.docIds = ['aaa', 'ccc'] - this.handler = await esmock.strict(modulePath, { - '@overleaf/settings': (this.settings = { - apis: { - references: { url: 'http://some.url/references' }, - docstore: { url: 'http://some.url/docstore' }, - filestore: { url: 'http://some.url/filestore' }, - project_history: { url: 'http://project-history.local' }, - }, - enableProjectHistoryBlobs: true, - }), - request: (this.request = { - get: sinon.stub(), - post: sinon.stub(), - }), - '../../../../app/src/Features/Project/ProjectGetter': - (this.ProjectGetter = { - getProject: sinon.stub().callsArgWith(2, null, this.fakeProject), - }), - '../../../../app/src/Features/User/UserGetter': (this.UserGetter = { - getUser: sinon.stub(), - }), - '../../../../app/src/Features/DocumentUpdater/DocumentUpdaterHandler': - (this.DocumentUpdaterHandler = { - flushDocToMongo: sinon.stub().callsArgWith(2, null), - }), - '../../../../app/src/infrastructure/Features': (this.Features = { - hasFeature: sinon.stub().returns(true), - }), - }) - this.fakeResponseData = { - projectId: this.projectId, - keys: ['k1', 'k2'], - } - }) - - describe('indexAll', function () { - beforeEach(function () { - sinon.stub(this.handler, '_findBibDocIds').returns(['aaa', 'ccc']) - sinon - .stub(this.handler, '_findBibFileRefs') - .returns([{ _id: 'fff' }, { _id: 'ggg', hash: 'hash' }]) - sinon.stub(this.handler, '_isFullIndex').callsArgWith(1, null, true) - this.request.post.callsArgWith( - 1, - null, - { statusCode: 200 }, - this.fakeResponseData - ) - return (this.call = callback => { - return this.handler.indexAll(this.projectId, callback) - }) - }) - - it('should call _findBibDocIds', function (done) { - return this.call((err, data) => { - expect(err).to.be.null - this.handler._findBibDocIds.callCount.should.equal(1) - this.handler._findBibDocIds - .calledWith(this.fakeProject) - .should.equal(true) - return done() - }) - }) - - it('should call _findBibFileRefs', function (done) { - return this.call((err, data) => { - expect(err).to.be.null - this.handler._findBibDocIds.callCount.should.equal(1) - this.handler._findBibDocIds - .calledWith(this.fakeProject) - .should.equal(true) - return done() - }) - }) - - it('should call DocumentUpdaterHandler.flushDocToMongo', function (done) { - return this.call((err, data) => { - expect(err).to.be.null - this.DocumentUpdaterHandler.flushDocToMongo.callCount.should.equal(2) - return done() - }) - }) - - it('should make a request to references 
service', function (done) { - return this.call((err, data) => { - expect(err).to.be.null - this.request.post.callCount.should.equal(1) - const arg = this.request.post.firstCall.args[0] - expect(arg.json).to.have.all.keys('docUrls', 'sourceURLs', 'fullIndex') - expect(arg.json.docUrls.length).to.equal(4) - expect(arg.json.docUrls).to.deep.equal([ - `${this.settings.apis.docstore.url}/project/${this.projectId}/doc/aaa/raw`, - `${this.settings.apis.docstore.url}/project/${this.projectId}/doc/ccc/raw`, - `${this.settings.apis.filestore.url}/project/${this.projectId}/file/fff?from=bibFileUrls`, - `${this.settings.apis.filestore.url}/project/${this.projectId}/file/ggg?from=bibFileUrls`, - ]) - expect(arg.json.sourceURLs.length).to.equal(4) - expect(arg.json.sourceURLs).to.deep.equal([ - { - url: `${this.settings.apis.docstore.url}/project/${this.projectId}/doc/aaa/raw`, - }, - { - url: `${this.settings.apis.docstore.url}/project/${this.projectId}/doc/ccc/raw`, - }, - { - url: `${this.settings.apis.filestore.url}/project/${this.projectId}/file/fff?from=bibFileUrls`, - }, - { - url: `${this.settings.apis.project_history.url}/project/${this.historyId}/blob/hash`, - fallbackURL: `${this.settings.apis.filestore.url}/project/${this.projectId}/file/ggg?from=bibFileUrls`, - }, - ]) - expect(arg.json.fullIndex).to.equal(true) - return done() - }) - }) - - it('should not produce an error', function (done) { - return this.call((err, data) => { - expect(err).to.equal(null) - return done() - }) - }) - - it('should return data', function (done) { - return this.call((err, data) => { - expect(err).to.be.null - expect(data).to.not.equal(null) - expect(data).to.not.equal(undefined) - expect(data).to.equal(this.fakeResponseData) - return done() - }) - }) - - describe('when ProjectGetter.getProject produces an error', function () { - beforeEach(function () { - return this.ProjectGetter.getProject.callsArgWith(2, new Error('woops')) - }) - - it('should produce an error', function (done) { - return this.call((err, data) => { - expect(err).to.not.equal(null) - expect(err).to.be.instanceof(Error) - expect(data).to.equal(undefined) - return done() - }) - }) - - it('should not send request', function (done) { - return this.call(() => { - this.request.post.callCount.should.equal(0) - return done() - }) - }) - }) - - describe('when ProjectGetter.getProject returns null', function () { - beforeEach(function () { - return this.ProjectGetter.getProject.callsArgWith(2, null) - }) - - it('should produce an error', function (done) { - return this.call((err, data) => { - expect(err).to.not.equal(null) - expect(err).to.be.instanceof(Errors.NotFoundError) - expect(data).to.equal(undefined) - return done() - }) - }) - - it('should not send request', function (done) { - return this.call(() => { - this.request.post.callCount.should.equal(0) - return done() - }) - }) - }) - - describe('when _isFullIndex produces an error', function () { - beforeEach(function () { - this.ProjectGetter.getProject.callsArgWith(2, null, this.fakeProject) - return this.handler._isFullIndex.callsArgWith(1, new Error('woops')) - }) - - it('should produce an error', function (done) { - return this.call((err, data) => { - expect(err).to.not.equal(null) - expect(err).to.be.instanceof(Error) - expect(data).to.equal(undefined) - return done() - }) - }) - - it('should not send request', function (done) { - return this.call(() => { - this.request.post.callCount.should.equal(0) - return done() - }) - }) - }) - - describe('when flushDocToMongo produces an error', 
function () { - beforeEach(function () { - this.ProjectGetter.getProject.callsArgWith(2, null, this.fakeProject) - this.handler._isFullIndex.callsArgWith(1, false) - return this.DocumentUpdaterHandler.flushDocToMongo.callsArgWith( - 2, - new Error('woops') - ) - }) - - it('should produce an error', function (done) { - return this.call((err, data) => { - expect(err).to.not.equal(null) - expect(err).to.be.instanceof(Error) - expect(data).to.equal(undefined) - return done() - }) - }) - - it('should not send request', function (done) { - return this.call(() => { - this.request.post.callCount.should.equal(0) - return done() - }) - }) - }) - }) - - describe('_findBibDocIds', function () { - beforeEach(function () { - this.fakeProject = { - rootFolder: [ - { - docs: [ - { name: 'one.bib', _id: 'aaa' }, - { name: 'two.txt', _id: 'bbb' }, - ], - folders: [ - { docs: [{ name: 'three.bib', _id: 'ccc' }], folders: [] }, - ], - }, - ], - } - return (this.expectedIds = ['aaa', 'ccc']) - }) - - it('should select the correct docIds', function () { - const result = this.handler._findBibDocIds(this.fakeProject) - return expect(result).to.deep.equal(this.expectedIds) - }) - - it('should not error with a non array of folders from dirty data', function () { - this.fakeProject.rootFolder[0].folders[0].folders = {} - const result = this.handler._findBibDocIds(this.fakeProject) - return expect(result).to.deep.equal(this.expectedIds) - }) - }) - - describe('_findBibFileRefs', function () { - beforeEach(function () { - this.fakeProject = { - rootFolder: [ - { - docs: [ - { name: 'one.bib', _id: 'aaa' }, - { name: 'two.txt', _id: 'bbb' }, - ], - fileRefs: [{ name: 'other.bib', _id: 'ddd' }], - folders: [ - { - docs: [{ name: 'three.bib', _id: 'ccc' }], - fileRefs: [{ name: 'four.bib', _id: 'ghg' }], - folders: [], - }, - ], - }, - ], - } - this.expectedIds = [ - this.fakeProject.rootFolder[0].fileRefs[0], - this.fakeProject.rootFolder[0].folders[0].fileRefs[0], - ] - }) - - it('should select the correct docIds', function () { - const result = this.handler._findBibFileRefs(this.fakeProject) - return expect(result).to.deep.equal(this.expectedIds) - }) - }) - - describe('_isFullIndex', function () { - beforeEach(function () { - this.fakeProject = { owner_ref: (this.owner_ref = 'owner-ref-123') } - this.owner = { - features: { - references: false, - }, - } - this.UserGetter.getUser = sinon.stub() - this.UserGetter.getUser - .withArgs(this.owner_ref, { features: true }) - .yields(null, this.owner) - return (this.call = callback => { - return this.handler._isFullIndex(this.fakeProject, callback) - }) - }) - - describe('with references feature on', function () { - beforeEach(function () { - return (this.owner.features.references = true) - }) - - it('should return true', function () { - return this.call((err, isFullIndex) => { - expect(err).to.equal(null) - return expect(isFullIndex).to.equal(true) - }) - }) - }) - - describe('with references feature off', function () { - beforeEach(function () { - return (this.owner.features.references = false) - }) - - it('should return false', function () { - return this.call((err, isFullIndex) => { - expect(err).to.equal(null) - return expect(isFullIndex).to.equal(false) - }) - }) - }) - - describe('with referencesSearch', function () { - beforeEach(function () { - return (this.owner.features = { - referencesSearch: true, - references: false, - }) - }) - - it('should return true', function () { - return this.call((err, isFullIndex) => { - expect(err).to.equal(null) - return 
expect(isFullIndex).to.equal(true) - }) - }) - }) - }) -}) diff --git a/services/web/test/unit/src/Subscription/PaymentProviderEntitiesTest.js b/services/web/test/unit/src/Subscription/PaymentProviderEntitiesTest.js index c6593da28d..07c401dfb8 100644 --- a/services/web/test/unit/src/Subscription/PaymentProviderEntitiesTest.js +++ b/services/web/test/unit/src/Subscription/PaymentProviderEntitiesTest.js @@ -11,6 +11,7 @@ const { PaymentProviderSubscription, PaymentProviderSubscriptionAddOnUpdate, } = require('../../../../app/src/Features/Subscription/PaymentProviderEntities') +const SubscriptionHelper = require('../../../../app/src/Features/Subscription/SubscriptionHelper') const MODULE_PATH = '../../../../app/src/Features/Subscription/PaymentProviderEntities' @@ -32,6 +33,7 @@ describe('PaymentProviderEntities', function () { requires: { '@overleaf/settings': this.Settings, './Errors': Errors, + './SubscriptionHelper': SubscriptionHelper, }, }) }) @@ -102,6 +104,23 @@ describe('PaymentProviderEntities', function () { ) }) + it('returns a change request for downgrades while on trial', function () { + const fiveDaysFromNow = new Date() + fiveDaysFromNow.setDate(fiveDaysFromNow.getDate() + 5) + this.subscription.trialPeriodEnd = fiveDaysFromNow + const { PaymentProviderSubscriptionChangeRequest } = + this.PaymentProviderEntities + const changeRequest = + this.subscription.getRequestForPlanChange('cheap-plan') + expect(changeRequest).to.deep.equal( + new PaymentProviderSubscriptionChangeRequest({ + subscription: this.subscription, + timeframe: 'now', + planCode: 'cheap-plan', + }) + ) + }) + it('preserves the AI add-on on upgrades', function () { const { PaymentProviderSubscriptionChangeRequest } = this.PaymentProviderEntities @@ -154,7 +173,7 @@ describe('PaymentProviderEntities', function () { expect(changeRequest).to.deep.equal( new PaymentProviderSubscriptionChangeRequest({ subscription: this.subscription, - timeframe: 'term_end', + timeframe: 'now', planCode: 'cheap-plan', addOnUpdates: [ new PaymentProviderSubscriptionAddOnUpdate({ @@ -280,6 +299,22 @@ describe('PaymentProviderEntities', function () { ) }) + it('returns a change request when in trial', function () { + const fiveDaysFromNow = new Date() + fiveDaysFromNow.setDate(fiveDaysFromNow.getDate() + 5) + this.subscription.trialPeriodEnd = fiveDaysFromNow + const changeRequest = this.subscription.getRequestForAddOnRemoval( + this.addOn.code + ) + expect(changeRequest).to.deep.equal( + new PaymentProviderSubscriptionChangeRequest({ + subscription: this.subscription, + timeframe: 'now', + addOnUpdates: [], + }) + ) + }) + it("throws an AddOnNotPresentError if the subscription doesn't have the add-on", function () { expect(() => this.subscription.getRequestForAddOnRemoval('another-add-on') diff --git a/services/web/test/unit/src/Subscription/PlansLocatorTests.js b/services/web/test/unit/src/Subscription/PlansLocatorTests.js index f705baa01c..bd15f5cfaa 100644 --- a/services/web/test/unit/src/Subscription/PlansLocatorTests.js +++ b/services/web/test/unit/src/Subscription/PlansLocatorTests.js @@ -29,6 +29,7 @@ const plans = [ describe('PlansLocator', function () { beforeEach(function () { this.settings = { plans } + this.AI_ADD_ON_CODE = 'assistant' this.PlansLocator = SandboxedModule.require(modulePath, { requires: { @@ -49,68 +50,139 @@ describe('PlansLocator', function () { }) }) - describe('mapRecurlyPlanCodeToStripeLookupKey', function () { + describe('buildStripeLookupKey', function () { it('should map "collaborator" plan code to 
stripe lookup keys', function () { const planCode = 'collaborator' - const lookupKey = - this.PlansLocator.mapRecurlyPlanCodeToStripeLookupKey(planCode) - expect(lookupKey).to.equal('standard_monthly') + const currency = 'eur' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + planCode, + currency + ) + expect(lookupKey).to.equal('standard_monthly_jun2025_eur') }) it('should map "collaborator_free_trial_7_days" plan code to stripe lookup keys', function () { const planCode = 'collaborator_free_trial_7_days' - const lookupKey = - this.PlansLocator.mapRecurlyPlanCodeToStripeLookupKey(planCode) - expect(lookupKey).to.equal('standard_monthly') + const currency = 'eur' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + planCode, + currency + ) + expect(lookupKey).to.equal('standard_monthly_jun2025_eur') }) it('should map "collaborator-annual" plan code to stripe lookup keys', function () { const planCode = 'collaborator-annual' - const lookupKey = - this.PlansLocator.mapRecurlyPlanCodeToStripeLookupKey(planCode) - expect(lookupKey).to.equal('standard_annual') + const currency = 'eur' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + planCode, + currency + ) + expect(lookupKey).to.equal('standard_annual_jun2025_eur') }) it('should map "professional" plan code to stripe lookup keys', function () { const planCode = 'professional' - const lookupKey = - this.PlansLocator.mapRecurlyPlanCodeToStripeLookupKey(planCode) - expect(lookupKey).to.equal('professional_monthly') + const currency = 'eur' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + planCode, + currency + ) + expect(lookupKey).to.equal('professional_monthly_jun2025_eur') }) it('should map "professional_free_trial_7_days" plan code to stripe lookup keys', function () { const planCode = 'professional_free_trial_7_days' - const lookupKey = - this.PlansLocator.mapRecurlyPlanCodeToStripeLookupKey(planCode) - expect(lookupKey).to.equal('professional_monthly') + const currency = 'eur' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + planCode, + currency + ) + expect(lookupKey).to.equal('professional_monthly_jun2025_eur') }) it('should map "professional-annual" plan code to stripe lookup keys', function () { const planCode = 'professional-annual' - const lookupKey = - this.PlansLocator.mapRecurlyPlanCodeToStripeLookupKey(planCode) - expect(lookupKey).to.equal('professional_annual') + const currency = 'eur' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + planCode, + currency + ) + expect(lookupKey).to.equal('professional_annual_jun2025_eur') }) it('should map "student" plan code to stripe lookup keys', function () { const planCode = 'student' - const lookupKey = - this.PlansLocator.mapRecurlyPlanCodeToStripeLookupKey(planCode) - expect(lookupKey).to.equal('student_monthly') + const currency = 'eur' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + planCode, + currency + ) + expect(lookupKey).to.equal('student_monthly_jun2025_eur') }) it('shoult map "student_free_trial_7_days" plan code to stripe lookup keys', function () { const planCode = 'student_free_trial_7_days' - const lookupKey = - this.PlansLocator.mapRecurlyPlanCodeToStripeLookupKey(planCode) - expect(lookupKey).to.equal('student_monthly') + const currency = 'eur' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + planCode, + currency + ) + expect(lookupKey).to.equal('student_monthly_jun2025_eur') }) it('should map "student-annual" plan code to stripe lookup keys', function () { const planCode = 
'student-annual' - const lookupKey = - this.PlansLocator.mapRecurlyPlanCodeToStripeLookupKey(planCode) - expect(lookupKey).to.equal('student_annual') + const currency = 'eur' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + planCode, + currency + ) + expect(lookupKey).to.equal('student_annual_jun2025_eur') + }) + + it('should return null for unknown add-on codes', function () { + const billingCycleInterval = 'month' + const addOnCode = 'unknown_addon' + const currency = 'gbp' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + addOnCode, + currency, + billingCycleInterval + ) + expect(lookupKey).to.equal(null) + }) + + it('should handle missing input', function () { + const lookupKey = this.PlansLocator.buildStripeLookupKey( + undefined, + undefined + ) + expect(lookupKey).to.equal(null) + }) + + it('returns the key for a monthly AI assist add-on', function () { + const billingCycleInterval = 'month' + const addOnCode = this.AI_ADD_ON_CODE + const currency = 'gbp' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + addOnCode, + currency, + billingCycleInterval + ) + expect(lookupKey).to.equal('assistant_monthly_jun2025_gbp') + }) + + it('returns the key for an annual AI assist add-on', function () { + const billingCycleInterval = 'year' + const addOnCode = this.AI_ADD_ON_CODE + const currency = 'gbp' + const lookupKey = this.PlansLocator.buildStripeLookupKey( + addOnCode, + currency, + billingCycleInterval + ) + expect(lookupKey).to.equal('assistant_annual_jun2025_gbp') }) }) diff --git a/services/web/test/unit/src/Subscription/RecurlyClientTests.js b/services/web/test/unit/src/Subscription/RecurlyClientTests.js index 97088e9944..6194e35a5f 100644 --- a/services/web/test/unit/src/Subscription/RecurlyClientTests.js +++ b/services/web/test/unit/src/Subscription/RecurlyClientTests.js @@ -692,4 +692,20 @@ describe('RecurlyClient', function () { ).to.be.rejectedWith(Error) }) }) + + describe('terminateSubscriptionByUuid', function () { + it('should attempt to terminate the subscription', async function () { + this.client.terminateSubscription = sinon + .stub() + .resolves(this.recurlySubscription) + const subscription = + await this.RecurlyClient.promises.terminateSubscriptionByUuid( + this.subscription.uuid + ) + expect(subscription).to.deep.equal(this.recurlySubscription) + expect(this.client.terminateSubscription).to.be.calledWith( + 'uuid-' + this.subscription.uuid + ) + }) + }) }) diff --git a/services/web/test/unit/src/Subscription/SubscriptionControllerTests.js b/services/web/test/unit/src/Subscription/SubscriptionControllerTests.js index b3ae6610e1..087df52815 100644 --- a/services/web/test/unit/src/Subscription/SubscriptionControllerTests.js +++ b/services/web/test/unit/src/Subscription/SubscriptionControllerTests.js @@ -6,6 +6,7 @@ const MockResponse = require('../helpers/MockResponse') const modulePath = '../../../../app/src/Features/Subscription/SubscriptionController' const SubscriptionErrors = require('../../../../app/src/Features/Subscription/Errors') +const SubscriptionHelper = require('../../../../app/src/Features/Subscription/SubscriptionHelper') const mockSubscriptions = { 'subscription-123-active': { @@ -77,7 +78,6 @@ describe('SubscriptionController', function () { buildPlansList: sinon.stub(), promises: { buildUsersSubscriptionViewModel: sinon.stub().resolves({}), - getBestSubscription: sinon.stub().resolves({}), }, buildPlansListForSubscriptionDash: sinon .stub() @@ -146,14 +146,16 @@ describe('SubscriptionController', function () { 
'../SplitTests/SplitTestHandler': this.SplitTestV2Hander, '../Authentication/SessionManager': this.SessionManager, './SubscriptionHandler': this.SubscriptionHandler, - './SubscriptionHelper': this.SubscriptionHelper, + './SubscriptionHelper': SubscriptionHelper, './SubscriptionViewModelBuilder': this.SubscriptionViewModelBuilder, './LimitationsManager': this.LimitationsManager, '../../infrastructure/GeoIpLookup': this.GeoIpLookup, '@overleaf/settings': this.settings, '../User/UserGetter': this.UserGetter, './RecurlyWrapper': (this.RecurlyWrapper = { - updateAccountEmailAddress: sinon.stub().yields(), + promises: { + updateAccountEmailAddress: sinon.stub().resolves(), + }, }), './RecurlyEventHandler': { sendRecurlyAnalyticsEvent: sinon.stub().resolves(), @@ -309,31 +311,50 @@ describe('SubscriptionController', function () { }) describe('updateAccountEmailAddress via put', function () { - it('should send the user and subscriptionId to RecurlyWrapper', function () { - this.res.sendStatus = sinon.spy() - this.SubscriptionController.updateAccountEmailAddress(this.req, this.res) - this.RecurlyWrapper.updateAccountEmailAddress - .calledWith(this.user._id, this.user.email) - .should.equal(true) + beforeEach(function () { + this.req.body = { + account_email: 'current_account_email@overleaf.com', + } }) - it('should respond with 200', function () { + it('should send the user and subscriptionId to "updateAccountEmailAddress" hooks', async function () { this.res.sendStatus = sinon.spy() - this.SubscriptionController.updateAccountEmailAddress(this.req, this.res) + + await this.SubscriptionController.updateAccountEmailAddress( + this.req, + this.res + ) + + expect(this.Modules.promises.hooks.fire).to.have.been.calledWith( + 'updateAccountEmailAddress', + this.user._id, + this.user.email + ) + }) + + it('should respond with 200', async function () { + this.res.sendStatus = sinon.spy() + await this.SubscriptionController.updateAccountEmailAddress( + this.req, + this.res + ) this.res.sendStatus.calledWith(200).should.equal(true) }) - it('should send the error to the next handler when updating recurly account email fails', function (done) { - this.RecurlyWrapper.updateAccountEmailAddress.yields(new Error()) + it('should send the error to the next handler when updating recurly account email fails', async function () { + this.Modules.promises.hooks.fire + .withArgs('updateAccountEmailAddress', this.user._id, this.user.email) + .rejects(new Error()) + this.next = sinon.spy(error => { - expect(error).instanceOf(Error) - done() + expect(error).to.be.instanceOf(Error) }) - this.SubscriptionController.updateAccountEmailAddress( + await this.SubscriptionController.updateAccountEmailAddress( this.req, this.res, this.next ) + expect(this.next.calledOnce).to.be.true }) }) diff --git a/services/web/test/unit/src/Subscription/SubscriptionGroupController.test.mjs b/services/web/test/unit/src/Subscription/SubscriptionGroupController.test.mjs new file mode 100644 index 0000000000..30301ec8cc --- /dev/null +++ b/services/web/test/unit/src/Subscription/SubscriptionGroupController.test.mjs @@ -0,0 +1,884 @@ +import { vi } from 'vitest' +import sinon from 'sinon' + +const modulePath = + '../../../../app/src/Features/Subscription/SubscriptionGroupController' + +describe('SubscriptionGroupController', function () { + beforeEach(async function (ctx) { + ctx.user = { _id: '!@312431', email: 'user@email.com' } + ctx.adminUserId = '123jlkj' + ctx.subscriptionId = '123434325412' + ctx.user_email = 'bob@gmail.com' + ctx.req = { + 
session: { + user: { + _id: ctx.adminUserId, + email: ctx.user_email, + }, + }, + params: { + subscriptionId: ctx.subscriptionId, + }, + query: {}, + } + + ctx.subscription = { + _id: ctx.subscriptionId, + teamName: 'Cool group', + groupPlan: true, + membersLimit: 5, + } + + ctx.plan = { + canUseFlexibleLicensing: true, + } + + ctx.recurlySubscription = { + get isCollectionMethodManual() { + return true + }, + } + + ctx.previewSubscriptionChangeData = { + change: {}, + currency: 'USD', + } + + ctx.createSubscriptionChangeData = { adding: 1 } + + ctx.paymentMethod = { cardType: 'Visa', lastFour: '1111' } + + ctx.SubscriptionGroupHandler = { + promises: { + removeUserFromGroup: sinon.stub().resolves(), + getUsersGroupSubscriptionDetails: sinon.stub().resolves({ + subscription: ctx.subscription, + plan: ctx.plan, + recurlySubscription: ctx.recurlySubscription, + }), + previewAddSeatsSubscriptionChange: sinon + .stub() + .resolves(ctx.previewSubscriptionChangeData), + createAddSeatsSubscriptionChange: sinon + .stub() + .resolves(ctx.createSubscriptionChangeData), + ensureFlexibleLicensingEnabled: sinon.stub().resolves(), + ensureSubscriptionIsActive: sinon.stub().resolves(), + ensureSubscriptionCollectionMethodIsNotManual: sinon.stub().resolves(), + ensureSubscriptionHasNoPendingChanges: sinon.stub().resolves(), + ensureSubscriptionHasNoPastDueInvoice: sinon.stub().resolves(), + getGroupPlanUpgradePreview: sinon + .stub() + .resolves(ctx.previewSubscriptionChangeData), + checkBillingInfoExistence: sinon.stub().resolves(ctx.paymentMethod), + updateSubscriptionPaymentTerms: sinon.stub().resolves(), + }, + } + + ctx.SubscriptionLocator = { + promises: { + getSubscription: sinon.stub().resolves(ctx.subscription), + }, + } + + ctx.SessionManager = { + getLoggedInUserId(session) { + return session.user._id + }, + getSessionUser(session) { + return session.user + }, + } + + ctx.UserAuditLogHandler = { + promises: { + addEntry: sinon.stub().resolves(), + }, + } + + ctx.Modules = { + promises: { + hooks: { + fire: sinon.stub().resolves(), + }, + }, + } + + ctx.SplitTestHandler = { + promises: { + getAssignment: sinon.stub().resolves({ variant: 'enabled' }), + }, + } + + ctx.UserGetter = { + promises: { + getUserEmail: sinon.stub().resolves(ctx.user.email), + }, + } + + ctx.paymentMethod = { cardType: 'Visa', lastFour: '1111' } + + ctx.RecurlyClient = { + promises: { + getPaymentMethod: sinon.stub().resolves(ctx.paymentMethod), + }, + } + + ctx.SubscriptionController = {} + + ctx.SubscriptionModel = { Subscription: {} } + + ctx.PlansHelper = { + isProfessionalGroupPlan: sinon.stub().returns(false), + } + + ctx.Errors = { + MissingBillingInfoError: class extends Error {}, + ManuallyCollectedError: class extends Error {}, + PendingChangeError: class extends Error {}, + InactiveError: class extends Error {}, + SubtotalLimitExceededError: class extends Error {}, + HasPastDueInvoiceError: class extends Error {}, + } + + vi.doMock( + '../../../../app/src/Features/Subscription/SubscriptionGroupHandler', + () => ({ + default: ctx.SubscriptionGroupHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Subscription/SubscriptionLocator', + () => ({ + default: ctx.SubscriptionLocator, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Authentication/SessionManager', + () => ({ + default: ctx.SessionManager, + }) + ) + + vi.doMock('../../../../app/src/Features/User/UserAuditLogHandler', () => ({ + default: ctx.UserAuditLogHandler, + })) + + vi.doMock('../../../../app/src/infrastructure/Modules', () => 
({ + default: ctx.Modules, + })) + + vi.doMock( + '../../../../app/src/Features/SplitTests/SplitTestHandler', + () => ({ + default: ctx.SplitTestHandler, + }) + ) + + vi.doMock('../../../../app/src/Features/User/UserGetter', () => ({ + default: ctx.UserGetter, + })) + + vi.doMock('../../../../app/src/Features/Errors/ErrorController', () => ({ + default: (ctx.ErrorController = { + notFound: sinon.stub(), + }), + })) + + vi.doMock( + '../../../../app/src/Features/Subscription/SubscriptionController', + () => ({ + default: ctx.SubscriptionController, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Subscription/RecurlyClient', + () => ({ + default: ctx.RecurlyClient, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Subscription/PlansHelper', + () => ctx.PlansHelper + ) + + vi.doMock( + '../../../../app/src/Features/Subscription/Errors', + () => ctx.Errors + ) + + vi.doMock( + '../../../../app/src/models/Subscription', + () => ctx.SubscriptionModel + ) + + vi.doMock('@overleaf/logger', () => ({ + default: { + err: sinon.stub(), + error: sinon.stub(), + warn: sinon.stub(), + log: sinon.stub(), + debug: sinon.stub(), + }, + })) + + ctx.Controller = (await import(modulePath)).default + }) + + describe('removeUserFromGroup', function () { + it('should use the subscription id for the logged in user and take the user id from the params', function (ctx) { + return new Promise(resolve => { + const userIdToRemove = '31231' + ctx.req.params = { user_id: userIdToRemove } + ctx.req.entity = ctx.subscription + + const res = { + sendStatus: () => { + ctx.SubscriptionGroupHandler.promises.removeUserFromGroup + .calledWith(ctx.subscriptionId, userIdToRemove, { + initiatorId: ctx.req.session.user._id, + ipAddress: ctx.req.ip, + }) + .should.equal(true) + resolve() + }, + } + ctx.Controller.removeUserFromGroup(ctx.req, res, resolve) + }) + }) + + it('should log that the user has been removed', function (ctx) { + return new Promise(resolve => { + const userIdToRemove = '31231' + ctx.req.params = { user_id: userIdToRemove } + ctx.req.entity = ctx.subscription + + const res = { + sendStatus: () => { + sinon.assert.calledWith( + ctx.UserAuditLogHandler.promises.addEntry, + userIdToRemove, + 'remove-from-group-subscription', + ctx.adminUserId, + ctx.req.ip, + { subscriptionId: ctx.subscriptionId } + ) + resolve() + }, + } + ctx.Controller.removeUserFromGroup(ctx.req, res, resolve) + }) + }) + + it('should call the group SSO hooks with group SSO enabled', function (ctx) { + return new Promise(resolve => { + const userIdToRemove = '31231' + ctx.req.params = { user_id: userIdToRemove } + ctx.req.entity = ctx.subscription + ctx.Modules.promises.hooks.fire + .withArgs('hasGroupSSOEnabled', ctx.subscription) + .resolves([true]) + + const res = { + sendStatus: () => { + ctx.Modules.promises.hooks.fire + .calledWith('hasGroupSSOEnabled', ctx.subscription) + .should.equal(true) + ctx.Modules.promises.hooks.fire + .calledWith( + 'unlinkUserFromGroupSSO', + userIdToRemove, + ctx.subscriptionId + ) + .should.equal(true) + sinon.assert.calledTwice(ctx.Modules.promises.hooks.fire) + resolve() + }, + } + ctx.Controller.removeUserFromGroup(ctx.req, res, resolve) + }) + }) + + it('should call the group SSO hooks with group SSO disabled', function (ctx) { + return new Promise(resolve => { + const userIdToRemove = '31231' + ctx.req.params = { user_id: userIdToRemove } + ctx.req.entity = ctx.subscription + ctx.Modules.promises.hooks.fire + .withArgs('hasGroupSSOEnabled', ctx.subscription) + .resolves([false]) + + const 
res = { + sendStatus: () => { + ctx.Modules.promises.hooks.fire + .calledWith('hasGroupSSOEnabled', ctx.subscription) + .should.equal(true) + sinon.assert.calledOnce(ctx.Modules.promises.hooks.fire) + resolve() + }, + } + ctx.Controller.removeUserFromGroup(ctx.req, res, resolve) + }) + }) + }) + + describe('removeSelfFromGroup', function () { + it('gets subscription and remove user', function (ctx) { + return new Promise(resolve => { + ctx.req.query = { subscriptionId: ctx.subscriptionId } + const memberUserIdToremove = 123456789 + ctx.req.session.user._id = memberUserIdToremove + + const res = { + sendStatus: () => { + sinon.assert.calledWith( + ctx.SubscriptionLocator.promises.getSubscription, + ctx.subscriptionId + ) + sinon.assert.calledWith( + ctx.SubscriptionGroupHandler.promises.removeUserFromGroup, + ctx.subscriptionId, + memberUserIdToremove, + { + initiatorId: ctx.req.session.user._id, + ipAddress: ctx.req.ip, + } + ) + resolve() + }, + } + ctx.Controller.removeSelfFromGroup(ctx.req, res, resolve) + }) + }) + + it('should log that the user has left the subscription', function (ctx) { + return new Promise(resolve => { + ctx.req.query = { subscriptionId: ctx.subscriptionId } + const memberUserIdToremove = '123456789' + ctx.req.session.user._id = memberUserIdToremove + + const res = { + sendStatus: () => { + sinon.assert.calledWith( + ctx.UserAuditLogHandler.promises.addEntry, + memberUserIdToremove, + 'remove-from-group-subscription', + memberUserIdToremove, + ctx.req.ip, + { subscriptionId: ctx.subscriptionId } + ) + resolve() + }, + } + ctx.Controller.removeSelfFromGroup(ctx.req, res, resolve) + }) + }) + + it('should call the group SSO hooks with group SSO enabled', function (ctx) { + return new Promise(resolve => { + ctx.req.query = { subscriptionId: ctx.subscriptionId } + const memberUserIdToremove = '123456789' + ctx.req.session.user._id = memberUserIdToremove + + ctx.Modules.promises.hooks.fire + .withArgs('hasGroupSSOEnabled', ctx.subscription) + .resolves([true]) + + const res = { + sendStatus: () => { + ctx.Modules.promises.hooks.fire + .calledWith('hasGroupSSOEnabled', ctx.subscription) + .should.equal(true) + ctx.Modules.promises.hooks.fire + .calledWith( + 'unlinkUserFromGroupSSO', + memberUserIdToremove, + ctx.subscriptionId + ) + .should.equal(true) + sinon.assert.calledTwice(ctx.Modules.promises.hooks.fire) + resolve() + }, + } + ctx.Controller.removeSelfFromGroup(ctx.req, res, resolve) + }) + }) + + it('should call the group SSO hooks with group SSO disabled', function (ctx) { + return new Promise(resolve => { + const userIdToRemove = '31231' + ctx.req.session.user._id = userIdToRemove + ctx.req.params = { user_id: userIdToRemove } + ctx.req.entity = ctx.subscription + ctx.Modules.promises.hooks.fire + .withArgs('hasGroupSSOEnabled', ctx.subscription) + .resolves([false]) + + const res = { + sendStatus: () => { + ctx.Modules.promises.hooks.fire + .calledWith('hasGroupSSOEnabled', ctx.subscription) + .should.equal(true) + sinon.assert.calledOnce(ctx.Modules.promises.hooks.fire) + resolve() + }, + } + ctx.Controller.removeSelfFromGroup(ctx.req, res, resolve) + }) + }) + }) + + describe('addSeatsToGroupSubscription', function () { + it('should render the "add seats" page', function (ctx) { + return new Promise((resolve, reject) => { + const res = { + render: (page, props) => { + ctx.SubscriptionGroupHandler.promises.getUsersGroupSubscriptionDetails + .calledWith(ctx.req.session.user._id) + .should.equal(true) + 
ctx.SubscriptionGroupHandler.promises.ensureFlexibleLicensingEnabled + .calledWith(ctx.plan) + .should.equal(true) + ctx.SubscriptionGroupHandler.promises.ensureSubscriptionHasNoPendingChanges + .calledWith(ctx.recurlySubscription) + .should.equal(true) + ctx.SubscriptionGroupHandler.promises.ensureSubscriptionIsActive + .calledWith(ctx.subscription) + .should.equal(true) + ctx.SubscriptionGroupHandler.promises.ensureSubscriptionHasNoPastDueInvoice + .calledWith(ctx.subscription) + .should.equal(true) + ctx.SubscriptionGroupHandler.promises.checkBillingInfoExistence + .calledWith(ctx.recurlySubscription, ctx.adminUserId) + .should.equal(true) + page.should.equal('subscriptions/add-seats') + props.subscriptionId.should.equal(ctx.subscriptionId) + props.groupName.should.equal(ctx.subscription.teamName) + props.totalLicenses.should.equal(ctx.subscription.membersLimit) + props.isProfessional.should.equal(false) + props.isCollectionMethodManual.should.equal(true) + resolve() + }, + } + + ctx.Controller.addSeatsToGroupSubscription(ctx.req, res) + }) + }) + + it('should redirect to subscription page when getting subscription details fails', function (ctx) { + return new Promise(resolve => { + ctx.SubscriptionGroupHandler.promises.getUsersGroupSubscriptionDetails = + sinon.stub().rejects() + + const res = { + redirect: url => { + url.should.equal('/user/subscription') + resolve() + }, + } + + ctx.Controller.addSeatsToGroupSubscription(ctx.req, res) + }) + }) + + it('should redirect to subscription page when flexible licensing is not enabled', function (ctx) { + return new Promise(resolve => { + ctx.SubscriptionGroupHandler.promises.ensureFlexibleLicensingEnabled = + sinon.stub().rejects() + + const res = { + redirect: url => { + url.should.equal('/user/subscription') + resolve() + }, + } + + ctx.Controller.addSeatsToGroupSubscription(ctx.req, res) + }) + }) + + it('should redirect to missing billing information page when billing information is missing', function (ctx) { + return new Promise(resolve => { + ctx.SubscriptionGroupHandler.promises.checkBillingInfoExistence = sinon + .stub() + .throws(new ctx.Errors.MissingBillingInfoError()) + + const res = { + redirect: url => { + url.should.equal( + '/user/subscription/group/missing-billing-information' + ) + resolve() + }, + } + + ctx.Controller.addSeatsToGroupSubscription(ctx.req, res) + }) + }) + + it('should redirect to subscription page when there is a pending change', function (ctx) { + return new Promise(resolve => { + ctx.SubscriptionGroupHandler.promises.ensureSubscriptionHasNoPendingChanges = + sinon.stub().throws(new ctx.Errors.PendingChangeError()) + + const res = { + redirect: url => { + url.should.equal('/user/subscription') + resolve() + }, + } + + ctx.Controller.addSeatsToGroupSubscription(ctx.req, res) + }) + }) + + it('should redirect to subscription page when subscription is not active', function (ctx) { + return new Promise(resolve => { + ctx.SubscriptionGroupHandler.promises.ensureSubscriptionIsActive = sinon + .stub() + .rejects() + + const res = { + redirect: url => { + url.should.equal('/user/subscription') + resolve() + }, + } + + ctx.Controller.addSeatsToGroupSubscription(ctx.req, res) + }) + }) + + it('should redirect to subscription page when subscription has pending invoice', function (ctx) { + ctx.SubscriptionGroupHandler.promises.ensureSubscriptionHasNoPastDueInvoice = + sinon.stub().rejects() + return new Promise(resolve => { + const res = { + redirect: url => { + url.should.equal('/user/subscription') + resolve() + }, 
+ } + + ctx.Controller.addSeatsToGroupSubscription(ctx.req, res) + }) + }) + }) + + describe('previewAddSeatsSubscriptionChange', function () { + it('should preview "add seats" change', function (ctx) { + return new Promise(resolve => { + ctx.req.body = { adding: 2 } + + const res = { + json: data => { + ctx.SubscriptionGroupHandler.promises.previewAddSeatsSubscriptionChange + .calledWith(ctx.req.session.user._id, ctx.req.body.adding) + .should.equal(true) + data.should.deep.equal(ctx.previewSubscriptionChangeData) + resolve() + }, + } + + ctx.Controller.previewAddSeatsSubscriptionChange(ctx.req, res) + }) + }) + + it('should fail previewing "add seats" change', function (ctx) { + return new Promise(resolve => { + ctx.SubscriptionGroupHandler.promises.previewAddSeatsSubscriptionChange = + sinon.stub().rejects() + + const res = { + status: statusCode => { + statusCode.should.equal(500) + + return { + end: () => { + resolve() + }, + } + }, + } + + ctx.Controller.previewAddSeatsSubscriptionChange(ctx.req, res) + }) + }) + + it('should fail previewing "add seats" change with SubtotalLimitExceededError', function (ctx) { + return new Promise(resolve => { + ctx.req.body = { adding: 2 } + ctx.SubscriptionGroupHandler.promises.previewAddSeatsSubscriptionChange = + sinon.stub().throws(new ctx.Errors.SubtotalLimitExceededError()) + + const res = { + status: statusCode => { + statusCode.should.equal(422) + + return { + json: data => { + data.should.deep.equal({ + code: 'subtotal_limit_exceeded', + adding: ctx.req.body.adding, + }) + resolve() + }, + } + }, + } + + ctx.Controller.previewAddSeatsSubscriptionChange(ctx.req, res) + }) + }) + }) + + describe('createAddSeatsSubscriptionChange', function () { + it('should apply "add seats" change', function (ctx) { + return new Promise(resolve => { + ctx.req.body = { adding: 2 } + + const res = { + json: data => { + ctx.SubscriptionGroupHandler.promises.createAddSeatsSubscriptionChange + .calledWith(ctx.req.session.user._id, ctx.req.body.adding) + .should.equal(true) + data.should.deep.equal(ctx.createSubscriptionChangeData) + resolve() + }, + } + + ctx.Controller.createAddSeatsSubscriptionChange(ctx.req, res) + }) + }) + + it('should fail applying "add seats" change', function (ctx) { + return new Promise(resolve => { + ctx.SubscriptionGroupHandler.promises.createAddSeatsSubscriptionChange = + sinon.stub().rejects() + + const res = { + status: statusCode => { + statusCode.should.equal(500) + + return { + end: () => { + resolve() + }, + } + }, + } + + ctx.Controller.createAddSeatsSubscriptionChange(ctx.req, res) + }) + }) + + it('should fail applying "add seats" change with SubtotalLimitExceededError', function (ctx) { + return new Promise(resolve => { + ctx.req.body = { adding: 2 } + ctx.SubscriptionGroupHandler.promises.createAddSeatsSubscriptionChange = + sinon.stub().throws(new ctx.Errors.SubtotalLimitExceededError()) + + const res = { + status: statusCode => { + statusCode.should.equal(422) + + return { + json: data => { + data.should.deep.equal({ + code: 'subtotal_limit_exceeded', + adding: ctx.req.body.adding, + }) + resolve() + }, + } + }, + } + + ctx.Controller.createAddSeatsSubscriptionChange(ctx.req, res) + }) + }) + }) + + describe('submitForm', function () { + it('should build and pass the request body to the sales submit handler', function (ctx) { + return new Promise(resolve => { + const adding = 100 + const poNumber = 'PO123456' + ctx.req.body = { adding, poNumber } + + const res = { + sendStatus: code => { + 
ctx.SubscriptionGroupHandler.promises.updateSubscriptionPaymentTerms( + ctx.adminUserId, + ctx.recurlySubscription, + poNumber + ) + ctx.Modules.promises.hooks.fire + .calledWith('sendSupportRequest', { + email: ctx.user.email, + subject: 'Sales Contact Form', + message: + '\n' + + '**Overleaf Sales Contact Form:**\n' + + '\n' + + '**Subject:** Self-Serve Group User Increase Request\n' + + '\n' + + `**Estimated Number of Users:** ${adding}\n` + + '\n' + + `**PO Number:** ${poNumber}\n` + + '\n' + + `**Message:** This email has been generated on behalf of user with email **${ctx.user.email}** to request an increase in the total number of users for their subscription.`, + inbox: 'sales', + }) + .should.equal(true) + sinon.assert.calledOnce(ctx.Modules.promises.hooks.fire) + code.should.equal(204) + resolve() + }, + } + ctx.Controller.submitForm(ctx.req, res, resolve) + }) + }) + }) + + describe('subscriptionUpgradePage', function () { + it('should render "subscription upgrade" page', function (ctx) { + return new Promise(resolve => { + const olSubscription = { membersLimit: 1, teamName: 'test team' } + ctx.SubscriptionModel.Subscription.findOne = () => { + return { + exec: () => olSubscription, + } + } + + const res = { + render: (page, data) => { + ctx.SubscriptionGroupHandler.promises.getGroupPlanUpgradePreview + .calledWith(ctx.req.session.user._id) + .should.equal(true) + page.should.equal('subscriptions/upgrade-group-subscription-react') + data.totalLicenses.should.equal(olSubscription.membersLimit) + data.groupName.should.equal(olSubscription.teamName) + data.changePreview.should.equal(ctx.previewSubscriptionChangeData) + resolve() + }, + } + + ctx.Controller.subscriptionUpgradePage(ctx.req, res) + }) + }) + + it('should redirect if failed to generate preview', function (ctx) { + return new Promise(resolve => { + ctx.SubscriptionGroupHandler.promises.getGroupPlanUpgradePreview = sinon + .stub() + .rejects() + + const res = { + redirect: url => { + url.should.equal('/user/subscription') + resolve() + }, + } + + ctx.Controller.subscriptionUpgradePage(ctx.req, res) + }) + }) + + it('should redirect to missing billing information page when billing information is missing', function (ctx) { + return new Promise(resolve => { + ctx.SubscriptionGroupHandler.promises.getGroupPlanUpgradePreview = sinon + .stub() + .throws(new ctx.Errors.MissingBillingInfoError()) + + const res = { + redirect: url => { + url.should.equal( + '/user/subscription/group/missing-billing-information' + ) + resolve() + }, + } + + ctx.Controller.subscriptionUpgradePage(ctx.req, res) + }) + }) + + it('should redirect to manually collected subscription error page when collection method is manual', function (ctx) { + return new Promise(resolve => { + ctx.SubscriptionGroupHandler.promises.getGroupPlanUpgradePreview = sinon + .stub() + .throws(new ctx.Errors.ManuallyCollectedError()) + + const res = { + redirect: url => { + url.should.equal( + '/user/subscription/group/manually-collected-subscription' + ) + resolve() + }, + } + + ctx.Controller.subscriptionUpgradePage(ctx.req, res) + }) + }) + + it('should redirect to subtotal limit exceeded page', function (ctx) { + return new Promise(resolve => { + ctx.SubscriptionGroupHandler.promises.getGroupPlanUpgradePreview = sinon + .stub() + .throws(new ctx.Errors.SubtotalLimitExceededError()) + + const res = { + redirect: url => { + url.should.equal('/user/subscription/group/subtotal-limit-exceeded') + resolve() + }, + } + + ctx.Controller.subscriptionUpgradePage(ctx.req, res) + }) + 
}) + }) + + describe('upgradeSubscription', function () { + it('should send 200 response', function (ctx) { + return new Promise(resolve => { + ctx.SubscriptionGroupHandler.promises.upgradeGroupPlan = sinon + .stub() + .resolves() + + const res = { + sendStatus: code => { + code.should.equal(200) + resolve() + }, + } + + ctx.Controller.upgradeSubscription(ctx.req, res) + }) + }) + + it('should send 500 response', function (ctx) { + return new Promise(resolve => { + ctx.SubscriptionGroupHandler.promises.upgradeGroupPlan = sinon + .stub() + .rejects() + + const res = { + sendStatus: code => { + code.should.equal(500) + resolve() + }, + } + + ctx.Controller.upgradeSubscription(ctx.req, res) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/Subscription/SubscriptionGroupControllerTests.mjs b/services/web/test/unit/src/Subscription/SubscriptionGroupControllerTests.mjs deleted file mode 100644 index 4376e752e7..0000000000 --- a/services/web/test/unit/src/Subscription/SubscriptionGroupControllerTests.mjs +++ /dev/null @@ -1,766 +0,0 @@ -import esmock from 'esmock' -import sinon from 'sinon' - -const modulePath = - '../../../../app/src/Features/Subscription/SubscriptionGroupController' - -describe('SubscriptionGroupController', function () { - beforeEach(async function () { - this.user = { _id: '!@312431', email: 'user@email.com' } - this.adminUserId = '123jlkj' - this.subscriptionId = '123434325412' - this.user_email = 'bob@gmail.com' - this.req = { - session: { - user: { - _id: this.adminUserId, - email: this.user_email, - }, - }, - params: { - subscriptionId: this.subscriptionId, - }, - query: {}, - } - - this.subscription = { - _id: this.subscriptionId, - teamName: 'Cool group', - groupPlan: true, - membersLimit: 5, - } - - this.plan = { - canUseFlexibleLicensing: true, - } - - this.recurlySubscription = { - get isCollectionMethodManual() { - return true - }, - } - - this.previewSubscriptionChangeData = { - change: {}, - currency: 'USD', - } - - this.createSubscriptionChangeData = { adding: 1 } - - this.paymentMethod = { cardType: 'Visa', lastFour: '1111' } - - this.SubscriptionGroupHandler = { - promises: { - removeUserFromGroup: sinon.stub().resolves(), - getUsersGroupSubscriptionDetails: sinon.stub().resolves({ - subscription: this.subscription, - plan: this.plan, - recurlySubscription: this.recurlySubscription, - }), - previewAddSeatsSubscriptionChange: sinon - .stub() - .resolves(this.previewSubscriptionChangeData), - createAddSeatsSubscriptionChange: sinon - .stub() - .resolves(this.createSubscriptionChangeData), - ensureFlexibleLicensingEnabled: sinon.stub().resolves(), - ensureSubscriptionIsActive: sinon.stub().resolves(), - ensureSubscriptionCollectionMethodIsNotManual: sinon.stub().resolves(), - ensureSubscriptionHasNoPendingChanges: sinon.stub().resolves(), - ensureSubscriptionHasNoPastDueInvoice: sinon.stub().resolves(), - getGroupPlanUpgradePreview: sinon - .stub() - .resolves(this.previewSubscriptionChangeData), - checkBillingInfoExistence: sinon.stub().resolves(this.paymentMethod), - updateSubscriptionPaymentTerms: sinon.stub().resolves(), - }, - } - - this.SubscriptionLocator = { - promises: { - getSubscription: sinon.stub().resolves(this.subscription), - }, - } - - this.SessionManager = { - getLoggedInUserId(session) { - return session.user._id - }, - getSessionUser(session) { - return session.user - }, - } - - this.UserAuditLogHandler = { - promises: { - addEntry: sinon.stub().resolves(), - }, - } - - this.Modules = { - promises: { - hooks: { - fire: 
sinon.stub().resolves(), - }, - }, - } - - this.SplitTestHandler = { - promises: { - getAssignment: sinon.stub().resolves({ variant: 'enabled' }), - }, - } - - this.UserGetter = { - promises: { - getUserEmail: sinon.stub().resolves(this.user.email), - }, - } - - this.paymentMethod = { cardType: 'Visa', lastFour: '1111' } - - this.RecurlyClient = { - promises: { - getPaymentMethod: sinon.stub().resolves(this.paymentMethod), - }, - } - - this.SubscriptionController = {} - - this.SubscriptionModel = { Subscription: {} } - - this.PlansHelper = { - isProfessionalGroupPlan: sinon.stub().returns(false), - } - - this.Errors = { - MissingBillingInfoError: class extends Error {}, - ManuallyCollectedError: class extends Error {}, - PendingChangeError: class extends Error {}, - InactiveError: class extends Error {}, - SubtotalLimitExceededError: class extends Error {}, - HasPastDueInvoiceError: class extends Error {}, - } - - this.Controller = await esmock.strict(modulePath, { - '../../../../app/src/Features/Subscription/SubscriptionGroupHandler': - this.SubscriptionGroupHandler, - '../../../../app/src/Features/Subscription/SubscriptionLocator': - this.SubscriptionLocator, - '../../../../app/src/Features/Authentication/SessionManager': - this.SessionManager, - '../../../../app/src/Features/User/UserAuditLogHandler': - this.UserAuditLogHandler, - '../../../../app/src/infrastructure/Modules': this.Modules, - '../../../../app/src/Features/SplitTests/SplitTestHandler': - this.SplitTestHandler, - '../../../../app/src/Features/User/UserGetter': this.UserGetter, - '../../../../app/src/Features/Errors/ErrorController': - (this.ErrorController = { - notFound: sinon.stub(), - }), - '../../../../app/src/Features/Subscription/SubscriptionController': - this.SubscriptionController, - '../../../../app/src/Features/Subscription/RecurlyClient': - this.RecurlyClient, - '../../../../app/src/Features/Subscription/PlansHelper': this.PlansHelper, - '../../../../app/src/Features/Subscription/Errors': this.Errors, - '../../../../app/src/models/Subscription': this.SubscriptionModel, - '@overleaf/logger': { - err: sinon.stub(), - error: sinon.stub(), - warn: sinon.stub(), - log: sinon.stub(), - debug: sinon.stub(), - }, - }) - }) - - describe('removeUserFromGroup', function () { - it('should use the subscription id for the logged in user and take the user id from the params', function (done) { - const userIdToRemove = '31231' - this.req.params = { user_id: userIdToRemove } - this.req.entity = this.subscription - - const res = { - sendStatus: () => { - this.SubscriptionGroupHandler.promises.removeUserFromGroup - .calledWith(this.subscriptionId, userIdToRemove) - .should.equal(true) - done() - }, - } - this.Controller.removeUserFromGroup(this.req, res, done) - }) - - it('should log that the user has been removed', function (done) { - const userIdToRemove = '31231' - this.req.params = { user_id: userIdToRemove } - this.req.entity = this.subscription - - const res = { - sendStatus: () => { - sinon.assert.calledWith( - this.UserAuditLogHandler.promises.addEntry, - userIdToRemove, - 'remove-from-group-subscription', - this.adminUserId, - this.req.ip, - { subscriptionId: this.subscriptionId } - ) - done() - }, - } - this.Controller.removeUserFromGroup(this.req, res, done) - }) - - it('should call the group SSO hooks with group SSO enabled', function (done) { - const userIdToRemove = '31231' - this.req.params = { user_id: userIdToRemove } - this.req.entity = this.subscription - this.Modules.promises.hooks.fire - 
.withArgs('hasGroupSSOEnabled', this.subscription) - .resolves([true]) - - const res = { - sendStatus: () => { - this.Modules.promises.hooks.fire - .calledWith('hasGroupSSOEnabled', this.subscription) - .should.equal(true) - this.Modules.promises.hooks.fire - .calledWith( - 'unlinkUserFromGroupSSO', - userIdToRemove, - this.subscriptionId - ) - .should.equal(true) - sinon.assert.calledTwice(this.Modules.promises.hooks.fire) - done() - }, - } - this.Controller.removeUserFromGroup(this.req, res, done) - }) - - it('should call the group SSO hooks with group SSO disabled', function (done) { - const userIdToRemove = '31231' - this.req.params = { user_id: userIdToRemove } - this.req.entity = this.subscription - this.Modules.promises.hooks.fire - .withArgs('hasGroupSSOEnabled', this.subscription) - .resolves([false]) - - const res = { - sendStatus: () => { - this.Modules.promises.hooks.fire - .calledWith('hasGroupSSOEnabled', this.subscription) - .should.equal(true) - sinon.assert.calledOnce(this.Modules.promises.hooks.fire) - done() - }, - } - this.Controller.removeUserFromGroup(this.req, res, done) - }) - }) - - describe('removeSelfFromGroup', function () { - it('gets subscription and remove user', function (done) { - this.req.query = { subscriptionId: this.subscriptionId } - const memberUserIdToremove = 123456789 - this.req.session.user._id = memberUserIdToremove - - const res = { - sendStatus: () => { - sinon.assert.calledWith( - this.SubscriptionLocator.promises.getSubscription, - this.subscriptionId - ) - sinon.assert.calledWith( - this.SubscriptionGroupHandler.promises.removeUserFromGroup, - this.subscriptionId, - memberUserIdToremove - ) - done() - }, - } - this.Controller.removeSelfFromGroup(this.req, res, done) - }) - - it('should log that the user has left the subscription', function (done) { - this.req.query = { subscriptionId: this.subscriptionId } - const memberUserIdToremove = '123456789' - this.req.session.user._id = memberUserIdToremove - - const res = { - sendStatus: () => { - sinon.assert.calledWith( - this.UserAuditLogHandler.promises.addEntry, - memberUserIdToremove, - 'remove-from-group-subscription', - memberUserIdToremove, - this.req.ip, - { subscriptionId: this.subscriptionId } - ) - done() - }, - } - this.Controller.removeSelfFromGroup(this.req, res, done) - }) - - it('should call the group SSO hooks with group SSO enabled', function (done) { - this.req.query = { subscriptionId: this.subscriptionId } - const memberUserIdToremove = '123456789' - this.req.session.user._id = memberUserIdToremove - - this.Modules.promises.hooks.fire - .withArgs('hasGroupSSOEnabled', this.subscription) - .resolves([true]) - - const res = { - sendStatus: () => { - this.Modules.promises.hooks.fire - .calledWith('hasGroupSSOEnabled', this.subscription) - .should.equal(true) - this.Modules.promises.hooks.fire - .calledWith( - 'unlinkUserFromGroupSSO', - memberUserIdToremove, - this.subscriptionId - ) - .should.equal(true) - sinon.assert.calledTwice(this.Modules.promises.hooks.fire) - done() - }, - } - this.Controller.removeSelfFromGroup(this.req, res, done) - }) - - it('should call the group SSO hooks with group SSO disabled', function (done) { - const userIdToRemove = '31231' - this.req.session.user._id = userIdToRemove - this.req.params = { user_id: userIdToRemove } - this.req.entity = this.subscription - this.Modules.promises.hooks.fire - .withArgs('hasGroupSSOEnabled', this.subscription) - .resolves([false]) - - const res = { - sendStatus: () => { - this.Modules.promises.hooks.fire - 
.calledWith('hasGroupSSOEnabled', this.subscription) - .should.equal(true) - sinon.assert.calledOnce(this.Modules.promises.hooks.fire) - done() - }, - } - this.Controller.removeSelfFromGroup(this.req, res, done) - }) - }) - - describe('addSeatsToGroupSubscription', function () { - it('should render the "add seats" page', function (done) { - const res = { - render: (page, props) => { - this.SubscriptionGroupHandler.promises.getUsersGroupSubscriptionDetails - .calledWith(this.req.session.user._id) - .should.equal(true) - this.SubscriptionGroupHandler.promises.ensureFlexibleLicensingEnabled - .calledWith(this.plan) - .should.equal(true) - this.SubscriptionGroupHandler.promises.ensureSubscriptionHasNoPendingChanges - .calledWith(this.recurlySubscription) - .should.equal(true) - this.SubscriptionGroupHandler.promises.ensureSubscriptionIsActive - .calledWith(this.subscription) - .should.equal(true) - this.SubscriptionGroupHandler.promises.ensureSubscriptionHasNoPastDueInvoice - .calledWith(this.subscription) - .should.equal(true) - this.SubscriptionGroupHandler.promises.checkBillingInfoExistence - .calledWith(this.recurlySubscription, this.adminUserId) - .should.equal(true) - page.should.equal('subscriptions/add-seats') - props.subscriptionId.should.equal(this.subscriptionId) - props.groupName.should.equal(this.subscription.teamName) - props.totalLicenses.should.equal(this.subscription.membersLimit) - props.isProfessional.should.equal(false) - props.isCollectionMethodManual.should.equal(true) - done() - }, - } - - this.Controller.addSeatsToGroupSubscription(this.req, res) - }) - - it('should redirect to subscription page when getting subscription details fails', function (done) { - this.SubscriptionGroupHandler.promises.getUsersGroupSubscriptionDetails = - sinon.stub().rejects() - - const res = { - redirect: url => { - url.should.equal('/user/subscription') - done() - }, - } - - this.Controller.addSeatsToGroupSubscription(this.req, res) - }) - - it('should redirect to subscription page when flexible licensing is not enabled', function (done) { - this.SubscriptionGroupHandler.promises.ensureFlexibleLicensingEnabled = - sinon.stub().rejects() - - const res = { - redirect: url => { - url.should.equal('/user/subscription') - done() - }, - } - - this.Controller.addSeatsToGroupSubscription(this.req, res) - }) - - it('should redirect to missing billing information page when billing information is missing', function (done) { - this.SubscriptionGroupHandler.promises.checkBillingInfoExistence = sinon - .stub() - .throws(new this.Errors.MissingBillingInfoError()) - - const res = { - redirect: url => { - url.should.equal( - '/user/subscription/group/missing-billing-information' - ) - done() - }, - } - - this.Controller.addSeatsToGroupSubscription(this.req, res) - }) - - it('should redirect to subscription page when there is a pending change', function (done) { - this.SubscriptionGroupHandler.promises.ensureSubscriptionHasNoPendingChanges = - sinon.stub().throws(new this.Errors.PendingChangeError()) - - const res = { - redirect: url => { - url.should.equal('/user/subscription') - done() - }, - } - - this.Controller.addSeatsToGroupSubscription(this.req, res) - }) - - it('should redirect to subscription page when subscription is not active', function (done) { - this.SubscriptionGroupHandler.promises.ensureSubscriptionIsActive = sinon - .stub() - .rejects() - - const res = { - redirect: url => { - url.should.equal('/user/subscription') - done() - }, - } - - this.Controller.addSeatsToGroupSubscription(this.req, 
res) - }) - - it('should redirect to subscription page when subscription has pending invoice', function (done) { - this.SubscriptionGroupHandler.promises.ensureSubscriptionHasNoPastDueInvoice = - sinon.stub().rejects() - - const res = { - redirect: url => { - url.should.equal('/user/subscription') - done() - }, - } - - this.Controller.addSeatsToGroupSubscription(this.req, res) - }) - }) - - describe('previewAddSeatsSubscriptionChange', function () { - it('should preview "add seats" change', function (done) { - this.req.body = { adding: 2 } - - const res = { - json: data => { - this.SubscriptionGroupHandler.promises.previewAddSeatsSubscriptionChange - .calledWith(this.req.session.user._id, this.req.body.adding) - .should.equal(true) - data.should.deep.equal(this.previewSubscriptionChangeData) - done() - }, - } - - this.Controller.previewAddSeatsSubscriptionChange(this.req, res) - }) - - it('should fail previewing "add seats" change', function (done) { - this.SubscriptionGroupHandler.promises.previewAddSeatsSubscriptionChange = - sinon.stub().rejects() - - const res = { - status: statusCode => { - statusCode.should.equal(500) - - return { - end: () => { - done() - }, - } - }, - } - - this.Controller.previewAddSeatsSubscriptionChange(this.req, res) - }) - - it('should fail previewing "add seats" change with SubtotalLimitExceededError', function (done) { - this.req.body = { adding: 2 } - this.SubscriptionGroupHandler.promises.previewAddSeatsSubscriptionChange = - sinon.stub().throws(new this.Errors.SubtotalLimitExceededError()) - - const res = { - status: statusCode => { - statusCode.should.equal(422) - - return { - json: data => { - data.should.deep.equal({ - code: 'subtotal_limit_exceeded', - adding: this.req.body.adding, - }) - done() - }, - } - }, - } - - this.Controller.previewAddSeatsSubscriptionChange(this.req, res) - }) - }) - - describe('createAddSeatsSubscriptionChange', function () { - it('should apply "add seats" change', function (done) { - this.req.body = { adding: 2 } - - const res = { - json: data => { - this.SubscriptionGroupHandler.promises.createAddSeatsSubscriptionChange - .calledWith(this.req.session.user._id, this.req.body.adding) - .should.equal(true) - data.should.deep.equal(this.createSubscriptionChangeData) - done() - }, - } - - this.Controller.createAddSeatsSubscriptionChange(this.req, res) - }) - - it('should fail applying "add seats" change', function (done) { - this.SubscriptionGroupHandler.promises.createAddSeatsSubscriptionChange = - sinon.stub().rejects() - - const res = { - status: statusCode => { - statusCode.should.equal(500) - - return { - end: () => { - done() - }, - } - }, - } - - this.Controller.createAddSeatsSubscriptionChange(this.req, res) - }) - - it('should fail applying "add seats" change with SubtotalLimitExceededError', function (done) { - this.req.body = { adding: 2 } - this.SubscriptionGroupHandler.promises.createAddSeatsSubscriptionChange = - sinon.stub().throws(new this.Errors.SubtotalLimitExceededError()) - - const res = { - status: statusCode => { - statusCode.should.equal(422) - - return { - json: data => { - data.should.deep.equal({ - code: 'subtotal_limit_exceeded', - adding: this.req.body.adding, - }) - done() - }, - } - }, - } - - this.Controller.createAddSeatsSubscriptionChange(this.req, res) - }) - }) - - describe('submitForm', function () { - it('should build and pass the request body to the sales submit handler', function (done) { - const adding = 100 - const poNumber = 'PO123456' - this.req.body = { adding, poNumber } - - const 
res = { - sendStatus: code => { - this.SubscriptionGroupHandler.promises.updateSubscriptionPaymentTerms( - this.adminUserId, - this.recurlySubscription, - poNumber - ) - this.Modules.promises.hooks.fire - .calledWith('sendSupportRequest', { - email: this.user.email, - subject: 'Sales Contact Form', - message: - '\n' + - '**Overleaf Sales Contact Form:**\n' + - '\n' + - '**Subject:** Self-Serve Group User Increase Request\n' + - '\n' + - `**Estimated Number of Users:** ${adding}\n` + - '\n' + - `**PO Number:** ${poNumber}\n` + - '\n' + - `**Message:** This email has been generated on behalf of user with email **${this.user.email}** to request an increase in the total number of users for their subscription.`, - inbox: 'sales', - }) - .should.equal(true) - sinon.assert.calledOnce(this.Modules.promises.hooks.fire) - code.should.equal(204) - done() - }, - } - this.Controller.submitForm(this.req, res, done) - }) - }) - - describe('subscriptionUpgradePage', function () { - it('should render "subscription upgrade" page', function (done) { - const olSubscription = { membersLimit: 1, teamName: 'test team' } - this.SubscriptionModel.Subscription.findOne = () => { - return { - exec: () => olSubscription, - } - } - - const res = { - render: (page, data) => { - this.SubscriptionGroupHandler.promises.getGroupPlanUpgradePreview - .calledWith(this.req.session.user._id) - .should.equal(true) - page.should.equal('subscriptions/upgrade-group-subscription-react') - data.totalLicenses.should.equal(olSubscription.membersLimit) - data.groupName.should.equal(olSubscription.teamName) - data.changePreview.should.equal(this.previewSubscriptionChangeData) - done() - }, - } - - this.Controller.subscriptionUpgradePage(this.req, res) - }) - - it('should redirect if failed to generate preview', function (done) { - this.SubscriptionGroupHandler.promises.getGroupPlanUpgradePreview = sinon - .stub() - .rejects() - - const res = { - redirect: url => { - url.should.equal('/user/subscription') - done() - }, - } - - this.Controller.subscriptionUpgradePage(this.req, res) - }) - - it('should redirect to missing billing information page when billing information is missing', function (done) { - this.SubscriptionGroupHandler.promises.getGroupPlanUpgradePreview = sinon - .stub() - .throws(new this.Errors.MissingBillingInfoError()) - - const res = { - redirect: url => { - url.should.equal( - '/user/subscription/group/missing-billing-information' - ) - done() - }, - } - - this.Controller.subscriptionUpgradePage(this.req, res) - }) - - it('should redirect to manually collected subscription error page when collection method is manual', function (done) { - this.SubscriptionGroupHandler.promises.getGroupPlanUpgradePreview = sinon - .stub() - .throws(new this.Errors.ManuallyCollectedError()) - - const res = { - redirect: url => { - url.should.equal( - '/user/subscription/group/manually-collected-subscription' - ) - done() - }, - } - - this.Controller.subscriptionUpgradePage(this.req, res) - }) - - it('should redirect to subtotal limit exceeded page', function (done) { - this.SubscriptionGroupHandler.promises.getGroupPlanUpgradePreview = sinon - .stub() - .throws(new this.Errors.SubtotalLimitExceededError()) - - const res = { - redirect: url => { - url.should.equal('/user/subscription/group/subtotal-limit-exceeded') - done() - }, - } - - this.Controller.subscriptionUpgradePage(this.req, res) - }) - }) - - describe('upgradeSubscription', function () { - it('should send 200 response', function (done) { - 
this.SubscriptionGroupHandler.promises.upgradeGroupPlan = sinon - .stub() - .resolves() - - const res = { - sendStatus: code => { - code.should.equal(200) - done() - }, - } - - this.Controller.upgradeSubscription(this.req, res) - }) - - it('should send 500 response', function (done) { - this.SubscriptionGroupHandler.promises.upgradeGroupPlan = sinon - .stub() - .rejects() - - const res = { - sendStatus: code => { - code.should.equal(500) - done() - }, - } - - this.Controller.upgradeSubscription(this.req, res) - }) - }) -}) diff --git a/services/web/test/unit/src/Subscription/SubscriptionGroupHandlerTests.js b/services/web/test/unit/src/Subscription/SubscriptionGroupHandlerTests.js index 0c47db3e14..1c314458da 100644 --- a/services/web/test/unit/src/Subscription/SubscriptionGroupHandlerTests.js +++ b/services/web/test/unit/src/Subscription/SubscriptionGroupHandlerTests.js @@ -233,13 +233,15 @@ describe('SubscriptionGroupHandler', function () { describe('removeUserFromGroup', function () { it('should call the subscription updater to remove the user', async function () { + const auditLog = { ipAddress: '0:0:0:0', initiatorId: this.user._id } await this.Handler.promises.removeUserFromGroup( this.adminUser_id, - this.user._id + this.user._id, + auditLog ) this.SubscriptionUpdater.promises.removeUserFromGroup - .calledWith(this.adminUser_id, this.user._id) + .calledWith(this.adminUser_id, this.user._id, auditLog) .should.equal(true) }) }) @@ -1149,7 +1151,9 @@ describe('SubscriptionGroupHandler', function () { expect( this.SubscriptionUpdater.promises.removeUserFromGroup - ).to.have.been.calledWith(this.subscription._id, members[2]._id) + ).to.have.been.calledWith(this.subscription._id, members[2]._id, { + initiatorId: inviterId, + }) expect( this.TeamInvitesHandler.promises.createInvite.callCount diff --git a/services/web/test/unit/src/Subscription/SubscriptionHandlerTests.js b/services/web/test/unit/src/Subscription/SubscriptionHandlerTests.js index ed5ed2f6d1..7bf23defd2 100644 --- a/services/web/test/unit/src/Subscription/SubscriptionHandlerTests.js +++ b/services/web/test/unit/src/Subscription/SubscriptionHandlerTests.js @@ -5,6 +5,7 @@ const { expect } = chai const { PaymentProviderSubscription, } = require('../../../../app/src/Features/Subscription/PaymentProviderEntities') +const SubscriptionHelper = require('../../../../app/src/Features/Subscription/SubscriptionHelper') const MODULE_PATH = '../../../../app/src/Features/Subscription/SubscriptionHandler' @@ -149,6 +150,7 @@ describe('SubscriptionHandler', function () { '../../models/User': { User: this.User, }, + './SubscriptionHelper': SubscriptionHelper, './SubscriptionUpdater': this.SubscriptionUpdater, './SubscriptionLocator': this.SubscriptionLocator, './LimitationsManager': this.LimitationsManager, diff --git a/services/web/test/unit/src/Subscription/SubscriptionHelperTests.js b/services/web/test/unit/src/Subscription/SubscriptionHelperTests.js index a6e1ffa089..fb667ca451 100644 --- a/services/web/test/unit/src/Subscription/SubscriptionHelperTests.js +++ b/services/web/test/unit/src/Subscription/SubscriptionHelperTests.js @@ -102,38 +102,65 @@ describe('SubscriptionHelper', function () { }) describe('shouldPlanChangeAtTermEnd', function () { - it('should return true if the new plan is less expensive', function () { + it('should return false if isInTrial is true', function () { + const isInTrial = true const changeAtTermEnd = this.SubscriptionHelper.shouldPlanChangeAtTermEnd( plans.expensive, - plans.cheaper + plans.cheaper, + 
isInTrial + ) + expect(changeAtTermEnd).to.be.false + }) + + it('should return true if the new plan is less expensive', function () { + const isInTrial = false + const changeAtTermEnd = this.SubscriptionHelper.shouldPlanChangeAtTermEnd( + plans.expensive, + plans.cheaper, + isInTrial ) expect(changeAtTermEnd).to.be.true }) + it('should return false if the new plan is more exepensive', function () { + const isInTrial = false const changeAtTermEnd = this.SubscriptionHelper.shouldPlanChangeAtTermEnd( plans.cheaper, - plans.expensive + plans.expensive, + isInTrial ) expect(changeAtTermEnd).to.be.false }) + it('should return false if the new plan is the same price', function () { + const isInTrial = false + const changeAtTermEnd = this.SubscriptionHelper.shouldPlanChangeAtTermEnd( plans.cheaper, - plans.alsoCheap + plans.alsoCheap, + isInTrial ) expect(changeAtTermEnd).to.be.false }) + it('should return false if the change is from an individual plan to a more expensive group plan', function () { + const isInTrial = false + const changeAtTermEnd = this.SubscriptionHelper.shouldPlanChangeAtTermEnd( plans.expensive, - plans.expensiveGroup + plans.expensiveGroup, + isInTrial ) expect(changeAtTermEnd).to.be.false }) + it('should return true if the change is from an individual plan to a cheaper group plan', function () { + const isInTrial = false + const changeAtTermEnd = this.SubscriptionHelper.shouldPlanChangeAtTermEnd( plans.expensive, - plans.cheapGroup + plans.cheapGroup, + isInTrial ) expect(changeAtTermEnd).to.be.true }) @@ -267,4 +294,229 @@ describe('SubscriptionHelper', function () { }) }) }) + + describe('isPaidSubscription', function () { + it('should return true for a subscription with a recurly subscription id', function () { + const result = this.SubscriptionHelper.isPaidSubscription({ + recurlySubscription_id: 'some-id', + }) + expect(result).to.be.true + }) + + it('should return true for a subscription with a stripe subscription id', function () { + const result = this.SubscriptionHelper.isPaidSubscription({ + paymentProvider: { subscriptionId: 'some-id' }, + }) + expect(result).to.be.true + }) + + it('should return false for a free subscription', function () { + const result = this.SubscriptionHelper.isPaidSubscription({}) + expect(result).to.be.false + }) + + it('should return false for a missing subscription', function () { + const result = this.SubscriptionHelper.isPaidSubscription() + expect(result).to.be.false + }) + }) + + describe('isIndividualActivePaidSubscription', function () { + it('should return true for an active recurly subscription', function () { + const result = this.SubscriptionHelper.isIndividualActivePaidSubscription( + { + groupPlan: false, + recurlyStatus: { state: 'active' }, + recurlySubscription_id: 'some-id', + } + ) + expect(result).to.be.true + }) + + it('should return true for an active stripe subscription', function () { + const result = this.SubscriptionHelper.isIndividualActivePaidSubscription( + { + groupPlan: false, + paymentProvider: { subscriptionId: 'sub_123', state: 'active' }, + } + ) + expect(result).to.be.true + }) + + it('should return false for a canceled recurly subscription', function () { + const result = this.SubscriptionHelper.isIndividualActivePaidSubscription( + { + groupPlan: false, + recurlyStatus: { state: 'canceled' }, + recurlySubscription_id: 'some-id', + } + ) + expect(result).to.be.false + }) + + it('should return false for a canceled stripe subscription', function () { + const result = 
this.SubscriptionHelper.isIndividualActivePaidSubscription( + { + groupPlan: false, + paymentProvider: { state: 'canceled', subscriptionId: 'sub_123' }, + } + ) + expect(result).to.be.false + }) + + it('should return false for a group plan subscription', function () { + const result = this.SubscriptionHelper.isIndividualActivePaidSubscription( + { + groupPlan: true, + recurlyStatus: { state: 'active' }, + recurlySubscription_id: 'some-id', + } + ) + expect(result).to.be.false + }) + + it('should return false for a free subscription', function () { + const result = this.SubscriptionHelper.isIndividualActivePaidSubscription( + {} + ) + expect(result).to.be.false + }) + + it('should return false for a subscription with an empty string for recurlySubscription_id', function () { + const result = this.SubscriptionHelper.isIndividualActivePaidSubscription( + { + groupPlan: false, + recurlySubscription_id: '', + recurlyStatus: { state: 'active' }, + } + ) + expect(result).to.be.false + }) + + it('should return false for a subscription with an empty string for paymentProvider.subscriptionId', function () { + const result = this.SubscriptionHelper.isIndividualActivePaidSubscription( + { + groupPlan: false, + paymentProvider: { state: 'active', subscriptionId: '' }, + } + ) + expect(result).to.be.false + }) + + it('should return false for a missing subscription', function () { + const result = this.SubscriptionHelper.isPaidSubscription() + expect(result).to.be.false + }) + }) + + describe('getPaymentProviderSubscriptionId', function () { + it('should return the recurly subscription id if it exists', function () { + const result = this.SubscriptionHelper.getPaymentProviderSubscriptionId({ + recurlySubscription_id: 'some-id', + }) + expect(result).to.equal('some-id') + }) + + it('should return the payment provider subscription id if it exists', function () { + const result = this.SubscriptionHelper.getPaymentProviderSubscriptionId({ + paymentProvider: { subscriptionId: 'sub_123' }, + }) + expect(result).to.equal('sub_123') + }) + + it('should return null if no subscription id exists', function () { + const result = this.SubscriptionHelper.getPaymentProviderSubscriptionId( + {} + ) + expect(result).to.be.null + }) + }) + + describe('getPaidSubscriptionState', function () { + it('should return the recurly state if it exists', function () { + const result = this.SubscriptionHelper.getPaidSubscriptionState({ + recurlyStatus: { state: 'active' }, + }) + expect(result).to.equal('active') + }) + + it('should return the payment provider state if it exists', function () { + const result = this.SubscriptionHelper.getPaidSubscriptionState({ + paymentProvider: { state: 'active' }, + }) + expect(result).to.equal('active') + }) + + it('should return null if no state exists', function () { + const result = this.SubscriptionHelper.getPaidSubscriptionState({}) + expect(result).to.be.null + }) + }) + + describe('getSubscriptionTrialStartedAt', function () { + it('should return the recurly trial start date if it exists', function () { + const result = this.SubscriptionHelper.getSubscriptionTrialStartedAt({ + recurlySubscription_id: 'some-id', + recurlyStatus: { trialStartedAt: new Date('2023-01-01') }, + }) + expect(result).to.deep.equal(new Date('2023-01-01')) + }) + + it('should return the payment provider trial start date if it exists', function () { + const result = this.SubscriptionHelper.getSubscriptionTrialStartedAt({ + recurlyStatus: {}, + paymentProvider: { trialStartedAt: new Date('2023-01-01') }, + }) + 
expect(result).to.deep.equal(new Date('2023-01-01')) + }) + + it('should return undefined if no trial start date exists', function () { + const result = this.SubscriptionHelper.getSubscriptionTrialStartedAt({}) + expect(result).to.be.undefined + }) + }) + + describe('getSubscriptionTrialEndsAt', function () { + it('should return the recurly trial end date if it exists', function () { + const result = this.SubscriptionHelper.getSubscriptionTrialEndsAt({ + recurlySubscription_id: 'some-id', + recurlyStatus: { trialEndsAt: new Date('2023-01-01') }, + }) + expect(result).to.deep.equal(new Date('2023-01-01')) + }) + + it('should return the payment provider trial end date if it exists', function () { + const result = this.SubscriptionHelper.getSubscriptionTrialEndsAt({ + recurlyStatus: {}, + paymentProvider: { trialEndsAt: new Date('2023-01-01') }, + }) + expect(result).to.deep.equal(new Date('2023-01-01')) + }) + + it('should return undefined if no trial end date exists', function () { + const result = this.SubscriptionHelper.getSubscriptionTrialEndsAt({}) + expect(result).to.be.undefined + }) + }) + + describe('isInTrial', function () { + it('should return false if trialEndsAt is null', function () { + const result = this.SubscriptionHelper.isInTrial(null) + expect(result).to.be.false + }) + + it('should return false if trialEndsAt is before now', function () { + const tenDaysAgo = new Date() + tenDaysAgo.setDate(tenDaysAgo.getDate() - 10) + const result = this.SubscriptionHelper.isInTrial(tenDaysAgo) + expect(result).to.be.false + }) + + it('should return true if trialEndsAt is after now', function () { + const tenDaysFromNow = new Date() + tenDaysFromNow.setDate(tenDaysFromNow.getDate() + 10) + const result = this.SubscriptionHelper.isInTrial(tenDaysFromNow) + expect(result).to.be.true + }) + }) }) diff --git a/services/web/test/unit/src/Subscription/SubscriptionLocatorTests.js b/services/web/test/unit/src/Subscription/SubscriptionLocatorTests.js index f66eda5b7f..e8202424fc 100644 --- a/services/web/test/unit/src/Subscription/SubscriptionLocatorTests.js +++ b/services/web/test/unit/src/Subscription/SubscriptionLocatorTests.js @@ -13,6 +13,11 @@ describe('Subscription Locator Tests', function () { exec: sinon.stub().resolves(), }), find: sinon.stub().returns({ + populate: sinon.stub().returns({ + populate: sinon.stub().returns({ + exec: sinon.stub().resolves([]), + }), + }), exec: sinon.stub().resolves(), }), } @@ -77,4 +82,110 @@ describe('Subscription Locator Tests', function () { subscription.should.equal(this.subscription) }) }) + + describe('getUserSubscriptionStatus', function () { + it('should return no active personal or group subscription when no user is passed', async function () { + const subscriptionStatus = + await this.SubscriptionLocator.promises.getUserSubscriptionStatus( + undefined + ) + expect(subscriptionStatus).to.deep.equal({ + personal: false, + group: false, + }) + }) + + it('should return no active personal or group subscription when the user has no subscription', async function () { + const subscriptionStatus = + await this.SubscriptionLocator.promises.getUserSubscriptionStatus( + this.user._id + ) + expect(subscriptionStatus).to.deep.equal({ + personal: false, + group: false, + }) + }) + + it('should return active personal subscription', async function () { + this.Subscription.findOne.returns({ + exec: sinon.stub().resolves({ + recurlyStatus: { + state: 'active', + }, + }), + }) + const subscriptionStatus = + await 
this.SubscriptionLocator.promises.getUserSubscriptionStatus( + this.user._id + ) + expect(subscriptionStatus).to.deep.equal({ personal: true, group: false }) + }) + + it('should return active group subscription when member of a group plan', async function () { + this.Subscription.find.returns({ + populate: sinon.stub().returns({ + populate: sinon.stub().returns({ + exec: sinon.stub().resolves([ + { + recurlyStatus: { + state: 'active', + }, + groupPlan: true, + }, + ]), + }), + }), + }) + const subscriptionStatus = + await this.SubscriptionLocator.promises.getUserSubscriptionStatus( + this.user._id + ) + expect(subscriptionStatus).to.deep.equal({ personal: false, group: true }) + }) + + it('should return active group subscription when owner of a group plan', async function () { + this.Subscription.findOne.returns({ + exec: sinon.stub().resolves({ + recurlyStatus: { + state: 'active', + }, + groupPlan: true, + }), + }) + const subscriptionStatus = + await this.SubscriptionLocator.promises.getUserSubscriptionStatus( + this.user._id + ) + expect(subscriptionStatus).to.deep.equal({ personal: false, group: true }) + }) + + it('should return active personal and group subscription when has personal subscription and member of a group', async function () { + this.Subscription.find.returns({ + populate: sinon.stub().returns({ + populate: sinon.stub().returns({ + exec: sinon.stub().resolves([ + { + recurlyStatus: { + state: 'active', + }, + groupPlan: true, + }, + ]), + }), + }), + }) + this.Subscription.findOne.returns({ + exec: sinon.stub().resolves({ + recurlyStatus: { + state: 'active', + }, + }), + }) + const subscriptionStatus = + await this.SubscriptionLocator.promises.getUserSubscriptionStatus( + this.user._id + ) + expect(subscriptionStatus).to.deep.equal({ personal: true, group: true }) + }) + }) }) diff --git a/services/web/test/unit/src/Subscription/SubscriptionUpdaterTests.js b/services/web/test/unit/src/Subscription/SubscriptionUpdaterTests.js index 09644bc7b1..d272ad51e4 100644 --- a/services/web/test/unit/src/Subscription/SubscriptionUpdaterTests.js +++ b/services/web/test/unit/src/Subscription/SubscriptionUpdaterTests.js @@ -70,6 +70,7 @@ describe('SubscriptionUpdater', function () { .stub() .returns({ exec: sinon.stub().resolves() }) this.SubscriptionModel.findOne = sinon.stub().resolves() + this.SubscriptionModel.findById = sinon.stub().resolves() this.SubscriptionModel.updateMany = sinon .stub() .returns({ exec: sinon.stub().resolves() }) @@ -120,6 +121,18 @@ describe('SubscriptionUpdater', function () { }, }, ], + mongo: { + options: { + appname: 'web', + maxPoolSize: 100, + serverSelectionTimeoutMS: 60000, + socketTimeoutMS: 60000, + monitorCommands: true, + family: 4, + }, + url: 'mongodb://mongo/test-overleaf', + hasSecondaries: false, + }, } this.UserFeaturesUpdater = { @@ -161,6 +174,12 @@ describe('SubscriptionUpdater', function () { }, } + this.UserUpdater = { + promises: { + updateUser: sinon.stub().resolves(), + }, + } + this.SubscriptionUpdater = SandboxedModule.require(modulePath, { requires: { '../../models/Subscription': { @@ -181,6 +200,14 @@ describe('SubscriptionUpdater', function () { }), '../../infrastructure/Features': this.Features, '../User/UserAuditLogHandler': this.UserAuditLogHandler, + '../User/UserUpdater': this.UserUpdater, + '../../infrastructure/Modules': (this.Modules = { + promises: { + hooks: { + fire: sinon.stub().resolves(), + }, + }, + }), }, }) }) @@ -486,6 +513,7 @@ describe('SubscriptionUpdater', function () { this.SubscriptionModel.updateOne 
.calledWith(searchOps, insertOperation) .should.equal(true) + expect(this.SubscriptionModel.updateOne.lastCall.args[2].session).to.exist sinon.assert.calledWith( this.AnalyticsManager.recordEventForUserInBackground, this.otherUserId, @@ -571,6 +599,24 @@ describe('SubscriptionUpdater', function () { } ) }) + + it('should add an entry to the group audit log when joining a group', async function () { + await this.SubscriptionUpdater.promises.addUserToGroup( + this.subscription._id, + this.otherUserId, + { ipAddress: '0:0:0:0', initiatorId: 'user123' } + ) + + expect(this.Modules.promises.hooks.fire).to.have.been.calledWith( + 'addGroupAuditLogEntry', + { + groupId: this.subscription._id, + initiatorId: 'user123', + ipAddress: '0:0:0:0', + operation: 'join-group', + } + ) + }) }) describe('removeUserFromGroup', function () { @@ -584,6 +630,9 @@ describe('SubscriptionUpdater', function () { }, ] this.SubscriptionModel.findOne.resolves(this.groupSubscription) + this.SubscriptionModel.findById = sinon + .stub() + .resolves(this.groupSubscription) this.SubscriptionLocator.promises.getMemberSubscriptions.resolves( this.fakeSubscriptions ) @@ -600,6 +649,28 @@ describe('SubscriptionUpdater', function () { .should.equal(true) }) + it('should remove user enrollment if the group is managed', async function () { + this.SubscriptionModel.findById.resolves({ + ...this.groupSubscription, + managedUsersEnabled: true, + }) + await this.SubscriptionUpdater.promises.removeUserFromGroup( + this.groupSubscription._id, + this.otherUserId + ) + this.UserUpdater.promises.updateUser + .calledWith( + { _id: this.otherUserId }, + { + $unset: { + 'enrollment.managedBy': 1, + 'enrollment.enrolledAt': 1, + }, + } + ) + .should.equal(true) + }) + it('should send a group-subscription-left event', async function () { await this.SubscriptionUpdater.promises.removeUserFromGroup( this.groupSubscription._id, diff --git a/services/web/test/unit/src/Subscription/SubscriptionViewModelBuilderTests.js b/services/web/test/unit/src/Subscription/SubscriptionViewModelBuilderTests.js index 0f666b888a..86eb51070e 100644 --- a/services/web/test/unit/src/Subscription/SubscriptionViewModelBuilderTests.js +++ b/services/web/test/unit/src/Subscription/SubscriptionViewModelBuilderTests.js @@ -7,6 +7,7 @@ const { PaymentProviderSubscriptionAddOn, PaymentProviderSubscriptionChange, } = require('../../../../app/src/Features/Subscription/PaymentProviderEntities') +const SubscriptionHelper = require('../../../../app/src/Features/Subscription/SubscriptionHelper') const modulePath = '../../../../app/src/Features/Subscription/SubscriptionViewModelBuilder' @@ -159,13 +160,14 @@ describe('SubscriptionViewModelBuilder', function () { './SubscriptionUpdater': this.SubscriptionUpdater, './PlansLocator': this.PlansLocator, '../../infrastructure/Modules': (this.Modules = { + promises: { hooks: { fire: sinon.stub().resolves([]) } }, hooks: { fire: sinon.stub().yields(null, []), }, }), './V1SubscriptionManager': {}, '../Publishers/PublishersGetter': this.PublishersGetter, - './SubscriptionHelper': {}, + './SubscriptionHelper': SubscriptionHelper, }, }) @@ -180,10 +182,10 @@ describe('SubscriptionViewModelBuilder', function () { .returns(this.commonsPlan) }) - describe('getBestSubscription', function () { + describe('getUsersSubscriptionDetails', function () { it('should return a free plan when user has no subscription or affiliation', async function () { - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + 
const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) assert.deepEqual(usersBestSubscription, { type: 'free' }) @@ -195,8 +197,8 @@ describe('SubscriptionViewModelBuilder', function () { .withArgs(this.user) .resolves(this.individualCustomSubscription) - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) @@ -213,8 +215,8 @@ describe('SubscriptionViewModelBuilder', function () { .withArgs(this.user) .resolves(this.individualSubscription) - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) @@ -234,8 +236,8 @@ describe('SubscriptionViewModelBuilder', function () { .withArgs(this.user) .resolves(this.individualSubscription) - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) @@ -255,8 +257,8 @@ describe('SubscriptionViewModelBuilder', function () { .withArgs(this.user) .resolves(this.individualSubscription) - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) @@ -268,8 +270,8 @@ describe('SubscriptionViewModelBuilder', function () { }) }) - it('should update subscription if recurly data is missing', async function () { - this.individualSubscriptionWithoutRecurly = { + it('should update subscription if recurly payment state is missing', async function () { + this.individualSubscriptionWithoutPaymentState = { planCode: this.planCode, plan: this.plan, recurlySubscription_id: this.recurlySubscription_id, @@ -280,37 +282,104 @@ describe('SubscriptionViewModelBuilder', function () { this.SubscriptionLocator.promises.getUsersSubscription .withArgs(this.user) .onCall(0) - .resolves(this.individualSubscriptionWithoutRecurly) + .resolves(this.individualSubscriptionWithoutPaymentState) .withArgs(this.user) .onCall(1) .resolves(this.individualSubscription) - this.RecurlyWrapper.promises.getSubscription - .withArgs(this.individualSubscription.recurlySubscription_id, { - includeAccount: true, - }) - .resolves(this.paymentRecord) + const payment = { + subscription: this.paymentRecord, + account: new PaymentProviderAccount({}), + coupons: [], + } - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + this.Modules.promises.hooks.fire + .withArgs( + 'getPaymentFromRecordPromise', + this.individualSubscriptionWithoutPaymentState + ) + .resolves([payment]) + this.Modules.promises.hooks.fire + .withArgs( + 'syncSubscription', + payment, + this.individualSubscriptionWithoutPaymentState + ) + .resolves([]) + + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) - sinon.assert.calledWith( - this.RecurlyWrapper.promises.getSubscription, - this.individualSubscriptionWithoutRecurly.recurlySubscription_id, - { 
includeAccount: true } - ) - sinon.assert.calledWith( - this.SubscriptionUpdater.promises.updateSubscriptionFromRecurly, - this.paymentRecord, - this.individualSubscriptionWithoutRecurly - ) assert.deepEqual(usersBestSubscription, { type: 'individual', subscription: this.individualSubscription, plan: this.plan, remainingTrialDays: -1, }) + assert.isTrue( + this.Modules.promises.hooks.fire.withArgs( + 'getPaymentFromRecordPromise', + this.individualSubscriptionWithoutPaymentState + ).calledOnce + ) + }) + + it('should update subscription if stripe payment state is missing', async function () { + this.individualSubscriptionWithoutPaymentState = { + planCode: this.planCode, + plan: this.plan, + paymentProvider: { + subscriptionId: this.recurlySubscription_id, + }, + } + this.paymentRecord = { + state: 'active', + } + this.SubscriptionLocator.promises.getUsersSubscription + .withArgs(this.user) + .onCall(0) + .resolves(this.individualSubscriptionWithoutPaymentState) + .withArgs(this.user) + .onCall(1) + .resolves(this.individualSubscription) + const payment = { + subscription: this.paymentRecord, + account: new PaymentProviderAccount({}), + coupons: [], + } + + this.Modules.promises.hooks.fire + .withArgs( + 'getPaymentFromRecordPromise', + this.individualSubscriptionWithoutPaymentState + ) + .resolves([payment]) + this.Modules.promises.hooks.fire + .withArgs( + 'syncSubscription', + payment, + this.individualSubscriptionWithoutPaymentState + ) + .resolves([]) + + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( + this.user + ) + + assert.deepEqual(usersBestSubscription, { + type: 'individual', + subscription: this.individualSubscription, + plan: this.plan, + remainingTrialDays: -1, + }) + assert.isTrue( + this.Modules.promises.hooks.fire.withArgs( + 'getPaymentFromRecordPromise', + this.individualSubscriptionWithoutPaymentState + ).calledOnce + ) }) }) @@ -318,8 +387,8 @@ describe('SubscriptionViewModelBuilder', function () { this.SubscriptionLocator.promises.getMemberSubscriptions .withArgs(this.user) .resolves([this.groupSubscription]) - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) assert.deepEqual(usersBestSubscription, { @@ -336,8 +405,8 @@ describe('SubscriptionViewModelBuilder', function () { .resolves([ Object.assign({}, this.groupSubscription, { teamName: 'test team' }), ]) - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) assert.deepEqual(usersBestSubscription, { @@ -353,8 +422,8 @@ describe('SubscriptionViewModelBuilder', function () { .withArgs(this.user._id) .resolves([this.commonsSubscription]) - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) @@ -385,8 +454,8 @@ describe('SubscriptionViewModelBuilder', function () { compileTimeout: 60, } - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await 
this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) @@ -410,8 +479,8 @@ describe('SubscriptionViewModelBuilder', function () { compileTimeout: 60, } - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) @@ -440,8 +509,8 @@ describe('SubscriptionViewModelBuilder', function () { compileTimeout: 240, } - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) @@ -469,8 +538,8 @@ describe('SubscriptionViewModelBuilder', function () { compileTimeout: 240, } - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) @@ -499,8 +568,8 @@ describe('SubscriptionViewModelBuilder', function () { compileTimeout: 240, } - const usersBestSubscription = - await this.SubscriptionViewModelBuilder.promises.getBestSubscription( + const { bestSubscription: usersBestSubscription } = + await this.SubscriptionViewModelBuilder.promises.getUsersSubscriptionDetails( this.user ) @@ -589,7 +658,7 @@ describe('SubscriptionViewModelBuilder', function () { describe('isEligibleForGroupPlan', function () { it('is false for Stripe subscriptions', async function () { - this.paymentRecord.service = 'stripe' + this.paymentRecord.service = 'stripe-us' const result = await this.SubscriptionViewModelBuilder.promises.buildUsersSubscriptionViewModel( this.user @@ -627,7 +696,7 @@ describe('SubscriptionViewModelBuilder', function () { describe('isEligibleForPause', function () { it('is false for Stripe subscriptions', async function () { - this.paymentRecord.service = 'stripe' + this.paymentRecord.service = 'stripe-us' const result = await this.SubscriptionViewModelBuilder.promises.buildUsersSubscriptionViewModel( this.user @@ -777,7 +846,7 @@ describe('SubscriptionViewModelBuilder', function () { this.paymentRecord.pausePeriodStart = null this.paymentRecord.remainingPauseCycles = null this.paymentRecord.trialPeriodEnd = null - this.paymentRecord.service = 'stripe' + this.paymentRecord.service = 'stripe-us' const result = await this.SubscriptionViewModelBuilder.promises.buildUsersSubscriptionViewModel( this.user @@ -847,7 +916,7 @@ describe('SubscriptionViewModelBuilder', function () { }) it('does not add a billing details link for a Stripe subscription', async function () { - this.paymentRecord.service = 'stripe' + this.paymentRecord.service = 'stripe-us' this.Modules.hooks.fire .withArgs('getPaymentFromRecord', this.individualSubscription) .yields(null, [ diff --git a/services/web/test/unit/src/Subscription/TeamInvitesController.test.mjs b/services/web/test/unit/src/Subscription/TeamInvitesController.test.mjs new file mode 100644 index 0000000000..be5fe26670 --- /dev/null +++ b/services/web/test/unit/src/Subscription/TeamInvitesController.test.mjs @@ -0,0 +1,277 @@ +import { expect, vi } from 'vitest' +import sinon from 'sinon' + +const modulePath = + '../../../../app/src/Features/Subscription/TeamInvitesController' + +describe('TeamInvitesController', function () { + beforeEach(async function (ctx) { + ctx.user = { _id: 
'!@312431', email: 'user@email.com' } + ctx.adminUserId = '123jlkj' + ctx.subscriptionId = '123434325412' + ctx.user_email = 'bob@gmail.com' + ctx.req = { + session: { + user: { + _id: ctx.adminUserId, + email: ctx.user_email, + }, + }, + params: {}, + query: {}, + ip: '0.0.0.0', + } + + ctx.subscription = { + _id: ctx.subscriptionId, + } + + ctx.TeamInvitesHandler = { + promises: { + acceptInvite: sinon.stub().resolves(ctx.subscription), + getInvite: sinon.stub().resolves({ + invite: { + email: ctx.user.email, + token: 'token123', + inviterName: ctx.user_email, + }, + subscription: ctx.subscription, + }), + }, + } + + ctx.SubscriptionLocator = { + promises: { + hasSSOEnabled: sinon.stub().resolves(true), + getUsersSubscription: sinon.stub().resolves(), + }, + } + ctx.ErrorController = { notFound: sinon.stub() } + + ctx.SessionManager = { + getLoggedInUserId(session) { + return session.user?._id + }, + getSessionUser(session) { + return session.user + }, + } + + ctx.UserAuditLogHandler = { + promises: { + addEntry: sinon.stub().resolves(), + }, + } + ctx.UserGetter = { + promises: { + getUser: sinon.stub().resolves(ctx.user), + getUserByMainEmail: sinon.stub().resolves(ctx.user), + getUserByAnyEmail: sinon.stub().resolves(ctx.user), + }, + } + ctx.EmailHandler = { + sendDeferredEmail: sinon.stub().resolves(), + } + + ctx.RateLimiter = { + RateLimiter: class {}, + } + + vi.doMock( + '../../../../app/src/Features/Subscription/TeamInvitesHandler', + () => ({ + default: ctx.TeamInvitesHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Authentication/SessionManager', + () => ({ + default: ctx.SessionManager, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Subscription/SubscriptionLocator', + () => ({ + default: ctx.SubscriptionLocator, + }) + ) + + vi.doMock('../../../../app/src/Features/User/UserAuditLogHandler', () => ({ + default: ctx.UserAuditLogHandler, + })) + + vi.doMock('../../../../app/src/Features/Errors/ErrorController', () => ({ + default: ctx.ErrorController, + })) + + vi.doMock('../../../../app/src/Features/User/UserGetter', () => ({ + default: ctx.UserGetter, + })) + + vi.doMock('../../../../app/src/Features/Email/EmailHandler', () => ({ + default: ctx.EmailHandler, + })) + + vi.doMock( + '../../../../app/src/infrastructure/RateLimiter', + () => ctx.RateLimiter + ) + + vi.doMock('../../../../app/src/infrastructure/Modules', () => ({ + default: (ctx.Modules = { + promises: { + hooks: { + fire: sinon.stub().resolves([]), + }, + }, + }), + })) + + vi.doMock( + '../../../../app/src/Features/SplitTests/SplitTestHandler', + () => ({ + default: (ctx.SplitTestHandler = { + promises: { + getAssignment: sinon.stub().resolves({}), + }, + }), + }) + ) + + ctx.Controller = (await import(modulePath)).default + }) + + describe('acceptInvite', function () { + it('should add an audit log entry', function (ctx) { + return new Promise(resolve => { + ctx.req.params.token = 'foo' + ctx.req.session.user = ctx.user + const res = { + json: () => { + sinon.assert.calledWith( + ctx.UserAuditLogHandler.promises.addEntry, + ctx.user._id, + 'accept-group-invitation', + ctx.user._id, + ctx.req.ip, + { subscriptionId: ctx.subscriptionId } + ) + resolve() + }, + } + ctx.Controller.acceptInvite(ctx.req, res) + }) + }) + }) + + describe('viewInvite', function () { + const req = { + params: { token: 'token123' }, + query: {}, + session: { + user: { _id: 'user123' }, + }, + } + + describe('hasIndividualPaidSubscription', function () { + it('is true for personal subscription', function (ctx) { 
+ return new Promise(resolve => { + ctx.SubscriptionLocator.promises.getUsersSubscription.resolves({ + recurlySubscription_id: 'subscription123', + groupPlan: false, + }) + const res = { + render: (template, data) => { + expect(data.hasIndividualPaidSubscription).to.be.true + resolve() + }, + } + ctx.Controller.viewInvite(req, res) + }) + }) + + it('is true for group subscriptions', function (ctx) { + return new Promise(resolve => { + ctx.SubscriptionLocator.promises.getUsersSubscription.resolves({ + recurlySubscription_id: 'subscription123', + groupPlan: true, + }) + const res = { + render: (template, data) => { + expect(data.hasIndividualPaidSubscription).to.be.false + resolve() + }, + } + ctx.Controller.viewInvite(req, res) + }) + }) + + it('is false for canceled subscriptions', function (ctx) { + return new Promise(resolve => { + ctx.SubscriptionLocator.promises.getUsersSubscription.resolves({ + recurlySubscription_id: 'subscription123', + groupPlan: false, + recurlyStatus: { + state: 'canceled', + }, + }) + const res = { + render: (template, data) => { + expect(data.hasIndividualPaidSubscription).to.be.false + resolve() + }, + } + ctx.Controller.viewInvite(req, res) + }) + }) + }) + + describe('when user is logged out', function () { + it('renders logged out invite page', function (ctx) { + return new Promise(resolve => { + const res = { + render: (template, data) => { + expect(template).to.equal('subscriptions/team/invite_logged_out') + expect(data.groupSSOActive).to.be.undefined + resolve() + }, + } + ctx.Controller.viewInvite( + { params: { token: 'token123' }, session: {} }, + res + ) + }) + }) + + it('includes groupSSOActive flag when the group has SSO enabled', function (ctx) { + return new Promise(resolve => { + ctx.Modules.promises.hooks.fire = sinon.stub().resolves([true]) + const res = { + render: (template, data) => { + expect(data.groupSSOActive).to.be.true + resolve() + }, + } + ctx.Controller.viewInvite( + { params: { token: 'token123' }, session: {} }, + res + ) + }) + }) + }) + + it('renders the view', function (ctx) { + return new Promise(resolve => { + const res = { + render: template => { + expect(template).to.equal('subscriptions/team/invite') + resolve() + }, + } + ctx.Controller.viewInvite(req, res) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/Subscription/TeamInvitesControllerTests.mjs b/services/web/test/unit/src/Subscription/TeamInvitesControllerTests.mjs deleted file mode 100644 index 3a1e8c3462..0000000000 --- a/services/web/test/unit/src/Subscription/TeamInvitesControllerTests.mjs +++ /dev/null @@ -1,227 +0,0 @@ -import esmock from 'esmock' -import sinon from 'sinon' -import { expect } from 'chai' -const modulePath = - '../../../../app/src/Features/Subscription/TeamInvitesController' - -describe('TeamInvitesController', function () { - beforeEach(async function () { - this.user = { _id: '!@312431', email: 'user@email.com' } - this.adminUserId = '123jlkj' - this.subscriptionId = '123434325412' - this.user_email = 'bob@gmail.com' - this.req = { - session: { - user: { - _id: this.adminUserId, - email: this.user_email, - }, - }, - params: {}, - query: {}, - ip: '0.0.0.0', - } - - this.subscription = { - _id: this.subscriptionId, - } - - this.TeamInvitesHandler = { - promises: { - acceptInvite: sinon.stub().resolves(this.subscription), - getInvite: sinon.stub().resolves({ - invite: { - email: this.user.email, - token: 'token123', - inviterName: this.user_email, - }, - subscription: this.subscription, - }), - }, - } - - this.SubscriptionLocator = { 
- promises: { - hasSSOEnabled: sinon.stub().resolves(true), - getUsersSubscription: sinon.stub().resolves(), - }, - } - this.ErrorController = { notFound: sinon.stub() } - - this.SessionManager = { - getLoggedInUserId(session) { - return session.user?._id - }, - getSessionUser(session) { - return session.user - }, - } - - this.UserAuditLogHandler = { - promises: { - addEntry: sinon.stub().resolves(), - }, - } - this.UserGetter = { - promises: { - getUser: sinon.stub().resolves(this.user), - getUserByMainEmail: sinon.stub().resolves(this.user), - getUserByAnyEmail: sinon.stub().resolves(this.user), - }, - } - this.EmailHandler = { - sendDeferredEmail: sinon.stub().resolves(), - } - - this.RateLimiter = { - RateLimiter: class {}, - } - - this.Controller = await esmock.strict(modulePath, { - '../../../../app/src/Features/Subscription/TeamInvitesHandler': - this.TeamInvitesHandler, - '../../../../app/src/Features/Authentication/SessionManager': - this.SessionManager, - '../../../../app/src/Features/Subscription/SubscriptionLocator': - this.SubscriptionLocator, - '../../../../app/src/Features/User/UserAuditLogHandler': - this.UserAuditLogHandler, - '../../../../app/src/Features/Errors/ErrorController': - this.ErrorController, - '../../../../app/src/Features/User/UserGetter': this.UserGetter, - '../../../../app/src/Features/Email/EmailHandler': this.EmailHandler, - '../../../../app/src/infrastructure/RateLimiter': this.RateLimiter, - '../../../../app/src/infrastructure/Modules': (this.Modules = { - promises: { - hooks: { - fire: sinon.stub().resolves([]), - }, - }, - }), - '../../../../app/src/Features/SplitTests/SplitTestHandler': - (this.SplitTestHandler = { - promises: { - getAssignment: sinon.stub().resolves({}), - }, - }), - }) - }) - - describe('acceptInvite', function () { - it('should add an audit log entry', function (done) { - this.req.params.token = 'foo' - this.req.session.user = this.user - const res = { - json: () => { - sinon.assert.calledWith( - this.UserAuditLogHandler.promises.addEntry, - this.user._id, - 'accept-group-invitation', - this.user._id, - this.req.ip, - { subscriptionId: this.subscriptionId } - ) - done() - }, - } - this.Controller.acceptInvite(this.req, res) - }) - }) - - describe('viewInvite', function () { - const req = { - params: { token: 'token123' }, - query: {}, - session: { - user: { _id: 'user123' }, - }, - } - - describe('hasIndividualRecurlySubscription', function () { - it('is true for personal subscription', function (done) { - this.SubscriptionLocator.promises.getUsersSubscription.resolves({ - recurlySubscription_id: 'subscription123', - groupPlan: false, - }) - const res = { - render: (template, data) => { - expect(data.hasIndividualRecurlySubscription).to.be.true - done() - }, - } - this.Controller.viewInvite(req, res) - }) - - it('is true for group subscriptions', function (done) { - this.SubscriptionLocator.promises.getUsersSubscription.resolves({ - recurlySubscription_id: 'subscription123', - groupPlan: true, - }) - const res = { - render: (template, data) => { - expect(data.hasIndividualRecurlySubscription).to.be.false - done() - }, - } - this.Controller.viewInvite(req, res) - }) - - it('is false for canceled subscriptions', function (done) { - this.SubscriptionLocator.promises.getUsersSubscription.resolves({ - recurlySubscription_id: 'subscription123', - groupPlan: false, - recurlyStatus: { - state: 'canceled', - }, - }) - const res = { - render: (template, data) => { - expect(data.hasIndividualRecurlySubscription).to.be.false - done() - }, - } 
- this.Controller.viewInvite(req, res) - }) - }) - - describe('when user is logged out', function () { - it('renders logged out invite page', function (done) { - const res = { - render: (template, data) => { - expect(template).to.equal('subscriptions/team/invite_logged_out') - expect(data.groupSSOActive).to.be.undefined - done() - }, - } - this.Controller.viewInvite( - { params: { token: 'token123' }, session: {} }, - res - ) - }) - - it('includes groupSSOActive flag when the group has SSO enabled', function (done) { - this.Modules.promises.hooks.fire = sinon.stub().resolves([true]) - const res = { - render: (template, data) => { - expect(data.groupSSOActive).to.be.true - done() - }, - } - this.Controller.viewInvite( - { params: { token: 'token123' }, session: {} }, - res - ) - }) - }) - - it('renders the view', function (done) { - const res = { - render: template => { - expect(template).to.equal('subscriptions/team/invite') - done() - }, - } - this.Controller.viewInvite(req, res) - }) - }) -}) diff --git a/services/web/test/unit/src/Subscription/TeamInvitesHandlerTests.js b/services/web/test/unit/src/Subscription/TeamInvitesHandlerTests.js index fdd247bf96..b15232c822 100644 --- a/services/web/test/unit/src/Subscription/TeamInvitesHandlerTests.js +++ b/services/web/test/unit/src/Subscription/TeamInvitesHandlerTests.js @@ -29,6 +29,7 @@ describe('TeamInvitesHandler', function () { this.subscription = { id: '55153a8014829a865bbf700d', _id: new ObjectId('55153a8014829a865bbf700d'), + recurlySubscription_id: '1a2b3c4d5e6f7g', admin_id: this.manager._id, groupPlan: true, member_ids: [], @@ -54,6 +55,7 @@ describe('TeamInvitesHandler', function () { this.SubscriptionUpdater = { promises: { addUserToGroup: sinon.stub().resolves(), + deleteSubscription: sinon.stub().resolves(), }, } @@ -109,6 +111,12 @@ describe('TeamInvitesHandler', function () { this.Subscription.findOne.resolves(this.subscription) + this.RecurlyClient = { + promises: { + terminateSubscriptionByUuid: sinon.stub().resolves(), + }, + } + this.TeamInvitesHandler = SandboxedModule.require(modulePath, { requires: { 'mongodb-legacy': { ObjectId }, @@ -126,6 +134,7 @@ describe('TeamInvitesHandler', function () { '../../infrastructure/Modules': (this.Modules = { promises: { hooks: { fire: sinon.stub().resolves() } }, }), + './RecurlyClient': this.RecurlyClient, }, }) }) @@ -335,6 +344,8 @@ describe('TeamInvitesHandler', function () { email: 'tyrion@example.com', } + this.ipAddress = '127.0.0.1' + this.UserGetter.promises.getUserByAnyEmail .withArgs(this.user.email) .resolves(this.user) @@ -350,7 +361,8 @@ describe('TeamInvitesHandler', function () { it('adds the user to the team', async function () { await this.TeamInvitesHandler.promises.acceptInvite( 'dddddddd', - this.user.id + this.user.id, + this.ipAddress ) this.SubscriptionUpdater.promises.addUserToGroup .calledWith(this.subscription._id, this.user.id) @@ -360,7 +372,8 @@ describe('TeamInvitesHandler', function () { it('removes the invite from the subscription', async function () { await this.TeamInvitesHandler.promises.acceptInvite( 'dddddddd', - this.user.id + this.user.id, + this.ipAddress ) this.Subscription.updateOne .calledWith( @@ -375,7 +388,8 @@ describe('TeamInvitesHandler', function () { await this.TeamInvitesHandler.promises.acceptInvite( 'dddddddd', - this.user.id + this.user.id, + this.ipAddress ) sinon.assert.called( this.NotificationsBuilder.promises.groupInvitation( @@ -389,7 +403,8 @@ describe('TeamInvitesHandler', function () { it('should not schedule an SSO 
invite reminder', async function () { await this.TeamInvitesHandler.promises.acceptInvite( 'dddddddd', - this.user.id + this.user.id, + this.ipAddress ) sinon.assert.notCalled(this.Modules.promises.hooks.fire) }) @@ -401,7 +416,17 @@ describe('TeamInvitesHandler', function () { await this.TeamInvitesHandler.promises.acceptInvite( 'dddddddd', - this.user.id + this.user.id, + this.ipAddress + ) + sinon.assert.calledWith( + this.SubscriptionUpdater.promises.deleteSubscription, + this.subscription, + { id: this.user.id, ip: this.ipAddress } + ) + sinon.assert.calledWith( + this.RecurlyClient.promises.terminateSubscriptionByUuid, + this.subscription.recurlySubscription_id ) sinon.assert.calledWith( this.Modules.promises.hooks.fire, @@ -421,7 +446,8 @@ describe('TeamInvitesHandler', function () { await this.TeamInvitesHandler.promises.acceptInvite( 'dddddddd', - this.user.id + this.user.id, + this.ipAddress ) sinon.assert.calledWith( this.Modules.promises.hooks.fire, diff --git a/services/web/test/unit/src/Tags/TagsController.test.mjs b/services/web/test/unit/src/Tags/TagsController.test.mjs new file mode 100644 index 0000000000..c8cb739d0e --- /dev/null +++ b/services/web/test/unit/src/Tags/TagsController.test.mjs @@ -0,0 +1,315 @@ +import { assert, vi } from 'vitest' +import sinon from 'sinon' + +const modulePath = '../../../../app/src/Features/Tags/TagsController.mjs' + +describe('TagsController', function () { + const userId = '123nd3ijdks' + const projectId = '123njdskj9jlk' + + beforeEach(async function (ctx) { + ctx.TagsHandler = { + promises: { + addProjectToTag: sinon.stub().resolves(), + addProjectsToTag: sinon.stub().resolves(), + removeProjectFromTag: sinon.stub().resolves(), + removeProjectsFromTag: sinon.stub().resolves(), + deleteTag: sinon.stub().resolves(), + editTag: sinon.stub().resolves(), + renameTag: sinon.stub().resolves(), + createTag: sinon.stub().resolves(), + }, + } + ctx.SessionManager = { + getLoggedInUserId: session => { + return session.user._id + }, + } + + vi.doMock('../../../../app/src/Features/Tags/TagsHandler', () => ({ + default: ctx.TagsHandler, + })) + + vi.doMock( + '../../../../app/src/Features/Authentication/SessionManager', + () => ({ + default: ctx.SessionManager, + }) + ) + + ctx.TagsController = (await import(modulePath)).default + ctx.req = { + params: { + projectId, + }, + session: { + user: { + _id: userId, + }, + }, + body: {}, + } + + ctx.res = {} + ctx.res.status = sinon.stub().returns(ctx.res) + ctx.res.end = sinon.stub() + ctx.res.json = sinon.stub() + }) + + it('get all tags', function (ctx) { + return new Promise(resolve => { + const allTags = [{ name: 'tag', projects: ['123423', '423423'] }] + ctx.TagsHandler.promises.getAllTags = sinon.stub().resolves(allTags) + ctx.TagsController.getAllTags(ctx.req, { + json: body => { + body.should.equal(allTags) + sinon.assert.calledWith(ctx.TagsHandler.promises.getAllTags, userId) + resolve() + return { + end: () => {}, + } + }, + }) + }) + }) + + describe('create a tag', function (done) { + it('without a color', function (ctx) { + return new Promise(resolve => { + ctx.tag = { mock: 'tag' } + ctx.TagsHandler.promises.createTag = sinon.stub().resolves(ctx.tag) + ctx.req.session.user._id = ctx.userId = 'user-id-123' + ctx.req.body = { name: (ctx.tagName = 'tag-name') } + ctx.TagsController.createTag(ctx.req, { + json: () => { + sinon.assert.calledWith( + ctx.TagsHandler.promises.createTag, + ctx.userId, + ctx.tagName + ) + resolve() + return { + end: () => {}, + } + }, + }) + }) + }) + + it('with a 
color', function (ctx) { + return new Promise(resolve => { + ctx.tag = { mock: 'tag' } + ctx.TagsHandler.promises.createTag = sinon.stub().resolves(ctx.tag) + ctx.req.session.user._id = ctx.userId = 'user-id-123' + ctx.req.body = { + name: (ctx.tagName = 'tag-name'), + color: (ctx.color = '#123456'), + } + ctx.TagsController.createTag(ctx.req, { + json: () => { + sinon.assert.calledWith( + ctx.TagsHandler.promises.createTag, + ctx.userId, + ctx.tagName, + ctx.color + ) + resolve() + return { + end: () => {}, + } + }, + }) + }) + }) + }) + + it('delete a tag', function (ctx) { + return new Promise(resolve => { + ctx.req.params.tagId = ctx.tagId = 'tag-id-123' + ctx.req.session.user._id = ctx.userId = 'user-id-123' + ctx.TagsController.deleteTag(ctx.req, { + status: code => { + assert.equal(code, 204) + sinon.assert.calledWith( + ctx.TagsHandler.promises.deleteTag, + ctx.userId, + ctx.tagId + ) + resolve() + return { + end: () => {}, + } + }, + }) + }) + }) + + describe('edit a tag', function () { + beforeEach(function (ctx) { + ctx.req.params.tagId = ctx.tagId = 'tag-id-123' + ctx.req.session.user._id = ctx.userId = 'user-id-123' + }) + + it('with a name and no color', function (ctx) { + return new Promise(resolve => { + ctx.req.body = { + name: (ctx.tagName = 'new-name'), + } + ctx.TagsController.editTag(ctx.req, { + status: code => { + assert.equal(code, 204) + sinon.assert.calledWith( + ctx.TagsHandler.promises.editTag, + ctx.userId, + ctx.tagId, + ctx.tagName + ) + resolve() + return { + end: () => {}, + } + }, + }) + }) + }) + + it('with a name and color', function (ctx) { + return new Promise(resolve => { + ctx.req.body = { + name: (ctx.tagName = 'new-name'), + color: (ctx.color = '#FF0011'), + } + ctx.TagsController.editTag(ctx.req, { + status: code => { + assert.equal(code, 204) + sinon.assert.calledWith( + ctx.TagsHandler.promises.editTag, + ctx.userId, + ctx.tagId, + ctx.tagName, + ctx.color + ) + resolve() + return { + end: () => {}, + } + }, + }) + }) + }) + + it('without a name', function (ctx) { + return new Promise(resolve => { + ctx.req.body = { name: undefined } + ctx.TagsController.renameTag(ctx.req, { + status: code => { + assert.equal(code, 400) + sinon.assert.notCalled(ctx.TagsHandler.promises.renameTag) + resolve() + return { + end: () => {}, + } + }, + }) + }) + }) + }) + + it('add a project to a tag', function (ctx) { + return new Promise(resolve => { + ctx.req.params.tagId = ctx.tagId = 'tag-id-123' + ctx.req.params.projectId = ctx.projectId = 'project-id-123' + ctx.req.session.user._id = ctx.userId = 'user-id-123' + ctx.TagsController.addProjectToTag(ctx.req, { + status: code => { + assert.equal(code, 204) + sinon.assert.calledWith( + ctx.TagsHandler.promises.addProjectToTag, + ctx.userId, + ctx.tagId, + ctx.projectId + ) + resolve() + return { + end: () => {}, + } + }, + }) + }) + }) + + it('add projects to a tag', function (ctx) { + return new Promise(resolve => { + ctx.req.params.tagId = ctx.tagId = 'tag-id-123' + ctx.req.body.projectIds = ctx.projectIds = [ + 'project-id-123', + 'project-id-234', + ] + ctx.req.session.user._id = ctx.userId = 'user-id-123' + ctx.TagsController.addProjectsToTag(ctx.req, { + status: code => { + assert.equal(code, 204) + sinon.assert.calledWith( + ctx.TagsHandler.promises.addProjectsToTag, + ctx.userId, + ctx.tagId, + ctx.projectIds + ) + resolve() + return { + end: () => {}, + } + }, + }) + }) + }) + + it('remove a project from a tag', function (ctx) { + return new Promise(resolve => { + ctx.req.params.tagId = ctx.tagId = 
'tag-id-123' + ctx.req.params.projectId = ctx.projectId = 'project-id-123' + ctx.req.session.user._id = ctx.userId = 'user-id-123' + ctx.TagsController.removeProjectFromTag(ctx.req, { + status: code => { + assert.equal(code, 204) + sinon.assert.calledWith( + ctx.TagsHandler.promises.removeProjectFromTag, + ctx.userId, + ctx.tagId, + ctx.projectId + ) + resolve() + return { + end: () => {}, + } + }, + }) + }) + }) + + it('remove projects from a tag', function (ctx) { + return new Promise(resolve => { + ctx.req.params.tagId = ctx.tagId = 'tag-id-123' + ctx.req.body.projectIds = ctx.projectIds = [ + 'project-id-123', + 'project-id-234', + ] + ctx.req.session.user._id = ctx.userId = 'user-id-123' + ctx.TagsController.removeProjectsFromTag(ctx.req, { + status: code => { + assert.equal(code, 204) + sinon.assert.calledWith( + ctx.TagsHandler.promises.removeProjectsFromTag, + ctx.userId, + ctx.tagId, + ctx.projectIds + ) + resolve() + return { + end: () => {}, + } + }, + }) + }) + }) +}) diff --git a/services/web/test/unit/src/Tags/TagsControllerTests.mjs b/services/web/test/unit/src/Tags/TagsControllerTests.mjs deleted file mode 100644 index 4474ba0d38..0000000000 --- a/services/web/test/unit/src/Tags/TagsControllerTests.mjs +++ /dev/null @@ -1,288 +0,0 @@ -import esmock from 'esmock' -import sinon from 'sinon' -import { assert } from 'chai' -const modulePath = new URL( - '../../../../app/src/Features/Tags/TagsController.mjs', - import.meta.url -).pathname - -describe('TagsController', function () { - const userId = '123nd3ijdks' - const projectId = '123njdskj9jlk' - - beforeEach(async function () { - this.TagsHandler = { - promises: { - addProjectToTag: sinon.stub().resolves(), - addProjectsToTag: sinon.stub().resolves(), - removeProjectFromTag: sinon.stub().resolves(), - removeProjectsFromTag: sinon.stub().resolves(), - deleteTag: sinon.stub().resolves(), - editTag: sinon.stub().resolves(), - renameTag: sinon.stub().resolves(), - createTag: sinon.stub().resolves(), - }, - } - this.SessionManager = { - getLoggedInUserId: session => { - return session.user._id - }, - } - this.TagsController = await esmock.strict(modulePath, { - '../../../../app/src/Features/Tags/TagsHandler': this.TagsHandler, - '../../../../app/src/Features/Authentication/SessionManager': - this.SessionManager, - }) - this.req = { - params: { - projectId, - }, - session: { - user: { - _id: userId, - }, - }, - body: {}, - } - - this.res = {} - this.res.status = sinon.stub().returns(this.res) - this.res.end = sinon.stub() - this.res.json = sinon.stub() - }) - - it('get all tags', function (done) { - const allTags = [{ name: 'tag', projects: ['123423', '423423'] }] - this.TagsHandler.promises.getAllTags = sinon.stub().resolves(allTags) - this.TagsController.getAllTags(this.req, { - json: body => { - body.should.equal(allTags) - sinon.assert.calledWith(this.TagsHandler.promises.getAllTags, userId) - done() - return { - end: () => {}, - } - }, - }) - }) - - describe('create a tag', function (done) { - it('without a color', function (done) { - this.tag = { mock: 'tag' } - this.TagsHandler.promises.createTag = sinon.stub().resolves(this.tag) - this.req.session.user._id = this.userId = 'user-id-123' - this.req.body = { name: (this.name = 'tag-name') } - this.TagsController.createTag(this.req, { - json: () => { - sinon.assert.calledWith( - this.TagsHandler.promises.createTag, - this.userId, - this.name - ) - done() - return { - end: () => {}, - } - }, - }) - }) - - it('with a color', function (done) { - this.tag = { mock: 'tag' } - 
this.TagsHandler.promises.createTag = sinon.stub().resolves(this.tag) - this.req.session.user._id = this.userId = 'user-id-123' - this.req.body = { - name: (this.name = 'tag-name'), - color: (this.color = '#123456'), - } - this.TagsController.createTag(this.req, { - json: () => { - sinon.assert.calledWith( - this.TagsHandler.promises.createTag, - this.userId, - this.name, - this.color - ) - done() - return { - end: () => {}, - } - }, - }) - }) - }) - - it('delete a tag', function (done) { - this.req.params.tagId = this.tagId = 'tag-id-123' - this.req.session.user._id = this.userId = 'user-id-123' - this.TagsController.deleteTag(this.req, { - status: code => { - assert.equal(code, 204) - sinon.assert.calledWith( - this.TagsHandler.promises.deleteTag, - this.userId, - this.tagId - ) - done() - return { - end: () => {}, - } - }, - }) - }) - - describe('edit a tag', function () { - beforeEach(function () { - this.req.params.tagId = this.tagId = 'tag-id-123' - this.req.session.user._id = this.userId = 'user-id-123' - }) - - it('with a name and no color', function (done) { - this.req.body = { - name: (this.name = 'new-name'), - } - this.TagsController.editTag(this.req, { - status: code => { - assert.equal(code, 204) - sinon.assert.calledWith( - this.TagsHandler.promises.editTag, - this.userId, - this.tagId, - this.name - ) - done() - return { - end: () => {}, - } - }, - }) - }) - - it('with a name and color', function (done) { - this.req.body = { - name: (this.name = 'new-name'), - color: (this.color = '#FF0011'), - } - this.TagsController.editTag(this.req, { - status: code => { - assert.equal(code, 204) - sinon.assert.calledWith( - this.TagsHandler.promises.editTag, - this.userId, - this.tagId, - this.name, - this.color - ) - done() - return { - end: () => {}, - } - }, - }) - }) - - it('without a name', function (done) { - this.req.body = { name: undefined } - this.TagsController.renameTag(this.req, { - status: code => { - assert.equal(code, 400) - sinon.assert.notCalled(this.TagsHandler.promises.renameTag) - done() - return { - end: () => {}, - } - }, - }) - }) - }) - - it('add a project to a tag', function (done) { - this.req.params.tagId = this.tagId = 'tag-id-123' - this.req.params.projectId = this.projectId = 'project-id-123' - this.req.session.user._id = this.userId = 'user-id-123' - this.TagsController.addProjectToTag(this.req, { - status: code => { - assert.equal(code, 204) - sinon.assert.calledWith( - this.TagsHandler.promises.addProjectToTag, - this.userId, - this.tagId, - this.projectId - ) - done() - return { - end: () => {}, - } - }, - }) - }) - - it('add projects to a tag', function (done) { - this.req.params.tagId = this.tagId = 'tag-id-123' - this.req.body.projectIds = this.projectIds = [ - 'project-id-123', - 'project-id-234', - ] - this.req.session.user._id = this.userId = 'user-id-123' - this.TagsController.addProjectsToTag(this.req, { - status: code => { - assert.equal(code, 204) - sinon.assert.calledWith( - this.TagsHandler.promises.addProjectsToTag, - this.userId, - this.tagId, - this.projectIds - ) - done() - return { - end: () => {}, - } - }, - }) - }) - - it('remove a project from a tag', function (done) { - this.req.params.tagId = this.tagId = 'tag-id-123' - this.req.params.projectId = this.projectId = 'project-id-123' - this.req.session.user._id = this.userId = 'user-id-123' - this.TagsController.removeProjectFromTag(this.req, { - status: code => { - assert.equal(code, 204) - sinon.assert.calledWith( - this.TagsHandler.promises.removeProjectFromTag, - this.userId, - 
this.tagId, - this.projectId - ) - done() - return { - end: () => {}, - } - }, - }) - }) - - it('remove projects from a tag', function (done) { - this.req.params.tagId = this.tagId = 'tag-id-123' - this.req.body.projectIds = this.projectIds = [ - 'project-id-123', - 'project-id-234', - ] - this.req.session.user._id = this.userId = 'user-id-123' - this.TagsController.removeProjectsFromTag(this.req, { - status: code => { - assert.equal(code, 204) - sinon.assert.calledWith( - this.TagsHandler.promises.removeProjectsFromTag, - this.userId, - this.tagId, - this.projectIds - ) - done() - return { - end: () => {}, - } - }, - }) - }) -}) diff --git a/services/web/test/unit/src/ThirdPartyDataStore/TpdsController.test.mjs b/services/web/test/unit/src/ThirdPartyDataStore/TpdsController.test.mjs new file mode 100644 index 0000000000..29daa00efc --- /dev/null +++ b/services/web/test/unit/src/ThirdPartyDataStore/TpdsController.test.mjs @@ -0,0 +1,568 @@ +import { expect, vi } from 'vitest' +import mongodb from 'mongodb-legacy' +import sinon from 'sinon' +import Errors from '../../../../app/src/Features/Errors/Errors.js' +import MockResponse from '../helpers/MockResponse.js' +import MockRequest from '../helpers/MockRequest.js' + +const ObjectId = mongodb.ObjectId + +const MODULE_PATH = + '../../../../app/src/Features/ThirdPartyDataStore/TpdsController.mjs' + +describe('TpdsController', function () { + beforeEach(async function (ctx) { + ctx.metadata = { + projectId: new ObjectId(), + entityId: new ObjectId(), + folderId: new ObjectId(), + entityType: 'doc', + rev: 2, + } + ctx.TpdsUpdateHandler = { + promises: { + newUpdate: sinon.stub().resolves(ctx.metadata), + deleteUpdate: sinon.stub().resolves(ctx.metadata.entityId), + createFolder: sinon.stub().resolves(), + }, + } + ctx.UpdateMerger = { + promises: { + mergeUpdate: sinon.stub().resolves(ctx.metadata), + deleteUpdate: sinon.stub().resolves(ctx.metadata.entityId), + }, + } + ctx.NotificationsBuilder = { + tpdsFileLimit: sinon.stub().returns({ create: sinon.stub() }), + } + ctx.SessionManager = { + getLoggedInUserId: sinon.stub().returns('user-id'), + } + ctx.TpdsQueueManager = { + promises: { + getQueues: sinon.stub().resolves('queues'), + }, + } + ctx.HttpErrorHandler = { + conflict: sinon.stub(), + } + + ctx.newProject = { _id: new ObjectId() } + ctx.ProjectCreationHandler = { + promises: { createBlankProject: sinon.stub().resolves(ctx.newProject) }, + } + ctx.ProjectDetailsHandler = { + promises: { + generateUniqueName: sinon.stub().resolves('unique'), + }, + } + + vi.doMock( + '../../../../app/src/Features/ThirdPartyDataStore/TpdsUpdateHandler', + () => ({ + default: ctx.TpdsUpdateHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/ThirdPartyDataStore/UpdateMerger', + () => ({ + default: ctx.UpdateMerger, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Notifications/NotificationsBuilder', + () => ({ + default: ctx.NotificationsBuilder, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Authentication/SessionManager', + () => ({ + default: ctx.SessionManager, + }) + ) + + vi.doMock('../../../../app/src/Features/Errors/HttpErrorHandler', () => ({ + default: ctx.HttpErrorHandler, + })) + + vi.doMock( + '../../../../app/src/Features/ThirdPartyDataStore/TpdsQueueManager', + () => ({ + default: ctx.TpdsQueueManager, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Project/ProjectCreationHandler', + () => ({ + default: ctx.ProjectCreationHandler, + }) + ) + + vi.doMock( + 
'../../../../app/src/Features/Project/ProjectDetailsHandler', + () => ({ + default: ctx.ProjectDetailsHandler, + }) + ) + + ctx.TpdsController = (await import(MODULE_PATH)).default + + ctx.user_id = 'dsad29jlkjas' + }) + + describe('creating a project', function () { + it('should yield the new projects id', function (ctx) { + return new Promise(resolve => { + const res = new MockResponse() + const req = new MockRequest() + req.params.user_id = ctx.user_id + req.body = { projectName: 'foo' } + res.callback = err => { + if (err) resolve(err) + expect(res.body).to.equal( + JSON.stringify({ projectId: ctx.newProject._id.toString() }) + ) + expect( + ctx.ProjectDetailsHandler.promises.generateUniqueName + ).to.have.been.calledWith(ctx.user_id, 'foo') + expect( + ctx.ProjectCreationHandler.promises.createBlankProject + ).to.have.been.calledWith( + ctx.user_id, + 'unique', + {}, + { skipCreatingInTPDS: true } + ) + resolve() + } + ctx.TpdsController.createProject(req, res) + }) + }) + }) + + describe('getting an update', function () { + beforeEach(function (ctx) { + ctx.projectName = 'projectName' + ctx.path = '/here.txt' + ctx.req = { + params: { + 0: `${ctx.projectName}${ctx.path}`, + user_id: ctx.user_id, + project_id: '', + }, + headers: { + 'x-update-source': (ctx.source = 'dropbox'), + }, + } + }) + + it('should process the update with the update receiver by name', function (ctx) { + return new Promise(resolve => { + const res = { + json: payload => { + expect(payload).to.deep.equal({ + status: 'applied', + projectId: ctx.metadata.projectId.toString(), + entityId: ctx.metadata.entityId.toString(), + folderId: ctx.metadata.folderId.toString(), + entityType: ctx.metadata.entityType, + rev: ctx.metadata.rev.toString(), + }) + ctx.TpdsUpdateHandler.promises.newUpdate + .calledWith( + ctx.user_id, + '', // projectId + ctx.projectName, + ctx.path, + ctx.req, + ctx.source + ) + .should.equal(true) + resolve() + }, + } + ctx.TpdsController.mergeUpdate(ctx.req, res) + }) + }) + + it('should indicate in the response when the update was rejected', function (ctx) { + return new Promise(resolve => { + ctx.TpdsUpdateHandler.promises.newUpdate.resolves(null) + const res = { + json: payload => { + expect(payload).to.deep.equal({ status: 'rejected' }) + resolve() + }, + } + ctx.TpdsController.mergeUpdate(ctx.req, res) + }) + }) + + it('should process the update with the update receiver by id', function (ctx) { + return new Promise(resolve => { + const path = '/here.txt' + const req = { + pause() {}, + params: { 0: path, user_id: ctx.user_id, project_id: '123' }, + session: { + destroy() {}, + }, + headers: { + 'x-update-source': (ctx.source = 'dropbox'), + }, + } + const res = { + json: () => { + ctx.TpdsUpdateHandler.promises.newUpdate.should.have.been.calledWith( + ctx.user_id, + '123', + '', // projectName + '/here.txt', + req, + ctx.source + ) + resolve() + }, + } + ctx.TpdsController.mergeUpdate(req, res) + }) + }) + + it('should return a 500 error when the update receiver fails', function (ctx) { + return new Promise(resolve => { + ctx.TpdsUpdateHandler.promises.newUpdate.rejects(new Error()) + const res = { + json: sinon.stub(), + } + ctx.TpdsController.mergeUpdate(ctx.req, res, err => { + expect(err).to.exist + expect(res.json).not.to.have.been.called + resolve() + }) + }) + }) + + it('should return a 400 error when the project is too big', function (ctx) { + return new Promise(resolve => { + ctx.TpdsUpdateHandler.promises.newUpdate.rejects({ + message: 'project_has_too_many_files', + }) + const res 
= { + sendStatus: status => { + expect(status).to.equal(400) + ctx.NotificationsBuilder.tpdsFileLimit.should.have.been.calledWith( + ctx.user_id + ) + resolve() + }, + } + ctx.TpdsController.mergeUpdate(ctx.req, res) + }) + }) + + it('should return a 429 error when the update receiver fails due to too many requests error', function (ctx) { + return new Promise(resolve => { + ctx.TpdsUpdateHandler.promises.newUpdate.rejects( + new Errors.TooManyRequestsError('project on cooldown') + ) + const res = { + sendStatus: status => { + expect(status).to.equal(429) + resolve() + }, + } + ctx.TpdsController.mergeUpdate(ctx.req, res) + }) + }) + }) + + describe('getting a delete update', function () { + it('should process the delete with the update receiver by name', function (ctx) { + return new Promise(resolve => { + const path = '/projectName/here.txt' + const req = { + params: { 0: path, user_id: ctx.user_id, project_id: '' }, + session: { + destroy() {}, + }, + headers: { + 'x-update-source': (ctx.source = 'dropbox'), + }, + } + const res = { + sendStatus: () => { + ctx.TpdsUpdateHandler.promises.deleteUpdate + .calledWith( + ctx.user_id, + '', + 'projectName', + '/here.txt', + ctx.source + ) + .should.equal(true) + resolve() + }, + } + ctx.TpdsController.deleteUpdate(req, res) + }) + }) + + it('should process the delete with the update receiver by id', function (ctx) { + return new Promise(resolve => { + const path = '/here.txt' + const req = { + params: { 0: path, user_id: ctx.user_id, project_id: '123' }, + session: { + destroy() {}, + }, + headers: { + 'x-update-source': (ctx.source = 'dropbox'), + }, + } + const res = { + sendStatus: () => { + ctx.TpdsUpdateHandler.promises.deleteUpdate.should.have.been.calledWith( + ctx.user_id, + '123', + '', // projectName + '/here.txt', + ctx.source + ) + resolve() + }, + } + ctx.TpdsController.deleteUpdate(req, res) + }) + }) + }) + + describe('updateFolder', function () { + beforeEach(function (ctx) { + ctx.req = { + body: { userId: ctx.user_id, path: '/abc/def/ghi.txt' }, + } + ctx.res = { + json: sinon.stub(), + } + }) + + it("creates a folder if it doesn't exist", function (ctx) { + return new Promise(resolve => { + const metadata = { + folderId: new ObjectId(), + projectId: new ObjectId(), + path: '/def/ghi.txt', + parentFolderId: new ObjectId(), + } + ctx.TpdsUpdateHandler.promises.createFolder.resolves(metadata) + ctx.res.json.callsFake(body => { + expect(body).to.deep.equal({ + entityId: metadata.folderId.toString(), + projectId: metadata.projectId.toString(), + path: metadata.path, + folderId: metadata.parentFolderId.toString(), + }) + resolve() + }) + ctx.TpdsController.updateFolder(ctx.req, ctx.res) + }) + }) + + it('supports top level folders', function (ctx) { + return new Promise(resolve => { + const metadata = { + folderId: new ObjectId(), + projectId: new ObjectId(), + path: '/', + parentFolderId: null, + } + ctx.TpdsUpdateHandler.promises.createFolder.resolves(metadata) + ctx.res.json.callsFake(body => { + expect(body).to.deep.equal({ + entityId: metadata.folderId.toString(), + projectId: metadata.projectId.toString(), + path: metadata.path, + folderId: null, + }) + resolve() + }) + ctx.TpdsController.updateFolder(ctx.req, ctx.res) + }) + }) + + it("returns a 409 if the folder couldn't be created", function (ctx) { + return new Promise(resolve => { + ctx.TpdsUpdateHandler.promises.createFolder.resolves(null) + ctx.HttpErrorHandler.conflict.callsFake((req, res) => { + expect(req).to.equal(ctx.req) + expect(res).to.equal(ctx.res) + 
resolve() + }) + ctx.TpdsController.updateFolder(ctx.req, ctx.res) + }) + }) + }) + + describe('parseParams', function () { + it('should take the project name off the start and replace with slash', function (ctx) { + const path = 'noSlashHere' + const req = { params: { 0: path, user_id: ctx.user_id } } + const result = ctx.TpdsController.parseParams(req) + result.userId.should.equal(ctx.user_id) + result.filePath.should.equal('/') + result.projectName.should.equal(path) + }) + + it('should take the project name off the start and it with no slashes in', function (ctx) { + const path = '/project/file.tex' + const req = { params: { 0: path, user_id: ctx.user_id } } + const result = ctx.TpdsController.parseParams(req) + result.userId.should.equal(ctx.user_id) + result.filePath.should.equal('/file.tex') + result.projectName.should.equal('project') + }) + + it('should take the project name of and return a slash for the file path', function (ctx) { + const path = '/project_name' + const req = { params: { 0: path, user_id: ctx.user_id } } + const result = ctx.TpdsController.parseParams(req) + result.projectName.should.equal('project_name') + result.filePath.should.equal('/') + }) + }) + + describe('updateProjectContents', function () { + beforeEach(async function (ctx) { + ctx.req = { + params: { + 0: (ctx.path = 'chapters/main.tex'), + project_id: (ctx.project_id = 'project-id-123'), + }, + session: { + destroy: sinon.stub(), + }, + headers: { + 'x-update-source': (ctx.source = 'github'), + }, + } + + ctx.res = { + json: sinon.stub(), + sendStatus: sinon.stub(), + } + + await ctx.TpdsController.promises.updateProjectContents(ctx.req, ctx.res) + }) + + it('should merge the update', function (ctx) { + ctx.UpdateMerger.promises.mergeUpdate.should.be.calledWith( + null, + ctx.project_id, + `/${ctx.path}`, + ctx.req, + ctx.source + ) + }) + + it('should return a success', function (ctx) { + ctx.res.json.should.be.calledWith({ + entityId: ctx.metadata.entityId.toString(), + rev: ctx.metadata.rev, + }) + }) + }) + + describe('deleteProjectContents', function () { + beforeEach(async function (ctx) { + ctx.req = { + params: { + 0: (ctx.path = 'chapters/main.tex'), + project_id: (ctx.project_id = 'project-id-123'), + }, + session: { + destroy: sinon.stub(), + }, + headers: { + 'x-update-source': (ctx.source = 'github'), + }, + } + ctx.res = { + sendStatus: sinon.stub(), + json: sinon.stub(), + } + + await ctx.TpdsController.promises.deleteProjectContents(ctx.req, ctx.res) + }) + + it('should delete the file', function (ctx) { + ctx.UpdateMerger.promises.deleteUpdate.should.be.calledWith( + null, + ctx.project_id, + `/${ctx.path}`, + ctx.source + ) + }) + + it('should return a success', function (ctx) { + ctx.res.json.should.be.calledWith({ + entityId: ctx.metadata.entityId, + }) + }) + }) + + describe('getQueues', function () { + beforeEach(function (ctx) { + ctx.req = {} + ctx.res = { json: sinon.stub() } + ctx.next = sinon.stub() + }) + + describe('success', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.res.json.callsFake(() => { + resolve() + }) + ctx.TpdsController.getQueues(ctx.req, ctx.res, ctx.next) + }) + }) + + it('should use userId from session', function (ctx) { + ctx.SessionManager.getLoggedInUserId.should.have.been.calledOnce + ctx.TpdsQueueManager.promises.getQueues.should.have.been.calledWith( + 'user-id' + ) + }) + + it('should call json with response', function (ctx) { + ctx.res.json.should.have.been.calledWith('queues') + 
ctx.next.should.not.have.been.called + }) + }) + + describe('error', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.err = new Error() + ctx.TpdsQueueManager.promises.getQueues = sinon + .stub() + .rejects(ctx.err) + ctx.next.callsFake(() => { + resolve() + }) + ctx.TpdsController.getQueues(ctx.req, ctx.res, ctx.next) + }) + }) + + it('should call next with error', function (ctx) { + ctx.res.json.should.not.have.been.called + ctx.next.should.have.been.calledWith(ctx.err) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/ThirdPartyDataStore/TpdsControllerTests.mjs b/services/web/test/unit/src/ThirdPartyDataStore/TpdsControllerTests.mjs deleted file mode 100644 index 4dd72b117f..0000000000 --- a/services/web/test/unit/src/ThirdPartyDataStore/TpdsControllerTests.mjs +++ /dev/null @@ -1,510 +0,0 @@ -import mongodb from 'mongodb-legacy' -import { expect } from 'chai' -import esmock from 'esmock' -import sinon from 'sinon' -import Errors from '../../../../app/src/Features/Errors/Errors.js' -import MockResponse from '../helpers/MockResponse.js' -import MockRequest from '../helpers/MockRequest.js' - -const ObjectId = mongodb.ObjectId - -const MODULE_PATH = - '../../../../app/src/Features/ThirdPartyDataStore/TpdsController.mjs' - -describe('TpdsController', function () { - beforeEach(async function () { - this.metadata = { - projectId: new ObjectId(), - entityId: new ObjectId(), - folderId: new ObjectId(), - entityType: 'doc', - rev: 2, - } - this.TpdsUpdateHandler = { - promises: { - newUpdate: sinon.stub().resolves(this.metadata), - deleteUpdate: sinon.stub().resolves(this.metadata.entityId), - createFolder: sinon.stub().resolves(), - }, - } - this.UpdateMerger = { - promises: { - mergeUpdate: sinon.stub().resolves(this.metadata), - deleteUpdate: sinon.stub().resolves(this.metadata.entityId), - }, - } - this.NotificationsBuilder = { - tpdsFileLimit: sinon.stub().returns({ create: sinon.stub() }), - } - this.SessionManager = { - getLoggedInUserId: sinon.stub().returns('user-id'), - } - this.TpdsQueueManager = { - promises: { - getQueues: sinon.stub().resolves('queues'), - }, - } - this.HttpErrorHandler = { - conflict: sinon.stub(), - } - - this.newProject = { _id: new ObjectId() } - this.ProjectCreationHandler = { - promises: { createBlankProject: sinon.stub().resolves(this.newProject) }, - } - this.ProjectDetailsHandler = { - promises: { - generateUniqueName: sinon.stub().resolves('unique'), - }, - } - this.TpdsController = await esmock.strict(MODULE_PATH, { - '../../../../app/src/Features/ThirdPartyDataStore/TpdsUpdateHandler': - this.TpdsUpdateHandler, - '../../../../app/src/Features/ThirdPartyDataStore/UpdateMerger': - this.UpdateMerger, - '../../../../app/src/Features/Notifications/NotificationsBuilder': - this.NotificationsBuilder, - '../../../../app/src/Features/Authentication/SessionManager': - this.SessionManager, - '../../../../app/src/Features/Errors/HttpErrorHandler': - this.HttpErrorHandler, - '../../../../app/src/Features/ThirdPartyDataStore/TpdsQueueManager': - this.TpdsQueueManager, - '../../../../app/src/Features/Project/ProjectCreationHandler': - this.ProjectCreationHandler, - '../../../../app/src/Features/Project/ProjectDetailsHandler': - this.ProjectDetailsHandler, - }) - - this.user_id = 'dsad29jlkjas' - }) - - describe('creating a project', function () { - it('should yield the new projects id', function (done) { - const res = new MockResponse() - const req = new MockRequest() - req.params.user_id = this.user_id - req.body = { 
projectName: 'foo' } - res.callback = err => { - if (err) done(err) - expect(res.body).to.equal( - JSON.stringify({ projectId: this.newProject._id.toString() }) - ) - expect( - this.ProjectDetailsHandler.promises.generateUniqueName - ).to.have.been.calledWith(this.user_id, 'foo') - expect( - this.ProjectCreationHandler.promises.createBlankProject - ).to.have.been.calledWith( - this.user_id, - 'unique', - {}, - { skipCreatingInTPDS: true } - ) - done() - } - this.TpdsController.createProject(req, res) - }) - }) - - describe('getting an update', function () { - beforeEach(function () { - this.projectName = 'projectName' - this.path = '/here.txt' - this.req = { - params: { - 0: `${this.projectName}${this.path}`, - user_id: this.user_id, - project_id: '', - }, - headers: { - 'x-update-source': (this.source = 'dropbox'), - }, - } - }) - - it('should process the update with the update receiver by name', function (done) { - const res = { - json: payload => { - expect(payload).to.deep.equal({ - status: 'applied', - projectId: this.metadata.projectId.toString(), - entityId: this.metadata.entityId.toString(), - folderId: this.metadata.folderId.toString(), - entityType: this.metadata.entityType, - rev: this.metadata.rev.toString(), - }) - this.TpdsUpdateHandler.promises.newUpdate - .calledWith( - this.user_id, - '', // projectId - this.projectName, - this.path, - this.req, - this.source - ) - .should.equal(true) - done() - }, - } - this.TpdsController.mergeUpdate(this.req, res) - }) - - it('should indicate in the response when the update was rejected', function (done) { - this.TpdsUpdateHandler.promises.newUpdate.resolves(null) - const res = { - json: payload => { - expect(payload).to.deep.equal({ status: 'rejected' }) - done() - }, - } - this.TpdsController.mergeUpdate(this.req, res) - }) - - it('should process the update with the update receiver by id', function (done) { - const path = '/here.txt' - const req = { - pause() {}, - params: { 0: path, user_id: this.user_id, project_id: '123' }, - session: { - destroy() {}, - }, - headers: { - 'x-update-source': (this.source = 'dropbox'), - }, - } - const res = { - json: () => { - this.TpdsUpdateHandler.promises.newUpdate.should.have.been.calledWith( - this.user_id, - '123', - '', // projectName - '/here.txt', - req, - this.source - ) - done() - }, - } - this.TpdsController.mergeUpdate(req, res) - }) - - it('should return a 500 error when the update receiver fails', function (done) { - this.TpdsUpdateHandler.promises.newUpdate.rejects(new Error()) - const res = { - json: sinon.stub(), - } - this.TpdsController.mergeUpdate(this.req, res, err => { - expect(err).to.exist - expect(res.json).not.to.have.been.called - done() - }) - }) - - it('should return a 400 error when the project is too big', function (done) { - this.TpdsUpdateHandler.promises.newUpdate.rejects({ - message: 'project_has_too_many_files', - }) - const res = { - sendStatus: status => { - expect(status).to.equal(400) - this.NotificationsBuilder.tpdsFileLimit.should.have.been.calledWith( - this.user_id - ) - done() - }, - } - this.TpdsController.mergeUpdate(this.req, res) - }) - - it('should return a 429 error when the update receiver fails due to too many requests error', function (done) { - this.TpdsUpdateHandler.promises.newUpdate.rejects( - new Errors.TooManyRequestsError('project on cooldown') - ) - const res = { - sendStatus: status => { - expect(status).to.equal(429) - done() - }, - } - this.TpdsController.mergeUpdate(this.req, res) - }) - }) - - describe('getting a delete update', 
function () { - it('should process the delete with the update receiver by name', function (done) { - const path = '/projectName/here.txt' - const req = { - params: { 0: path, user_id: this.user_id, project_id: '' }, - session: { - destroy() {}, - }, - headers: { - 'x-update-source': (this.source = 'dropbox'), - }, - } - const res = { - sendStatus: () => { - this.TpdsUpdateHandler.promises.deleteUpdate - .calledWith( - this.user_id, - '', - 'projectName', - '/here.txt', - this.source - ) - .should.equal(true) - done() - }, - } - this.TpdsController.deleteUpdate(req, res) - }) - - it('should process the delete with the update receiver by id', function (done) { - const path = '/here.txt' - const req = { - params: { 0: path, user_id: this.user_id, project_id: '123' }, - session: { - destroy() {}, - }, - headers: { - 'x-update-source': (this.source = 'dropbox'), - }, - } - const res = { - sendStatus: () => { - this.TpdsUpdateHandler.promises.deleteUpdate.should.have.been.calledWith( - this.user_id, - '123', - '', // projectName - '/here.txt', - this.source - ) - done() - }, - } - this.TpdsController.deleteUpdate(req, res) - }) - }) - - describe('updateFolder', function () { - beforeEach(function () { - this.req = { - body: { userId: this.user_id, path: '/abc/def/ghi.txt' }, - } - this.res = { - json: sinon.stub(), - } - }) - - it("creates a folder if it doesn't exist", function (done) { - const metadata = { - folderId: new ObjectId(), - projectId: new ObjectId(), - path: '/def/ghi.txt', - parentFolderId: new ObjectId(), - } - this.TpdsUpdateHandler.promises.createFolder.resolves(metadata) - this.res.json.callsFake(body => { - expect(body).to.deep.equal({ - entityId: metadata.folderId.toString(), - projectId: metadata.projectId.toString(), - path: metadata.path, - folderId: metadata.parentFolderId.toString(), - }) - done() - }) - this.TpdsController.updateFolder(this.req, this.res) - }) - - it('supports top level folders', function (done) { - const metadata = { - folderId: new ObjectId(), - projectId: new ObjectId(), - path: '/', - parentFolderId: null, - } - this.TpdsUpdateHandler.promises.createFolder.resolves(metadata) - this.res.json.callsFake(body => { - expect(body).to.deep.equal({ - entityId: metadata.folderId.toString(), - projectId: metadata.projectId.toString(), - path: metadata.path, - folderId: null, - }) - done() - }) - this.TpdsController.updateFolder(this.req, this.res) - }) - - it("returns a 409 if the folder couldn't be created", function (done) { - this.TpdsUpdateHandler.promises.createFolder.resolves(null) - this.HttpErrorHandler.conflict.callsFake((req, res) => { - expect(req).to.equal(this.req) - expect(res).to.equal(this.res) - done() - }) - this.TpdsController.updateFolder(this.req, this.res) - }) - }) - - describe('parseParams', function () { - it('should take the project name off the start and replace with slash', function () { - const path = 'noSlashHere' - const req = { params: { 0: path, user_id: this.user_id } } - const result = this.TpdsController.parseParams(req) - result.userId.should.equal(this.user_id) - result.filePath.should.equal('/') - result.projectName.should.equal(path) - }) - - it('should take the project name off the start and it with no slashes in', function () { - const path = '/project/file.tex' - const req = { params: { 0: path, user_id: this.user_id } } - const result = this.TpdsController.parseParams(req) - result.userId.should.equal(this.user_id) - result.filePath.should.equal('/file.tex') - result.projectName.should.equal('project') - }) - - 
it('should take the project name of and return a slash for the file path', function () { - const path = '/project_name' - const req = { params: { 0: path, user_id: this.user_id } } - const result = this.TpdsController.parseParams(req) - result.projectName.should.equal('project_name') - result.filePath.should.equal('/') - }) - }) - - describe('updateProjectContents', function () { - beforeEach(async function () { - this.req = { - params: { - 0: (this.path = 'chapters/main.tex'), - project_id: (this.project_id = 'project-id-123'), - }, - session: { - destroy: sinon.stub(), - }, - headers: { - 'x-update-source': (this.source = 'github'), - }, - } - - this.res = { - json: sinon.stub(), - sendStatus: sinon.stub(), - } - - await this.TpdsController.promises.updateProjectContents( - this.req, - this.res - ) - }) - - it('should merge the update', function () { - this.UpdateMerger.promises.mergeUpdate.should.be.calledWith( - null, - this.project_id, - `/${this.path}`, - this.req, - this.source - ) - }) - - it('should return a success', function () { - this.res.json.should.be.calledWith({ - entityId: this.metadata.entityId.toString(), - rev: this.metadata.rev, - }) - }) - }) - - describe('deleteProjectContents', function () { - beforeEach(async function () { - this.req = { - params: { - 0: (this.path = 'chapters/main.tex'), - project_id: (this.project_id = 'project-id-123'), - }, - session: { - destroy: sinon.stub(), - }, - headers: { - 'x-update-source': (this.source = 'github'), - }, - } - this.res = { - sendStatus: sinon.stub(), - json: sinon.stub(), - } - - await this.TpdsController.promises.deleteProjectContents( - this.req, - this.res - ) - }) - - it('should delete the file', function () { - this.UpdateMerger.promises.deleteUpdate.should.be.calledWith( - null, - this.project_id, - `/${this.path}`, - this.source - ) - }) - - it('should return a success', function () { - this.res.json.should.be.calledWith({ - entityId: this.metadata.entityId, - }) - }) - }) - - describe('getQueues', function () { - beforeEach(function () { - this.req = {} - this.res = { json: sinon.stub() } - this.next = sinon.stub() - }) - - describe('success', function () { - beforeEach(function (done) { - this.res.json.callsFake(() => { - done() - }) - this.TpdsController.getQueues(this.req, this.res, this.next) - }) - - it('should use userId from session', function () { - this.SessionManager.getLoggedInUserId.should.have.been.calledOnce - this.TpdsQueueManager.promises.getQueues.should.have.been.calledWith( - 'user-id' - ) - }) - - it('should call json with response', function () { - this.res.json.should.have.been.calledWith('queues') - this.next.should.not.have.been.called - }) - }) - - describe('error', function () { - beforeEach(function (done) { - this.err = new Error() - this.TpdsQueueManager.promises.getQueues = sinon - .stub() - .rejects(this.err) - this.next.callsFake(() => { - done() - }) - this.TpdsController.getQueues(this.req, this.res, this.next) - }) - - it('should call next with error', function () { - this.res.json.should.not.have.been.called - this.next.should.have.been.calledWith(this.err) - }) - }) - }) -}) diff --git a/services/web/test/unit/src/ThirdPartyDataStore/TpdsUpdateHandlerTests.mjs b/services/web/test/unit/src/ThirdPartyDataStore/TpdsUpdateHandler.test.mjs similarity index 59% rename from services/web/test/unit/src/ThirdPartyDataStore/TpdsUpdateHandlerTests.mjs rename to services/web/test/unit/src/ThirdPartyDataStore/TpdsUpdateHandler.test.mjs index a5ca099b5b..08a7dcf494 100644 --- 
a/services/web/test/unit/src/ThirdPartyDataStore/TpdsUpdateHandlerTests.mjs +++ b/services/web/test/unit/src/ThirdPartyDataStore/TpdsUpdateHandler.test.mjs @@ -1,6 +1,5 @@ -import esmock from 'esmock' +import { expect, vi } from 'vitest' import sinon from 'sinon' -import { expect } from 'chai' import mongodb from 'mongodb-legacy' import Errors from '../../../../app/src/Features/Errors/Errors.js' @@ -9,120 +8,158 @@ const ObjectId = mongodb.ObjectId const MODULE_PATH = '../../../../app/src/Features/ThirdPartyDataStore/TpdsUpdateHandler.mjs' +vi.mock('../../../../app/src/Features/Errors/Errors.js', () => + vi.importActual('../../../../app/src/Features/Errors/Errors.js') +) + describe('TpdsUpdateHandler', function () { - beforeEach(async function () { - this.projectName = 'My recipes' - this.projects = { - active1: { _id: new ObjectId(), name: this.projectName }, - active2: { _id: new ObjectId(), name: this.projectName }, + beforeEach(async function (ctx) { + ctx.projectName = 'My recipes' + ctx.projects = { + active1: { _id: new ObjectId(), name: ctx.projectName }, + active2: { _id: new ObjectId(), name: ctx.projectName }, archived1: { _id: new ObjectId(), - name: this.projectName, - archived: [this.userId], + name: ctx.projectName, + archived: [ctx.userId], }, archived2: { _id: new ObjectId(), - name: this.projectName, - archived: [this.userId], + name: ctx.projectName, + archived: [ctx.userId], }, } - this.userId = new ObjectId() - this.source = 'dropbox' - this.path = `/some/file` - this.update = {} - this.folderPath = '/some/folder' - this.folder = { + ctx.userId = new ObjectId() + ctx.source = 'dropbox' + ctx.path = `/some/file` + ctx.update = {} + ctx.folderPath = '/some/folder' + ctx.folder = { _id: new ObjectId(), parentFolder_id: new ObjectId(), } - this.CooldownManager = { + ctx.CooldownManager = { promises: { isProjectOnCooldown: sinon.stub().resolves(false), }, } - this.FileTypeManager = { + ctx.FileTypeManager = { promises: { shouldIgnore: sinon.stub().resolves(false), }, } - this.Modules = { + ctx.Modules = { promises: { hooks: { fire: sinon.stub().resolves() }, }, } - this.notification = { + ctx.notification = { create: sinon.stub().resolves(), } - this.NotificationsBuilder = { + ctx.NotificationsBuilder = { promises: { - dropboxDuplicateProjectNames: sinon.stub().returns(this.notification), + dropboxDuplicateProjectNames: sinon.stub().returns(ctx.notification), }, } - this.ProjectCreationHandler = { + ctx.ProjectCreationHandler = { promises: { - createBlankProject: sinon.stub().resolves(this.projects.active1), + createBlankProject: sinon.stub().resolves(ctx.projects.active1), }, } - this.ProjectDeleter = { + ctx.ProjectDeleter = { promises: { markAsDeletedByExternalSource: sinon.stub().resolves(), }, } - this.ProjectGetter = { + ctx.ProjectGetter = { promises: { findUsersProjectsByName: sinon.stub(), findAllUsersProjects: sinon .stub() - .resolves({ owned: [this.projects.active1], readAndWrite: [] }), + .resolves({ owned: [ctx.projects.active1], readAndWrite: [] }), }, } - this.ProjectHelper = { + ctx.ProjectHelper = { isArchivedOrTrashed: sinon.stub().returns(false), } - this.ProjectHelper.isArchivedOrTrashed - .withArgs(this.projects.archived1, this.userId) + ctx.ProjectHelper.isArchivedOrTrashed + .withArgs(ctx.projects.archived1, ctx.userId) .returns(true) - this.ProjectHelper.isArchivedOrTrashed - .withArgs(this.projects.archived2, this.userId) + ctx.ProjectHelper.isArchivedOrTrashed + .withArgs(ctx.projects.archived2, ctx.userId) .returns(true) - this.RootDocManager = { 
+ ctx.RootDocManager = { setRootDocAutomaticallyInBackground: sinon.stub(), } - this.UpdateMerger = { + ctx.UpdateMerger = { promises: { deleteUpdate: sinon.stub().resolves(), mergeUpdate: sinon.stub().resolves(), - createFolder: sinon.stub().resolves(this.folder), + createFolder: sinon.stub().resolves(ctx.folder), }, } - this.TpdsUpdateHandler = await esmock.strict(MODULE_PATH, { - '.../../../../app/src/Features/Cooldown/CooldownManager': - this.CooldownManager, - '../../../../app/src/Features/Uploads/FileTypeManager': - this.FileTypeManager, - '../../../../app/src/infrastructure/Modules': this.Modules, - '../../../../app/src/Features/Notifications/NotificationsBuilder': - this.NotificationsBuilder, - '../../../../app/src/Features/Project/ProjectCreationHandler': - this.ProjectCreationHandler, - '../../../../app/src/Features/Project/ProjectDeleter': - this.ProjectDeleter, - '../../../../app/src/Features/Project/ProjectGetter': this.ProjectGetter, - '../../../../app/src/Features/Project/ProjectHelper': this.ProjectHelper, - '../../../../app/src/Features/Project/ProjectRootDocManager': - this.RootDocManager, - '../../../../app/src/Features/ThirdPartyDataStore/UpdateMerger': - this.UpdateMerger, - }) + vi.doMock('../../../../app/src/Features/Cooldown/CooldownManager', () => ({ + default: ctx.CooldownManager, + })) + + vi.doMock('../../../../app/src/Features/Uploads/FileTypeManager', () => ({ + default: ctx.FileTypeManager, + })) + + vi.doMock('../../../../app/src/infrastructure/Modules', () => ({ + default: ctx.Modules, + })) + + vi.doMock( + '../../../../app/src/Features/Notifications/NotificationsBuilder', + () => ({ + default: ctx.NotificationsBuilder, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Project/ProjectCreationHandler', + () => ({ + default: ctx.ProjectCreationHandler, + }) + ) + + vi.doMock('../../../../app/src/Features/Project/ProjectDeleter', () => ({ + default: ctx.ProjectDeleter, + })) + + vi.doMock('../../../../app/src/Features/Project/ProjectGetter', () => ({ + default: ctx.ProjectGetter, + })) + + vi.doMock('../../../../app/src/Features/Project/ProjectHelper', () => ({ + default: ctx.ProjectHelper, + })) + + vi.doMock( + '../../../../app/src/Features/Project/ProjectRootDocManager', + () => ({ + default: ctx.RootDocManager, + }) + ) + + vi.doMock( + '../../../../app/src/Features/ThirdPartyDataStore/UpdateMerger', + () => ({ + default: ctx.UpdateMerger, + }) + ) + + ctx.TpdsUpdateHandler = (await import(MODULE_PATH)).default }) describe('getting an update', function () { describe('byId', function () { describe('with no matching project', function () { - beforeEach(function () { - this.projectId = new ObjectId().toString() + beforeEach(function (ctx) { + ctx.projectId = new ObjectId().toString() }) receiveUpdateById() expectProjectNotCreated() @@ -130,8 +167,8 @@ describe('TpdsUpdateHandler', function () { }) describe('with one matching active project', function () { - beforeEach(function () { - this.projectId = this.projects.active1._id.toString() + beforeEach(function (ctx) { + ctx.projectId = ctx.projects.active1._id.toString() }) receiveUpdateById() expectProjectNotCreated() @@ -187,8 +224,8 @@ describe('TpdsUpdateHandler', function () { describe('update to a file that should be ignored', async function () { setupMatchingProjects(['active1']) - beforeEach(function () { - this.FileTypeManager.promises.shouldIgnore.resolves(true) + beforeEach(function (ctx) { + ctx.FileTypeManager.promises.shouldIgnore.resolves(true) }) receiveUpdate() expectProjectNotCreated() 
@@ -199,15 +236,15 @@ describe('TpdsUpdateHandler', function () { describe('update to a project on cooldown', async function () { setupMatchingProjects(['active1']) setupProjectOnCooldown() - beforeEach(async function () { + beforeEach(async function (ctx) { await expect( - this.TpdsUpdateHandler.promises.newUpdate( - this.userId, + ctx.TpdsUpdateHandler.promises.newUpdate( + ctx.userId, '', // projectId - this.projectName, - this.path, - this.update, - this.source + ctx.projectName, + ctx.path, + ctx.update, + ctx.source ) ).to.be.rejectedWith(Errors.TooManyRequestsError) }) @@ -218,8 +255,8 @@ describe('TpdsUpdateHandler', function () { describe('getting a file delete', function () { describe('byId', function () { describe('with no matching project', function () { - beforeEach(function () { - this.projectId = new ObjectId().toString() + beforeEach(function (ctx) { + ctx.projectId = new ObjectId().toString() }) receiveFileDeleteById() expectDeleteNotProcessed() @@ -227,8 +264,8 @@ describe('TpdsUpdateHandler', function () { }) describe('with one matching active project', function () { - beforeEach(function () { - this.projectId = this.projects.active1._id.toString() + beforeEach(function (ctx) { + ctx.projectId = ctx.projects.active1._id.toString() }) receiveFileDeleteById() expectDeleteProcessed() @@ -379,13 +416,13 @@ describe('TpdsUpdateHandler', function () { describe('update to a project on cooldown', async function () { setupMatchingProjects(['active1']) setupProjectOnCooldown() - beforeEach(async function () { + beforeEach(async function (ctx) { await expect( - this.TpdsUpdateHandler.promises.createFolder( - this.userId, - this.projectId, - this.projectName, - this.path + ctx.TpdsUpdateHandler.promises.createFolder( + ctx.userId, + ctx.projectId, + ctx.projectName, + ctx.path ) ).to.be.rejectedWith(Errors.TooManyRequestsError) }) @@ -397,18 +434,18 @@ describe('TpdsUpdateHandler', function () { /* Setup helpers */ function setupMatchingProjects(projectKeys) { - beforeEach(function () { - const projects = projectKeys.map(key => this.projects[key]) - this.ProjectGetter.promises.findUsersProjectsByName - .withArgs(this.userId, this.projectName) + beforeEach(function (ctx) { + const projects = projectKeys.map(key => ctx.projects[key]) + ctx.ProjectGetter.promises.findUsersProjectsByName + .withArgs(ctx.userId, ctx.projectName) .resolves(projects) }) } function setupProjectOnCooldown() { - beforeEach(function () { - this.CooldownManager.promises.isProjectOnCooldown - .withArgs(this.projects.active1._id) + beforeEach(function (ctx) { + ctx.CooldownManager.promises.isProjectOnCooldown + .withArgs(ctx.projects.active1._id) .resolves(true) }) } @@ -416,76 +453,77 @@ function setupProjectOnCooldown() { /* Test helpers */ function receiveUpdate() { - beforeEach(async function () { - await this.TpdsUpdateHandler.promises.newUpdate( - this.userId, + beforeEach(async function (ctx) { + await ctx.TpdsUpdateHandler.promises.newUpdate( + ctx.userId, '', // projectId - this.projectName, - this.path, - this.update, - this.source + ctx.projectName, + ctx.path, + ctx.update, + ctx.source ) }) } function receiveUpdateById() { - beforeEach(function (done) { - this.TpdsUpdateHandler.newUpdate( - this.userId, - this.projectId, + beforeEach(async function (ctx) { + await ctx.TpdsUpdateHandler.promises.newUpdate( + ctx.userId, + ctx.projectId, '', // projectName - this.path, - this.update, - this.source, - done + ctx.path, + ctx.update, + ctx.source ) }) } function receiveFileDelete() { - beforeEach(async 
function () { - await this.TpdsUpdateHandler.promises.deleteUpdate( - this.userId, + beforeEach(async function (ctx) { + await ctx.TpdsUpdateHandler.promises.deleteUpdate( + ctx.userId, '', // projectId - this.projectName, - this.path, - this.source + ctx.projectName, + ctx.path, + ctx.source ) }) } function receiveFileDeleteById() { - beforeEach(function (done) { - this.TpdsUpdateHandler.deleteUpdate( - this.userId, - this.projectId, - '', // projectName - this.path, - this.source, - done - ) + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.TpdsUpdateHandler.deleteUpdate( + ctx.userId, + ctx.projectId, + '', // projectName + ctx.path, + ctx.source, + resolve + ) + }) }) } function receiveProjectDelete() { - beforeEach(async function () { - await this.TpdsUpdateHandler.promises.deleteUpdate( - this.userId, + beforeEach(async function (ctx) { + await ctx.TpdsUpdateHandler.promises.deleteUpdate( + ctx.userId, '', // projectId - this.projectName, + ctx.projectName, '/', - this.source + ctx.source ) }) } function receiveFolderUpdate() { - beforeEach(async function () { - await this.TpdsUpdateHandler.promises.createFolder( - this.userId, - this.projectId, - this.projectName, - this.folderPath + beforeEach(async function (ctx) { + await ctx.TpdsUpdateHandler.promises.createFolder( + ctx.userId, + ctx.projectId, + ctx.projectName, + ctx.folderPath ) }) } @@ -493,121 +531,121 @@ function receiveFolderUpdate() { /* Expectations */ function expectProjectCreated() { - it('creates a project', function () { + it('creates a project', function (ctx) { expect( - this.ProjectCreationHandler.promises.createBlankProject - ).to.have.been.calledWith(this.userId, this.projectName) + ctx.ProjectCreationHandler.promises.createBlankProject + ).to.have.been.calledWith(ctx.userId, ctx.projectName) }) - it('sets the root doc', function () { + it('sets the root doc', function (ctx) { expect( - this.RootDocManager.setRootDocAutomaticallyInBackground - ).to.have.been.calledWith(this.projects.active1._id) + ctx.RootDocManager.setRootDocAutomaticallyInBackground + ).to.have.been.calledWith(ctx.projects.active1._id) }) } function expectProjectNotCreated() { - it('does not create a project', function () { - expect(this.ProjectCreationHandler.promises.createBlankProject).not.to.have + it('does not create a project', function (ctx) { + expect(ctx.ProjectCreationHandler.promises.createBlankProject).not.to.have .been.called }) - it('does not set the root doc', function () { - expect(this.RootDocManager.setRootDocAutomaticallyInBackground).not.to.have + it('does not set the root doc', function (ctx) { + expect(ctx.RootDocManager.setRootDocAutomaticallyInBackground).not.to.have .been.called }) } function expectUpdateProcessed() { - it('processes the update', function () { - expect(this.UpdateMerger.promises.mergeUpdate).to.have.been.calledWith( - this.userId, - this.projects.active1._id, - this.path, - this.update, - this.source + it('processes the update', function (ctx) { + expect(ctx.UpdateMerger.promises.mergeUpdate).to.have.been.calledWith( + ctx.userId, + ctx.projects.active1._id, + ctx.path, + ctx.update, + ctx.source ) }) } function expectUpdateNotProcessed() { - it('does not process the update', function () { - expect(this.UpdateMerger.promises.mergeUpdate).not.to.have.been.called + it('does not process the update', function (ctx) { + expect(ctx.UpdateMerger.promises.mergeUpdate).not.to.have.been.called }) } function expectFolderUpdateProcessed() { - it('processes the folder update', function () { - 
expect(this.UpdateMerger.promises.createFolder).to.have.been.calledWith( - this.projects.active1._id, - this.folderPath, - this.userId + it('processes the folder update', function (ctx) { + expect(ctx.UpdateMerger.promises.createFolder).to.have.been.calledWith( + ctx.projects.active1._id, + ctx.folderPath, + ctx.userId ) }) } function expectFolderUpdateNotProcessed() { - it("doesn't process the folder update", function () { - expect(this.UpdateMerger.promises.createFolder).not.to.have.been.called + it("doesn't process the folder update", function (ctx) { + expect(ctx.UpdateMerger.promises.createFolder).not.to.have.been.called }) } function expectDropboxUnlinked() { - it('unlinks Dropbox', function () { - expect(this.Modules.promises.hooks.fire).to.have.been.calledWith( + it('unlinks Dropbox', function (ctx) { + expect(ctx.Modules.promises.hooks.fire).to.have.been.calledWith( 'removeDropbox', - this.userId, + ctx.userId, 'duplicate-projects' ) }) - it('creates a notification that dropbox was unlinked', function () { + it('creates a notification that dropbox was unlinked', function (ctx) { expect( - this.NotificationsBuilder.promises.dropboxDuplicateProjectNames - ).to.have.been.calledWith(this.userId) - expect(this.notification.create).to.have.been.calledWith(this.projectName) + ctx.NotificationsBuilder.promises.dropboxDuplicateProjectNames + ).to.have.been.calledWith(ctx.userId) + expect(ctx.notification.create).to.have.been.calledWith(ctx.projectName) }) } function expectDropboxNotUnlinked() { - it('does not unlink Dropbox', function () { - expect(this.Modules.promises.hooks.fire).not.to.have.been.called + it('does not unlink Dropbox', function (ctx) { + expect(ctx.Modules.promises.hooks.fire).not.to.have.been.called }) - it('does not create a notification that dropbox was unlinked', function () { - expect(this.NotificationsBuilder.promises.dropboxDuplicateProjectNames).not + it('does not create a notification that dropbox was unlinked', function (ctx) { + expect(ctx.NotificationsBuilder.promises.dropboxDuplicateProjectNames).not .to.have.been.called }) } function expectDeleteProcessed() { - it('processes the delete', function () { - expect(this.UpdateMerger.promises.deleteUpdate).to.have.been.calledWith( - this.userId, - this.projects.active1._id, - this.path, - this.source + it('processes the delete', function (ctx) { + expect(ctx.UpdateMerger.promises.deleteUpdate).to.have.been.calledWith( + ctx.userId, + ctx.projects.active1._id, + ctx.path, + ctx.source ) }) } function expectDeleteNotProcessed() { - it('does not process the delete', function () { - expect(this.UpdateMerger.promises.deleteUpdate).not.to.have.been.called + it('does not process the delete', function (ctx) { + expect(ctx.UpdateMerger.promises.deleteUpdate).not.to.have.been.called }) } function expectProjectDeleted() { - it('deletes the project', function () { + it('deletes the project', function (ctx) { expect( - this.ProjectDeleter.promises.markAsDeletedByExternalSource - ).to.have.been.calledWith(this.projects.active1._id) + ctx.ProjectDeleter.promises.markAsDeletedByExternalSource + ).to.have.been.calledWith(ctx.projects.active1._id) }) } function expectProjectNotDeleted() { - it('does not delete the project', function () { - expect(this.ProjectDeleter.promises.markAsDeletedByExternalSource).not.to + it('does not delete the project', function (ctx) { + expect(ctx.ProjectDeleter.promises.markAsDeletedByExternalSource).not.to .have.been.called }) } diff --git 
a/services/web/test/unit/src/TokenAccess/TokenAccessController.test.mjs b/services/web/test/unit/src/TokenAccess/TokenAccessController.test.mjs new file mode 100644 index 0000000000..96d2d19b04 --- /dev/null +++ b/services/web/test/unit/src/TokenAccess/TokenAccessController.test.mjs @@ -0,0 +1,1276 @@ +import { expect, vi } from 'vitest' +import sinon from 'sinon' +import mongodb from 'mongodb-legacy' +import MockRequest from '../helpers/MockRequest.js' +import MockResponse from '../helpers/MockResponse.js' +import PrivilegeLevels from '../../../../app/src/Features/Authorization/PrivilegeLevels.js' +import { getSafeRedirectPath } from '../../../../app/src/Features/Helpers/UrlHelper.js' + +const ObjectId = mongodb.ObjectId + +const MODULE_PATH = + '../../../../app/src/Features/TokenAccess/TokenAccessController' + +describe('TokenAccessController', function () { + beforeEach(async function (ctx) { + ctx.token = 'abc123' + ctx.user = { _id: new ObjectId() } + ctx.project = { + _id: new ObjectId(), + owner_ref: ctx.user._id, + name: 'test', + tokenAccessReadAndWrite_refs: [], + tokenAccessReadOnly_refs: [], + } + ctx.req = new MockRequest() + ctx.res = new MockResponse() + ctx.next = sinon.stub().returns() + + ctx.Settings = { + siteUrl: 'https://www.dev-overleaf.com', + adminPrivilegeAvailable: false, + adminUrl: 'https://admin.dev-overleaf.com', + adminDomains: ['overleaf.com'], + } + ctx.TokenAccessHandler = { + TOKEN_TYPES: { + READ_ONLY: 'readOnly', + READ_AND_WRITE: 'readAndWrite', + }, + isReadAndWriteToken: sinon.stub().returns(true), + isReadOnlyToken: sinon.stub().returns(true), + tokenAccessEnabledForProject: sinon.stub().returns(true), + checkTokenHashPrefix: sinon.stub(), + makeTokenUrl: sinon.stub().returns('/'), + grantSessionTokenAccess: sinon.stub(), + promises: { + addReadOnlyUserToProject: sinon.stub().resolves(), + getProjectByToken: sinon.stub().resolves(ctx.project), + getV1DocPublishedInfo: sinon.stub().resolves({ allow: true }), + getV1DocInfo: sinon.stub(), + removeReadAndWriteUserFromProject: sinon.stub().resolves(), + moveReadAndWriteUserToReadOnly: sinon.stub().resolves(), + }, + } + + ctx.SessionManager = { + getLoggedInUserId: sinon.stub().returns(ctx.user._id), + getSessionUser: sinon.stub().returns(ctx.user._id), + } + + ctx.AuthenticationController = { + setRedirectInSession: sinon.stub(), + } + + ctx.AuthorizationManager = { + promises: { + getPrivilegeLevelForProject: sinon + .stub() + .resolves(PrivilegeLevels.NONE), + }, + } + + ctx.AuthorizationMiddleware = {} + + ctx.ProjectAuditLogHandler = { + promises: { + addEntry: sinon.stub().resolves(), + }, + } + + ctx.SplitTestHandler = { + promises: { + getAssignment: sinon.stub().resolves({ variant: 'default' }), + getAssignmentForUser: sinon.stub().resolves({ variant: 'default' }), + }, + } + + ctx.CollaboratorsInviteHandler = { + promises: { + revokeInviteForUser: sinon.stub().resolves(), + }, + } + + ctx.CollaboratorsHandler = { + promises: { + addUserIdToProject: sinon.stub().resolves(), + setCollaboratorPrivilegeLevel: sinon.stub().resolves(), + }, + } + + ctx.CollaboratorsGetter = { + promises: { + userIsReadWriteTokenMember: sinon.stub().resolves(), + isUserInvitedReadWriteMemberOfProject: sinon.stub().resolves(), + isUserInvitedMemberOfProject: sinon.stub().resolves(), + }, + } + + ctx.EditorRealTimeController = { emitToRoom: sinon.stub() } + + ctx.ProjectGetter = { + promises: { + getProject: sinon.stub().resolves(ctx.project), + }, + } + + ctx.AnalyticsManager = { + recordEventForSession: 
sinon.stub(), + recordEventForUserInBackground: sinon.stub(), + } + + ctx.UserGetter = { + promises: { + getUser: sinon.stub().callsFake(async (userId, filter) => { + if (userId === ctx.userId) { + return ctx.user + } else { + return null + } + }), + getUserEmail: sinon.stub().resolves(), + getUserConfirmedEmails: sinon.stub().resolves(), + }, + } + + ctx.LimitationsManager = { + promises: { + canAcceptEditCollaboratorInvite: sinon.stub().resolves(), + }, + } + + vi.doMock('@overleaf/settings', () => ({ + default: ctx.Settings, + })) + + vi.doMock( + '../../../../app/src/Features/TokenAccess/TokenAccessHandler', + () => ({ + default: ctx.TokenAccessHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Authentication/AuthenticationController', + () => ({ + default: ctx.AuthenticationController, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Authentication/SessionManager', + () => ({ + default: ctx.SessionManager, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Authorization/AuthorizationManager', + () => ({ + default: ctx.AuthorizationManager, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Authorization/AuthorizationMiddleware', + () => ({ + default: ctx.AuthorizationMiddleware, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Project/ProjectAuditLogHandler', + () => ({ + default: ctx.ProjectAuditLogHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/SplitTests/SplitTestHandler', + () => ({ + default: ctx.SplitTestHandler, + }) + ) + + vi.doMock('../../../../app/src/Features/Errors/Errors', () => ({ + default: (ctx.Errors = { + NotFoundError: sinon.stub(), + }), + })) + + vi.doMock( + '../../../../app/src/Features/Collaborators/CollaboratorsHandler', + () => ({ + default: ctx.CollaboratorsHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Collaborators/CollaboratorsInviteHandler', + () => ({ + default: ctx.CollaboratorsInviteHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Collaborators/CollaboratorsGetter', + () => ({ + default: ctx.CollaboratorsGetter, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Editor/EditorRealTimeController', + () => ({ + default: ctx.EditorRealTimeController, + }) + ) + + vi.doMock('../../../../app/src/Features/Project/ProjectGetter', () => ({ + default: ctx.ProjectGetter, + })) + + vi.doMock('../../../../app/src/Features/Helpers/AsyncFormHelper', () => ({ + default: (ctx.AsyncFormHelper = { + redirect: sinon.stub(), + }), + })) + + vi.doMock( + '../../../../app/src/Features/Helpers/AdminAuthorizationHelper', + () => + (ctx.AdminAuthorizationHelper = { + canRedirectToAdminDomain: sinon.stub(), + }) + ) + + vi.doMock( + '../../../../app/src/Features/Helpers/UrlHelper', + () => + (ctx.UrlHelper = { + getSafeAdminDomainRedirect: sinon + .stub() + .callsFake( + path => `${ctx.Settings.adminUrl}${getSafeRedirectPath(path)}` + ), + }) + ) + + vi.doMock( + '../../../../app/src/Features/Analytics/AnalyticsManager', + () => ({ + default: ctx.AnalyticsManager, + }) + ) + + vi.doMock('../../../../app/src/Features/User/UserGetter', () => ({ + default: ctx.UserGetter, + })) + + vi.doMock( + '../../../../app/src/Features/Subscription/LimitationsManager', + () => ({ + default: ctx.LimitationsManager, + }) + ) + + ctx.TokenAccessController = (await import(MODULE_PATH)).default + }) + + describe('grantTokenAccessReadAndWrite', function () { + beforeEach(function (ctx) { + ctx.LimitationsManager.promises.canAcceptEditCollaboratorInvite.resolves( + true + ) + }) + + describe('normal case 
(edit slot available)', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.LimitationsManager.promises.canAcceptEditCollaboratorInvite.resolves( + true + ) + ctx.req.params = { token: ctx.token } + ctx.req.body = { + confirmedByUser: true, + tokenHashPrefix: '#prefix', + } + ctx.res.callback = resolve + ctx.TokenAccessController.grantTokenAccessReadAndWrite( + ctx.req, + ctx.res, + resolve + ) + }) + }) + + it('adds the user as a read and write invited member', function (ctx) { + expect( + ctx.CollaboratorsHandler.promises.addUserIdToProject + ).to.have.been.calledWith( + ctx.project._id, + undefined, + ctx.user._id, + PrivilegeLevels.READ_AND_WRITE + ) + }) + + it('writes a project audit log', function (ctx) { + expect( + ctx.ProjectAuditLogHandler.promises.addEntry + ).to.have.been.calledWith( + ctx.project._id, + 'accept-via-link-sharing', + ctx.user._id, + ctx.req.ip, + { privileges: 'readAndWrite' } + ) + }) + + it('records a project-joined event for the user', function (ctx) { + expect( + ctx.AnalyticsManager.recordEventForUserInBackground + ).to.have.been.calledWith(ctx.user._id, 'project-joined', { + mode: 'edit', + projectId: ctx.project._id.toString(), + ownerId: ctx.project.owner_ref.toString(), + role: PrivilegeLevels.READ_AND_WRITE, + source: 'link-sharing', + }) + }) + + it('emits a project membership changed event', function (ctx) { + expect(ctx.EditorRealTimeController.emitToRoom).to.have.been.calledWith( + ctx.project._id, + 'project:membership:changed', + { members: true, invites: true } + ) + }) + + it('checks token hash', function (ctx) { + expect( + ctx.TokenAccessHandler.checkTokenHashPrefix + ).to.have.been.calledWith( + ctx.token, + '#prefix', + 'readAndWrite', + ctx.user._id, + { projectId: ctx.project._id, action: 'continue' } + ) + }) + }) + + describe('when there are no edit collaborator slots available', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.LimitationsManager.promises.canAcceptEditCollaboratorInvite.resolves( + false + ) + ctx.req.params = { token: ctx.token } + ctx.req.body = { + confirmedByUser: true, + tokenHashPrefix: '#prefix', + } + ctx.res.callback = resolve + ctx.TokenAccessController.grantTokenAccessReadAndWrite( + ctx.req, + ctx.res, + resolve + ) + }) + }) + + it('adds the user as a read only invited member instead (pendingEditor)', function (ctx) { + expect( + ctx.CollaboratorsHandler.promises.addUserIdToProject + ).to.have.been.calledWith( + ctx.project._id, + undefined, + ctx.user._id, + PrivilegeLevels.READ_ONLY, + { pendingEditor: true } + ) + }) + + it('writes a project audit log', function (ctx) { + expect( + ctx.ProjectAuditLogHandler.promises.addEntry + ).to.have.been.calledWith( + ctx.project._id, + 'accept-via-link-sharing', + ctx.user._id, + ctx.req.ip, + { privileges: 'readOnly', pendingEditor: true } + ) + }) + + it('records a project-joined event for the user', function (ctx) { + expect( + ctx.AnalyticsManager.recordEventForUserInBackground + ).to.have.been.calledWith(ctx.user._id, 'project-joined', { + mode: 'view', + projectId: ctx.project._id.toString(), + pendingEditor: true, + ownerId: ctx.project.owner_ref.toString(), + role: PrivilegeLevels.READ_ONLY, + source: 'link-sharing', + }) + }) + + it('emits a project membership changed event', function (ctx) { + expect(ctx.EditorRealTimeController.emitToRoom).to.have.been.calledWith( + ctx.project._id, + 'project:membership:changed', + { members: true, invites: true } + ) + }) + + it('checks token hash', 
function (ctx) { + expect( + ctx.TokenAccessHandler.checkTokenHashPrefix + ).to.have.been.calledWith( + ctx.token, + '#prefix', + 'readAndWrite', + ctx.user._id, + { projectId: ctx.project._id, action: 'continue' } + ) + }) + }) + + describe('when the access was already granted', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.project.tokenAccessReadAndWrite_refs.push(ctx.user._id) + ctx.req.params = { token: ctx.token } + ctx.req.body = { confirmedByUser: true } + ctx.res.callback = resolve + ctx.TokenAccessController.grantTokenAccessReadAndWrite( + ctx.req, + ctx.res, + resolve + ) + }) + }) + + it('writes a project audit log', function (ctx) { + expect( + ctx.ProjectAuditLogHandler.promises.addEntry + ).to.have.been.calledWith( + ctx.project._id, + 'accept-via-link-sharing', + ctx.user._id, + ctx.req.ip, + { privileges: 'readAndWrite' } + ) + }) + + it('checks token hash', function (ctx) { + expect( + ctx.TokenAccessHandler.checkTokenHashPrefix + ).to.have.been.calledWith( + ctx.token, + undefined, + 'readAndWrite', + ctx.user._id, + { projectId: ctx.project._id, action: 'continue' } + ) + }) + }) + + describe('hash prefix missing in request', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.req.params = { token: ctx.token } + ctx.req.body = { confirmedByUser: true } + ctx.res.callback = resolve + ctx.TokenAccessController.grantTokenAccessReadAndWrite( + ctx.req, + ctx.res, + resolve + ) + }) + }) + + it('adds the user as a read and write invited member', function (ctx) { + expect( + ctx.CollaboratorsHandler.promises.addUserIdToProject + ).to.have.been.calledWith( + ctx.project._id, + undefined, + ctx.user._id, + PrivilegeLevels.READ_AND_WRITE + ) + }) + + it('checks the hash prefix', function (ctx) { + expect( + ctx.TokenAccessHandler.checkTokenHashPrefix + ).to.have.been.calledWith( + ctx.token, + undefined, + 'readAndWrite', + ctx.user._id, + { projectId: ctx.project._id, action: 'continue' } + ) + }) + }) + + describe('user is owner of project', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.AuthorizationManager.promises.getPrivilegeLevelForProject.returns( + PrivilegeLevels.OWNER + ) + ctx.req.params = { token: ctx.token } + ctx.req.body = {} + ctx.res.callback = resolve + ctx.TokenAccessController.grantTokenAccessReadAndWrite( + ctx.req, + ctx.res, + resolve + ) + }) + }) + it('checks token hash and includes log data', function (ctx) { + expect( + ctx.TokenAccessHandler.checkTokenHashPrefix + ).to.have.been.calledWith( + ctx.token, + undefined, + 'readAndWrite', + ctx.user._id, + { + projectId: ctx.project._id, + action: 'user already has higher or same privilege', + } + ) + }) + }) + + describe('when user is not logged in', function () { + beforeEach(function (ctx) { + ctx.SessionManager.getLoggedInUserId.returns(null) + ctx.req.params = { token: ctx.token } + ctx.req.body = { tokenHashPrefix: '#prefix' } + }) + describe('ANONYMOUS_READ_AND_WRITE_ENABLED is undefined', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.res.callback = resolve + ctx.TokenAccessController.grantTokenAccessReadAndWrite( + ctx.req, + ctx.res, + resolve + ) + }) + }) + + it('redirects to restricted', function (ctx) { + expect(ctx.res.json).to.have.been.calledWith({ + redirect: '/restricted', + anonWriteAccessDenied: true, + }) + }) + + it('checks the hash prefix and includes log data', function (ctx) { + expect( + ctx.TokenAccessHandler.checkTokenHashPrefix + 
).to.have.been.calledWith( + ctx.token, + '#prefix', + 'readAndWrite', + null, + { + action: 'denied anonymous read-and-write token access', + } + ) + }) + + it('saves redirect URL with URL fragment', function (ctx) { + expect( + ctx.AuthenticationController.setRedirectInSession.lastCall.args[1] + ).to.equal('/#prefix') + }) + }) + + describe('ANONYMOUS_READ_AND_WRITE_ENABLED is true', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.TokenAccessHandler.ANONYMOUS_READ_AND_WRITE_ENABLED = true + ctx.res.callback = resolve + + ctx.TokenAccessController.grantTokenAccessReadAndWrite( + ctx.req, + ctx.res, + resolve + ) + }) + }) + + it('redirects to project', function (ctx) { + expect(ctx.res.json).to.have.been.calledWith({ + redirect: `/project/${ctx.project._id}`, + grantAnonymousAccess: 'readAndWrite', + }) + }) + + it('checks the hash prefix and includes log data', function (ctx) { + expect( + ctx.TokenAccessHandler.checkTokenHashPrefix + ).to.have.been.calledWith( + ctx.token, + '#prefix', + 'readAndWrite', + null, + { + projectId: ctx.project._id, + action: 'granting read-write anonymous access', + } + ) + }) + }) + }) + + describe('when Overleaf SaaS', function () { + beforeEach(function (ctx) { + ctx.Settings.overleaf = {} + }) + describe('when token is for v1 project', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.TokenAccessHandler.promises.getProjectByToken.resolves( + undefined + ) + ctx.TokenAccessHandler.promises.getV1DocInfo.resolves({ + exists: true, + has_owner: true, + }) + ctx.req.params = { token: ctx.token } + ctx.req.body = { tokenHashPrefix: '#prefix' } + ctx.res.callback = resolve + ctx.TokenAccessController.grantTokenAccessReadAndWrite( + ctx.req, + ctx.res, + resolve + ) + }) + }) + it('returns v1 import data', function (ctx) { + expect(ctx.res.json).to.have.been.calledWith({ + v1Import: { + status: 'canDownloadZip', + projectId: ctx.token, + hasOwner: true, + name: 'Untitled', + brandInfo: undefined, + }, + }) + }) + it('checks the hash prefix and includes log data', function (ctx) { + expect( + ctx.TokenAccessHandler.checkTokenHashPrefix + ).to.have.been.calledWith( + ctx.token, + '#prefix', + 'readAndWrite', + ctx.user._id, + { + action: 'import v1', + } + ) + }) + }) + + describe('when token is not for a v1 or v2 project', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.TokenAccessHandler.promises.getProjectByToken.resolves( + undefined + ) + ctx.TokenAccessHandler.promises.getV1DocInfo.resolves({ + exists: false, + }) + ctx.req.params = { token: ctx.token } + ctx.req.body = { tokenHashPrefix: '#prefix' } + ctx.res.callback = resolve + ctx.TokenAccessController.grantTokenAccessReadAndWrite( + ctx.req, + ctx.res, + resolve + ) + }) + }) + it('returns 404', function (ctx) { + expect(ctx.res.sendStatus).to.have.been.calledWith(404) + }) + it('checks the hash prefix and includes log data', function (ctx) { + expect( + ctx.TokenAccessHandler.checkTokenHashPrefix + ).to.have.been.calledWith( + ctx.token, + '#prefix', + 'readAndWrite', + ctx.user._id, + { + action: '404', + } + ) + }) + }) + }) + + describe('not Overleaf SaaS', function () { + beforeEach(function (ctx) { + ctx.TokenAccessHandler.promises.getProjectByToken.resolves(undefined) + ctx.req.params = { token: ctx.token } + ctx.req.body = { tokenHashPrefix: '#prefix' } + }) + it('passes Errors.NotFoundError to next when project not found and still checks token hash', function (ctx) { + return new 
Promise(resolve => { + ctx.TokenAccessController.grantTokenAccessReadAndWrite( + ctx.req, + ctx.res, + args => { + expect(args).to.be.instanceof(ctx.Errors.NotFoundError) + + expect( + ctx.TokenAccessHandler.checkTokenHashPrefix + ).to.have.been.calledWith( + ctx.token, + '#prefix', + 'readAndWrite', + ctx.user._id, + { + action: '404', + } + ) + + resolve() + } + ) + }) + }) + }) + + describe('when user is admin', function () { + const admin = { _id: new ObjectId(), isAdmin: true } + beforeEach(function (ctx) { + ctx.SessionManager.getLoggedInUserId.returns(admin._id) + ctx.SessionManager.getSessionUser.returns(admin) + ctx.AdminAuthorizationHelper.canRedirectToAdminDomain.returns(true) + ctx.req.params = { token: ctx.token } + ctx.req.body = { confirmedByUser: true, tokenHashPrefix: '#prefix' } + }) + + it('redirects if project owner is non-admin', function (ctx) { + ctx.UserGetter.promises.getUserConfirmedEmails = sinon + .stub() + .resolves([{ email: 'test@not-overleaf.com' }]) + return new Promise(resolve => { + ctx.res.callback = () => { + expect(ctx.res.json).to.have.been.calledWith({ + redirect: `${ctx.Settings.adminUrl}/#prefix`, + }) + resolve() + } + ctx.TokenAccessController.grantTokenAccessReadAndWrite( + ctx.req, + ctx.res + ) + }) + }) + + it('grants access if project owner is an internal staff', function (ctx) { + const internalStaff = { _id: new ObjectId(), isAdmin: true } + const projectFromInternalStaff = { + _id: new ObjectId(), + name: 'test', + tokenAccessReadAndWrite_refs: [], + tokenAccessReadOnly_refs: [], + owner_ref: internalStaff._id, + } + ctx.UserGetter.promises.getUser = sinon.stub().resolves(internalStaff) + ctx.UserGetter.promises.getUserConfirmedEmails = sinon + .stub() + .resolves([{ email: 'test@overleaf.com' }]) + ctx.TokenAccessHandler.promises.getProjectByToken = sinon + .stub() + .resolves(projectFromInternalStaff) + ctx.res.callback = () => { + expect( + ctx.CollaboratorsHandler.promises.addUserIdToProject + ).to.have.been.calledWith( + projectFromInternalStaff._id, + undefined, + admin._id, + PrivilegeLevels.READ_AND_WRITE + ) + } + ctx.TokenAccessController.grantTokenAccessReadAndWrite(ctx.req, ctx.res) + }) + }) + + it('passes Errors.NotFoundError to next when token access is not enabled but still checks token hash', function (ctx) { + return new Promise(resolve => { + ctx.TokenAccessHandler.tokenAccessEnabledForProject.returns(false) + ctx.req.params = { token: ctx.token } + ctx.req.body = { tokenHashPrefix: '#prefix' } + ctx.TokenAccessController.grantTokenAccessReadAndWrite( + ctx.req, + ctx.res, + args => { + expect(args).to.be.instanceof(ctx.Errors.NotFoundError) + + expect( + ctx.TokenAccessHandler.checkTokenHashPrefix + ).to.have.been.calledWith( + ctx.token, + '#prefix', + 'readAndWrite', + ctx.user._id, + { + projectId: ctx.project._id, + action: 'token access not enabled', + } + ) + + resolve() + } + ) + }) + }) + + it('returns 400 when not using a read write token', function (ctx) { + ctx.TokenAccessHandler.isReadAndWriteToken.returns(false) + ctx.req.params = { token: ctx.token } + ctx.req.body = { tokenHashPrefix: '#prefix' } + ctx.TokenAccessController.grantTokenAccessReadAndWrite(ctx.req, ctx.res) + expect(ctx.res.sendStatus).to.have.been.calledWith(400) + }) + }) + + describe('grantTokenAccessReadOnly', function () { + describe('normal case', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.req.params = { token: ctx.token } + ctx.req.body = { confirmedByUser: true, tokenHashPrefix: '#prefix' } + 
ctx.res.callback = resolve + ctx.TokenAccessController.grantTokenAccessReadOnly( + ctx.req, + ctx.res, + resolve + ) + }) + }) + + it('grants read-only access', function (ctx) { + expect( + ctx.TokenAccessHandler.promises.addReadOnlyUserToProject + ).to.have.been.calledWith( + ctx.user._id, + ctx.project._id, + ctx.project.owner_ref + ) + }) + + it('writes a project audit log', function (ctx) { + expect( + ctx.ProjectAuditLogHandler.promises.addEntry + ).to.have.been.calledWith( + ctx.project._id, + 'join-via-token', + ctx.user._id, + ctx.req.ip, + { privileges: 'readOnly' } + ) + }) + + it('checks if hash prefix matches', function (ctx) { + expect( + ctx.TokenAccessHandler.checkTokenHashPrefix + ).to.have.been.calledWith( + ctx.token, + '#prefix', + 'readOnly', + ctx.user._id, + { projectId: ctx.project._id, action: 'continue' } + ) + }) + }) + + describe('when the access was already granted', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.project.tokenAccessReadOnly_refs.push(ctx.user._id) + ctx.req.params = { token: ctx.token } + ctx.req.body = { confirmedByUser: true } + ctx.res.callback = resolve + ctx.TokenAccessController.grantTokenAccessReadOnly( + ctx.req, + ctx.res, + resolve + ) + }) + }) + + it("doesn't write a project audit log", function (ctx) { + expect(ctx.ProjectAuditLogHandler.promises.addEntry).to.not.have.been + .called + }) + + it('still checks if hash prefix matches', function (ctx) { + expect( + ctx.TokenAccessHandler.checkTokenHashPrefix + ).to.have.been.calledWith( + ctx.token, + undefined, + 'readOnly', + ctx.user._id, + { projectId: ctx.project._id, action: 'continue' } + ) + }) + }) + + it('returns 400 when not using a read only token', function (ctx) { + ctx.TokenAccessHandler.isReadOnlyToken.returns(false) + ctx.req.params = { token: ctx.token } + ctx.req.body = { tokenHashPrefix: '#prefix' } + ctx.TokenAccessController.grantTokenAccessReadOnly(ctx.req, ctx.res) + expect(ctx.res.sendStatus).to.have.been.calledWith(400) + }) + + describe('anonymous users', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.req.params = { token: ctx.token } + ctx.SessionManager.getLoggedInUserId.returns(null) + ctx.res.callback = resolve + + ctx.TokenAccessController.grantTokenAccessReadOnly( + ctx.req, + ctx.res, + resolve + ) + }) + }) + + it('allows anonymous users and checks the token hash', function (ctx) { + expect(ctx.res.json).to.have.been.calledWith({ + redirect: `/project/${ctx.project._id}`, + grantAnonymousAccess: 'readOnly', + }) + + expect( + ctx.TokenAccessHandler.checkTokenHashPrefix + ).to.have.been.calledWith(ctx.token, undefined, 'readOnly', null, { + projectId: ctx.project._id, + action: 'granting read-only anonymous access', + }) + }) + }) + + describe('user is owner of project', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.AuthorizationManager.promises.getPrivilegeLevelForProject.returns( + PrivilegeLevels.OWNER + ) + ctx.req.params = { token: ctx.token } + ctx.req.body = {} + ctx.res.callback = resolve + ctx.TokenAccessController.grantTokenAccessReadOnly( + ctx.req, + ctx.res, + resolve + ) + }) + }) + it('checks token hash and includes log data', function (ctx) { + expect( + ctx.TokenAccessHandler.checkTokenHashPrefix + ).to.have.been.calledWith( + ctx.token, + undefined, + 'readOnly', + ctx.user._id, + { + projectId: ctx.project._id, + action: 'user already has higher or same privilege', + } + ) + }) + }) + + it('passes Errors.NotFoundError to 
next when token access is not enabled but still checks token hash', function (ctx) { + return new Promise(resolve => { + ctx.TokenAccessHandler.tokenAccessEnabledForProject.returns(false) + ctx.req.params = { token: ctx.token } + ctx.req.body = { tokenHashPrefix: '#prefix' } + ctx.TokenAccessController.grantTokenAccessReadOnly( + ctx.req, + ctx.res, + args => { + expect(args).to.be.instanceof(ctx.Errors.NotFoundError) + + expect( + ctx.TokenAccessHandler.checkTokenHashPrefix + ).to.have.been.calledWith( + ctx.token, + '#prefix', + 'readOnly', + ctx.user._id, + { + projectId: ctx.project._id, + action: 'token access not enabled', + } + ) + + resolve() + } + ) + }) + }) + }) + + describe('ensureUserCanUseSharingUpdatesConsentPage', function () { + beforeEach(function (ctx) { + ctx.req.params = { Project_id: ctx.project._id } + }) + + describe('when not in link sharing changes test', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.AsyncFormHelper.redirect = sinon.stub().callsFake(() => resolve()) + ctx.TokenAccessController.ensureUserCanUseSharingUpdatesConsentPage( + ctx.req, + ctx.res, + resolve + ) + }) + }) + + it('redirects to the project/editor', function (ctx) { + expect(ctx.AsyncFormHelper.redirect).to.have.been.calledWith( + ctx.req, + ctx.res, + `/project/${ctx.project._id}` + ) + }) + }) + + describe('when link sharing changes test active', function () { + beforeEach(function (ctx) { + ctx.SplitTestHandler.promises.getAssignmentForUser.resolves({ + variant: 'active', + }) + }) + + describe('when user is not an invited editor and is a read write token member', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.CollaboratorsGetter.promises.isUserInvitedReadWriteMemberOfProject.resolves( + false + ) + ctx.CollaboratorsGetter.promises.userIsReadWriteTokenMember.resolves( + true + ) + ctx.next.callsFake(() => resolve()) + ctx.TokenAccessController.ensureUserCanUseSharingUpdatesConsentPage( + ctx.req, + ctx.res, + ctx.next + ) + }) + }) + + it('calls next', function (ctx) { + expect( + ctx.CollaboratorsGetter.promises + .isUserInvitedReadWriteMemberOfProject + ).to.have.been.calledWith(ctx.user._id, ctx.project._id) + expect( + ctx.CollaboratorsGetter.promises.userIsReadWriteTokenMember + ).to.have.been.calledWith(ctx.user._id, ctx.project._id) + expect(ctx.next).to.have.been.calledOnce + expect(ctx.next.firstCall.args[0]).to.not.exist + }) + }) + + describe('when user is already an invited editor', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.CollaboratorsGetter.promises.isUserInvitedReadWriteMemberOfProject.resolves( + true + ) + ctx.AsyncFormHelper.redirect = sinon + .stub() + .callsFake(() => resolve()) + ctx.TokenAccessController.ensureUserCanUseSharingUpdatesConsentPage( + ctx.req, + ctx.res, + resolve + ) + }) + }) + + it('redirects to the project/editor', function (ctx) { + expect(ctx.AsyncFormHelper.redirect).to.have.been.calledWith( + ctx.req, + ctx.res, + `/project/${ctx.project._id}` + ) + }) + }) + + describe('when user not a read write token member', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.CollaboratorsGetter.promises.userIsReadWriteTokenMember.resolves( + false + ) + ctx.AsyncFormHelper.redirect = sinon + .stub() + .callsFake(() => resolve()) + ctx.TokenAccessController.ensureUserCanUseSharingUpdatesConsentPage( + ctx.req, + ctx.res, + resolve + ) + }) + }) + + it('redirects to the project/editor', function (ctx) { + 
expect(ctx.AsyncFormHelper.redirect).to.have.been.calledWith( + ctx.req, + ctx.res, + `/project/${ctx.project._id}` + ) + }) + }) + }) + }) + + describe('moveReadWriteToCollaborators', function () { + beforeEach(function (ctx) { + ctx.req.params = { Project_id: ctx.project._id } + }) + + describe('when there are collaborator slots available', function () { + beforeEach(function (ctx) { + ctx.LimitationsManager.promises.canAcceptEditCollaboratorInvite.resolves( + true + ) + }) + + describe('previously joined token access user moving to named collaborator', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.resolves( + false + ) + ctx.res.callback = resolve + ctx.TokenAccessController.moveReadWriteToCollaborators( + ctx.req, + ctx.res, + resolve + ) + }) + }) + + it('sets the privilege level to read and write for the invited viewer', function (ctx) { + expect( + ctx.TokenAccessHandler.promises.removeReadAndWriteUserFromProject + ).to.have.been.calledWith(ctx.user._id, ctx.project._id) + expect( + ctx.CollaboratorsHandler.promises.addUserIdToProject + ).to.have.been.calledWith( + ctx.project._id, + undefined, + ctx.user._id, + PrivilegeLevels.READ_AND_WRITE + ) + expect(ctx.res.sendStatus).to.have.been.calledWith(204) + }) + }) + }) + + describe('when there are no edit collaborator slots available', function () { + beforeEach(function (ctx) { + ctx.LimitationsManager.promises.canAcceptEditCollaboratorInvite.resolves( + false + ) + }) + + describe('previously joined token access user moving to named collaborator', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.resolves( + false + ) + ctx.res.callback = resolve + ctx.TokenAccessController.moveReadWriteToCollaborators( + ctx.req, + ctx.res, + resolve + ) + }) + }) + + it('sets the privilege level to read only for the invited viewer (pendingEditor)', function (ctx) { + expect( + ctx.TokenAccessHandler.promises.removeReadAndWriteUserFromProject + ).to.have.been.calledWith(ctx.user._id, ctx.project._id) + expect( + ctx.CollaboratorsHandler.promises.addUserIdToProject + ).to.have.been.calledWith( + ctx.project._id, + undefined, + ctx.user._id, + PrivilegeLevels.READ_ONLY, + { pendingEditor: true } + ) + expect(ctx.res.sendStatus).to.have.been.calledWith(204) + }) + }) + }) + }) + + describe('moveReadWriteToReadOnly', function () { + beforeEach(function (ctx) { + ctx.req.params = { Project_id: ctx.project._id } + }) + + describe('previously joined token access user moving to anonymous viewer', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.res.callback = resolve + ctx.TokenAccessController.moveReadWriteToReadOnly( + ctx.req, + ctx.res, + resolve + ) + }) + }) + + it('removes them from read write token access refs and adds them to read only token access refs', function (ctx) { + expect( + ctx.TokenAccessHandler.promises.moveReadAndWriteUserToReadOnly + ).to.have.been.calledWith(ctx.user._id, ctx.project._id) + expect(ctx.res.sendStatus).to.have.been.calledWith(204) + }) + + it('writes a project audit log', function (ctx) { + expect( + ctx.ProjectAuditLogHandler.promises.addEntry + ).to.have.been.calledWith( + ctx.project._id, + 'readonly-via-sharing-updates', + ctx.user._id, + ctx.req.ip + ) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/TokenAccess/TokenAccessControllerTests.mjs 
b/services/web/test/unit/src/TokenAccess/TokenAccessControllerTests.mjs deleted file mode 100644 index 8097218076..0000000000 --- a/services/web/test/unit/src/TokenAccess/TokenAccessControllerTests.mjs +++ /dev/null @@ -1,1143 +0,0 @@ -import esmock from 'esmock' -import sinon from 'sinon' -import { expect } from 'chai' -import mongodb from 'mongodb-legacy' -import MockRequest from '../helpers/MockRequest.js' -import MockResponse from '../helpers/MockResponse.js' -import PrivilegeLevels from '../../../../app/src/Features/Authorization/PrivilegeLevels.js' -import { getSafeRedirectPath } from '../../../../app/src/Features/Helpers/UrlHelper.js' - -const ObjectId = mongodb.ObjectId - -const MODULE_PATH = - '../../../../app/src/Features/TokenAccess/TokenAccessController' - -describe('TokenAccessController', function () { - beforeEach(async function () { - this.token = 'abc123' - this.user = { _id: new ObjectId() } - this.project = { - _id: new ObjectId(), - owner_ref: this.user._id, - name: 'test', - tokenAccessReadAndWrite_refs: [], - tokenAccessReadOnly_refs: [], - } - this.req = new MockRequest() - this.res = new MockResponse() - this.next = sinon.stub().returns() - - this.Settings = { - siteUrl: 'https://www.dev-overleaf.com', - adminPrivilegeAvailable: false, - adminUrl: 'https://admin.dev-overleaf.com', - adminDomains: ['overleaf.com'], - } - this.TokenAccessHandler = { - TOKEN_TYPES: { - READ_ONLY: 'readOnly', - READ_AND_WRITE: 'readAndWrite', - }, - isReadAndWriteToken: sinon.stub().returns(true), - isReadOnlyToken: sinon.stub().returns(true), - tokenAccessEnabledForProject: sinon.stub().returns(true), - checkTokenHashPrefix: sinon.stub(), - makeTokenUrl: sinon.stub().returns('/'), - grantSessionTokenAccess: sinon.stub(), - promises: { - addReadOnlyUserToProject: sinon.stub().resolves(), - getProjectByToken: sinon.stub().resolves(this.project), - getV1DocPublishedInfo: sinon.stub().resolves({ allow: true }), - getV1DocInfo: sinon.stub(), - removeReadAndWriteUserFromProject: sinon.stub().resolves(), - moveReadAndWriteUserToReadOnly: sinon.stub().resolves(), - }, - } - - this.SessionManager = { - getLoggedInUserId: sinon.stub().returns(this.user._id), - getSessionUser: sinon.stub().returns(this.user._id), - } - - this.AuthenticationController = { - setRedirectInSession: sinon.stub(), - } - - this.AuthorizationManager = { - promises: { - getPrivilegeLevelForProject: sinon - .stub() - .resolves(PrivilegeLevels.NONE), - }, - } - - this.AuthorizationMiddleware = {} - - this.ProjectAuditLogHandler = { - promises: { - addEntry: sinon.stub().resolves(), - }, - } - - this.SplitTestHandler = { - promises: { - getAssignment: sinon.stub().resolves({ variant: 'default' }), - getAssignmentForUser: sinon.stub().resolves({ variant: 'default' }), - }, - } - - this.CollaboratorsInviteHandler = { - promises: { - revokeInviteForUser: sinon.stub().resolves(), - }, - } - - this.CollaboratorsHandler = { - promises: { - addUserIdToProject: sinon.stub().resolves(), - setCollaboratorPrivilegeLevel: sinon.stub().resolves(), - }, - } - - this.CollaboratorsGetter = { - promises: { - userIsReadWriteTokenMember: sinon.stub().resolves(), - isUserInvitedReadWriteMemberOfProject: sinon.stub().resolves(), - isUserInvitedMemberOfProject: sinon.stub().resolves(), - }, - } - - this.EditorRealTimeController = { emitToRoom: sinon.stub() } - - this.ProjectGetter = { - promises: { - getProject: sinon.stub().resolves(this.project), - }, - } - - this.AnalyticsManager = { - recordEventForSession: sinon.stub(), - 
recordEventForUserInBackground: sinon.stub(), - } - - this.UserGetter = { - promises: { - getUser: sinon.stub().callsFake(async (userId, filter) => { - if (userId === this.userId) { - return this.user - } else { - return null - } - }), - getUserEmail: sinon.stub().resolves(), - getUserConfirmedEmails: sinon.stub().resolves(), - }, - } - - this.LimitationsManager = { - promises: { - canAcceptEditCollaboratorInvite: sinon.stub().resolves(), - }, - } - - this.TokenAccessController = await esmock.strict(MODULE_PATH, { - '@overleaf/settings': this.Settings, - '../../../../app/src/Features/TokenAccess/TokenAccessHandler': - this.TokenAccessHandler, - '../../../../app/src/Features/Authentication/AuthenticationController': - this.AuthenticationController, - '../../../../app/src/Features/Authentication/SessionManager': - this.SessionManager, - '../../../../app/src/Features/Authorization/AuthorizationManager': - this.AuthorizationManager, - '../../../../app/src/Features/Authorization/AuthorizationMiddleware': - this.AuthorizationMiddleware, - '../../../../app/src/Features/Project/ProjectAuditLogHandler': - this.ProjectAuditLogHandler, - '../../../../app/src/Features/SplitTests/SplitTestHandler': - this.SplitTestHandler, - '../../../../app/src/Features/Errors/Errors': (this.Errors = { - NotFoundError: sinon.stub(), - }), - '../../../../app/src/Features/Collaborators/CollaboratorsHandler': - this.CollaboratorsHandler, - '../../../../app/src/Features/Collaborators/CollaboratorsInviteHandler': - this.CollaboratorsInviteHandler, - '../../../../app/src/Features/Collaborators/CollaboratorsGetter': - this.CollaboratorsGetter, - '../../../../app/src/Features/Editor/EditorRealTimeController': - this.EditorRealTimeController, - '../../../../app/src/Features/Project/ProjectGetter': this.ProjectGetter, - '../../../../app/src/Features/Helpers/AsyncFormHelper': - (this.AsyncFormHelper = { - redirect: sinon.stub(), - }), - '../../../../app/src/Features/Helpers/AdminAuthorizationHelper': - (this.AdminAuthorizationHelper = { - canRedirectToAdminDomain: sinon.stub(), - }), - '../../../../app/src/Features/Helpers/UrlHelper': (this.UrlHelper = { - getSafeAdminDomainRedirect: sinon - .stub() - .callsFake( - path => `${this.Settings.adminUrl}${getSafeRedirectPath(path)}` - ), - }), - '../../../../app/src/Features/Analytics/AnalyticsManager': - this.AnalyticsManager, - '../../../../app/src/Features/User/UserGetter': this.UserGetter, - '../../../../app/src/Features/Subscription/LimitationsManager': - this.LimitationsManager, - }) - }) - - describe('grantTokenAccessReadAndWrite', function () { - beforeEach(function () { - this.LimitationsManager.promises.canAcceptEditCollaboratorInvite.resolves( - true - ) - }) - - describe('normal case (edit slot available)', function () { - beforeEach(function (done) { - this.LimitationsManager.promises.canAcceptEditCollaboratorInvite.resolves( - true - ) - this.req.params = { token: this.token } - this.req.body = { - confirmedByUser: true, - tokenHashPrefix: '#prefix', - } - this.res.callback = done - this.TokenAccessController.grantTokenAccessReadAndWrite( - this.req, - this.res, - done - ) - }) - - it('adds the user as a read and write invited member', function () { - expect( - this.CollaboratorsHandler.promises.addUserIdToProject - ).to.have.been.calledWith( - this.project._id, - undefined, - this.user._id, - PrivilegeLevels.READ_AND_WRITE - ) - }) - - it('writes a project audit log', function () { - expect( - this.ProjectAuditLogHandler.promises.addEntry - ).to.have.been.calledWith( - 
this.project._id, - 'accept-via-link-sharing', - this.user._id, - this.req.ip, - { privileges: 'readAndWrite' } - ) - }) - - it('records a project-joined event for the user', function () { - expect( - this.AnalyticsManager.recordEventForUserInBackground - ).to.have.been.calledWith(this.user._id, 'project-joined', { - mode: 'edit', - projectId: this.project._id.toString(), - ownerId: this.project.owner_ref.toString(), - role: PrivilegeLevels.READ_AND_WRITE, - source: 'link-sharing', - }) - }) - - it('emits a project membership changed event', function () { - expect( - this.EditorRealTimeController.emitToRoom - ).to.have.been.calledWith( - this.project._id, - 'project:membership:changed', - { members: true, invites: true } - ) - }) - - it('checks token hash', function () { - expect( - this.TokenAccessHandler.checkTokenHashPrefix - ).to.have.been.calledWith( - this.token, - '#prefix', - 'readAndWrite', - this.user._id, - { projectId: this.project._id, action: 'continue' } - ) - }) - }) - - describe('when there are no edit collaborator slots available', function () { - beforeEach(function (done) { - this.LimitationsManager.promises.canAcceptEditCollaboratorInvite.resolves( - false - ) - this.req.params = { token: this.token } - this.req.body = { - confirmedByUser: true, - tokenHashPrefix: '#prefix', - } - this.res.callback = done - this.TokenAccessController.grantTokenAccessReadAndWrite( - this.req, - this.res, - done - ) - }) - - it('adds the user as a read only invited member instead (pendingEditor)', function () { - expect( - this.CollaboratorsHandler.promises.addUserIdToProject - ).to.have.been.calledWith( - this.project._id, - undefined, - this.user._id, - PrivilegeLevels.READ_ONLY, - { pendingEditor: true } - ) - }) - - it('writes a project audit log', function () { - expect( - this.ProjectAuditLogHandler.promises.addEntry - ).to.have.been.calledWith( - this.project._id, - 'accept-via-link-sharing', - this.user._id, - this.req.ip, - { privileges: 'readOnly', pendingEditor: true } - ) - }) - - it('records a project-joined event for the user', function () { - expect( - this.AnalyticsManager.recordEventForUserInBackground - ).to.have.been.calledWith(this.user._id, 'project-joined', { - mode: 'view', - projectId: this.project._id.toString(), - pendingEditor: true, - ownerId: this.project.owner_ref.toString(), - role: PrivilegeLevels.READ_ONLY, - source: 'link-sharing', - }) - }) - - it('emits a project membership changed event', function () { - expect( - this.EditorRealTimeController.emitToRoom - ).to.have.been.calledWith( - this.project._id, - 'project:membership:changed', - { members: true, invites: true } - ) - }) - - it('checks token hash', function () { - expect( - this.TokenAccessHandler.checkTokenHashPrefix - ).to.have.been.calledWith( - this.token, - '#prefix', - 'readAndWrite', - this.user._id, - { projectId: this.project._id, action: 'continue' } - ) - }) - }) - - describe('when the access was already granted', function () { - beforeEach(function (done) { - this.project.tokenAccessReadAndWrite_refs.push(this.user._id) - this.req.params = { token: this.token } - this.req.body = { confirmedByUser: true } - this.res.callback = done - this.TokenAccessController.grantTokenAccessReadAndWrite( - this.req, - this.res, - done - ) - }) - - it('writes a project audit log', function () { - expect( - this.ProjectAuditLogHandler.promises.addEntry - ).to.have.been.calledWith( - this.project._id, - 'accept-via-link-sharing', - this.user._id, - this.req.ip, - { privileges: 'readAndWrite' } - ) - }) 
- - it('checks token hash', function () { - expect( - this.TokenAccessHandler.checkTokenHashPrefix - ).to.have.been.calledWith( - this.token, - undefined, - 'readAndWrite', - this.user._id, - { projectId: this.project._id, action: 'continue' } - ) - }) - }) - - describe('hash prefix missing in request', function () { - beforeEach(function (done) { - this.req.params = { token: this.token } - this.req.body = { confirmedByUser: true } - this.res.callback = done - this.TokenAccessController.grantTokenAccessReadAndWrite( - this.req, - this.res, - done - ) - }) - - it('adds the user as a read and write invited member', function () { - expect( - this.CollaboratorsHandler.promises.addUserIdToProject - ).to.have.been.calledWith( - this.project._id, - undefined, - this.user._id, - PrivilegeLevels.READ_AND_WRITE - ) - }) - - it('checks the hash prefix', function () { - expect( - this.TokenAccessHandler.checkTokenHashPrefix - ).to.have.been.calledWith( - this.token, - undefined, - 'readAndWrite', - this.user._id, - { projectId: this.project._id, action: 'continue' } - ) - }) - }) - - describe('user is owner of project', function () { - beforeEach(function (done) { - this.AuthorizationManager.promises.getPrivilegeLevelForProject.returns( - PrivilegeLevels.OWNER - ) - this.req.params = { token: this.token } - this.req.body = {} - this.res.callback = done - this.TokenAccessController.grantTokenAccessReadAndWrite( - this.req, - this.res, - done - ) - }) - it('checks token hash and includes log data', function () { - expect( - this.TokenAccessHandler.checkTokenHashPrefix - ).to.have.been.calledWith( - this.token, - undefined, - 'readAndWrite', - this.user._id, - { - projectId: this.project._id, - action: 'user already has higher or same privilege', - } - ) - }) - }) - - describe('when user is not logged in', function () { - beforeEach(function () { - this.SessionManager.getLoggedInUserId.returns(null) - this.req.params = { token: this.token } - this.req.body = { tokenHashPrefix: '#prefix' } - }) - describe('ANONYMOUS_READ_AND_WRITE_ENABLED is undefined', function () { - beforeEach(function (done) { - this.res.callback = done - this.TokenAccessController.grantTokenAccessReadAndWrite( - this.req, - this.res, - done - ) - }) - - it('redirects to restricted', function () { - expect(this.res.json).to.have.been.calledWith({ - redirect: '/restricted', - anonWriteAccessDenied: true, - }) - }) - - it('checks the hash prefix and includes log data', function () { - expect( - this.TokenAccessHandler.checkTokenHashPrefix - ).to.have.been.calledWith( - this.token, - '#prefix', - 'readAndWrite', - null, - { - action: 'denied anonymous read-and-write token access', - } - ) - }) - - it('saves redirect URL with URL fragment', function () { - expect( - this.AuthenticationController.setRedirectInSession.lastCall.args[1] - ).to.equal('/#prefix') - }) - }) - - describe('ANONYMOUS_READ_AND_WRITE_ENABLED is true', function () { - beforeEach(function (done) { - this.TokenAccessHandler.ANONYMOUS_READ_AND_WRITE_ENABLED = true - this.res.callback = done - - this.TokenAccessController.grantTokenAccessReadAndWrite( - this.req, - this.res, - done - ) - }) - - it('redirects to project', function () { - expect(this.res.json).to.have.been.calledWith({ - redirect: `/project/${this.project._id}`, - grantAnonymousAccess: 'readAndWrite', - }) - }) - - it('checks the hash prefix and includes log data', function () { - expect( - this.TokenAccessHandler.checkTokenHashPrefix - ).to.have.been.calledWith( - this.token, - '#prefix', - 'readAndWrite', 
- null, - { - projectId: this.project._id, - action: 'granting read-write anonymous access', - } - ) - }) - }) - }) - - describe('when Overleaf SaaS', function () { - beforeEach(function () { - this.Settings.overleaf = {} - }) - describe('when token is for v1 project', function () { - beforeEach(function (done) { - this.TokenAccessHandler.promises.getProjectByToken.resolves(undefined) - this.TokenAccessHandler.promises.getV1DocInfo.resolves({ - exists: true, - has_owner: true, - }) - this.req.params = { token: this.token } - this.req.body = { tokenHashPrefix: '#prefix' } - this.res.callback = done - this.TokenAccessController.grantTokenAccessReadAndWrite( - this.req, - this.res, - done - ) - }) - it('returns v1 import data', function () { - expect(this.res.json).to.have.been.calledWith({ - v1Import: { - status: 'canDownloadZip', - projectId: this.token, - hasOwner: true, - name: 'Untitled', - brandInfo: undefined, - }, - }) - }) - it('checks the hash prefix and includes log data', function () { - expect( - this.TokenAccessHandler.checkTokenHashPrefix - ).to.have.been.calledWith( - this.token, - '#prefix', - 'readAndWrite', - this.user._id, - { - action: 'import v1', - } - ) - }) - }) - - describe('when token is not for a v1 or v2 project', function () { - beforeEach(function (done) { - this.TokenAccessHandler.promises.getProjectByToken.resolves(undefined) - this.TokenAccessHandler.promises.getV1DocInfo.resolves({ - exists: false, - }) - this.req.params = { token: this.token } - this.req.body = { tokenHashPrefix: '#prefix' } - this.res.callback = done - this.TokenAccessController.grantTokenAccessReadAndWrite( - this.req, - this.res, - done - ) - }) - it('returns 404', function () { - expect(this.res.sendStatus).to.have.been.calledWith(404) - }) - it('checks the hash prefix and includes log data', function () { - expect( - this.TokenAccessHandler.checkTokenHashPrefix - ).to.have.been.calledWith( - this.token, - '#prefix', - 'readAndWrite', - this.user._id, - { - action: '404', - } - ) - }) - }) - }) - - describe('not Overleaf SaaS', function () { - beforeEach(function () { - this.TokenAccessHandler.promises.getProjectByToken.resolves(undefined) - this.req.params = { token: this.token } - this.req.body = { tokenHashPrefix: '#prefix' } - }) - it('passes Errors.NotFoundError to next when project not found and still checks token hash', function (done) { - this.TokenAccessController.grantTokenAccessReadAndWrite( - this.req, - this.res, - args => { - expect(args).to.be.instanceof(this.Errors.NotFoundError) - - expect( - this.TokenAccessHandler.checkTokenHashPrefix - ).to.have.been.calledWith( - this.token, - '#prefix', - 'readAndWrite', - this.user._id, - { - action: '404', - } - ) - - done() - } - ) - }) - }) - - describe('when user is admin', function () { - const admin = { _id: new ObjectId(), isAdmin: true } - beforeEach(function () { - this.SessionManager.getLoggedInUserId.returns(admin._id) - this.SessionManager.getSessionUser.returns(admin) - this.AdminAuthorizationHelper.canRedirectToAdminDomain.returns(true) - this.req.params = { token: this.token } - this.req.body = { confirmedByUser: true, tokenHashPrefix: '#prefix' } - }) - - it('redirects if project owner is non-admin', function () { - this.UserGetter.promises.getUserConfirmedEmails = sinon - .stub() - .resolves([{ email: 'test@not-overleaf.com' }]) - this.res.callback = () => { - expect(this.res.json).to.have.been.calledWith({ - redirect: `${this.Settings.adminUrl}/#prefix`, - }) - } - 
this.TokenAccessController.grantTokenAccessReadAndWrite( - this.req, - this.res - ) - }) - - it('grants access if project owner is an internal staff', function () { - const internalStaff = { _id: new ObjectId(), isAdmin: true } - const projectFromInternalStaff = { - _id: new ObjectId(), - name: 'test', - tokenAccessReadAndWrite_refs: [], - tokenAccessReadOnly_refs: [], - owner_ref: internalStaff._id, - } - this.UserGetter.promises.getUser = sinon.stub().resolves(internalStaff) - this.UserGetter.promises.getUserConfirmedEmails = sinon - .stub() - .resolves([{ email: 'test@overleaf.com' }]) - this.TokenAccessHandler.promises.getProjectByToken = sinon - .stub() - .resolves(projectFromInternalStaff) - this.res.callback = () => { - expect( - this.CollaboratorsHandler.promises.addUserIdToProject - ).to.have.been.calledWith( - projectFromInternalStaff._id, - undefined, - admin._id, - PrivilegeLevels.READ_AND_WRITE - ) - } - this.TokenAccessController.grantTokenAccessReadAndWrite( - this.req, - this.res - ) - }) - }) - - it('passes Errors.NotFoundError to next when token access is not enabled but still checks token hash', function (done) { - this.TokenAccessHandler.tokenAccessEnabledForProject.returns(false) - this.req.params = { token: this.token } - this.req.body = { tokenHashPrefix: '#prefix' } - this.TokenAccessController.grantTokenAccessReadAndWrite( - this.req, - this.res, - args => { - expect(args).to.be.instanceof(this.Errors.NotFoundError) - - expect( - this.TokenAccessHandler.checkTokenHashPrefix - ).to.have.been.calledWith( - this.token, - '#prefix', - 'readAndWrite', - this.user._id, - { - projectId: this.project._id, - action: 'token access not enabled', - } - ) - - done() - } - ) - }) - - it('returns 400 when not using a read write token', function () { - this.TokenAccessHandler.isReadAndWriteToken.returns(false) - this.req.params = { token: this.token } - this.req.body = { tokenHashPrefix: '#prefix' } - this.TokenAccessController.grantTokenAccessReadAndWrite( - this.req, - this.res - ) - expect(this.res.sendStatus).to.have.been.calledWith(400) - }) - }) - - describe('grantTokenAccessReadOnly', function () { - describe('normal case', function () { - beforeEach(function (done) { - this.req.params = { token: this.token } - this.req.body = { confirmedByUser: true, tokenHashPrefix: '#prefix' } - this.res.callback = done - this.TokenAccessController.grantTokenAccessReadOnly( - this.req, - this.res, - done - ) - }) - - it('grants read-only access', function () { - expect( - this.TokenAccessHandler.promises.addReadOnlyUserToProject - ).to.have.been.calledWith( - this.user._id, - this.project._id, - this.project.owner_ref - ) - }) - - it('writes a project audit log', function () { - expect( - this.ProjectAuditLogHandler.promises.addEntry - ).to.have.been.calledWith( - this.project._id, - 'join-via-token', - this.user._id, - this.req.ip, - { privileges: 'readOnly' } - ) - }) - - it('checks if hash prefix matches', function () { - expect( - this.TokenAccessHandler.checkTokenHashPrefix - ).to.have.been.calledWith( - this.token, - '#prefix', - 'readOnly', - this.user._id, - { projectId: this.project._id, action: 'continue' } - ) - }) - }) - - describe('when the access was already granted', function () { - beforeEach(function (done) { - this.project.tokenAccessReadOnly_refs.push(this.user._id) - this.req.params = { token: this.token } - this.req.body = { confirmedByUser: true } - this.res.callback = done - this.TokenAccessController.grantTokenAccessReadOnly( - this.req, - this.res, - done - ) - 
}) - - it("doesn't write a project audit log", function () { - expect(this.ProjectAuditLogHandler.promises.addEntry).to.not.have.been - .called - }) - - it('still checks if hash prefix matches', function () { - expect( - this.TokenAccessHandler.checkTokenHashPrefix - ).to.have.been.calledWith( - this.token, - undefined, - 'readOnly', - this.user._id, - { projectId: this.project._id, action: 'continue' } - ) - }) - }) - - it('returns 400 when not using a read only token', function () { - this.TokenAccessHandler.isReadOnlyToken.returns(false) - this.req.params = { token: this.token } - this.req.body = { tokenHashPrefix: '#prefix' } - this.TokenAccessController.grantTokenAccessReadOnly(this.req, this.res) - expect(this.res.sendStatus).to.have.been.calledWith(400) - }) - - describe('anonymous users', function () { - beforeEach(function (done) { - this.req.params = { token: this.token } - this.SessionManager.getLoggedInUserId.returns(null) - this.res.callback = done - - this.TokenAccessController.grantTokenAccessReadOnly( - this.req, - this.res, - done - ) - }) - - it('allows anonymous users and checks the token hash', function () { - expect(this.res.json).to.have.been.calledWith({ - redirect: `/project/${this.project._id}`, - grantAnonymousAccess: 'readOnly', - }) - - expect( - this.TokenAccessHandler.checkTokenHashPrefix - ).to.have.been.calledWith(this.token, undefined, 'readOnly', null, { - projectId: this.project._id, - action: 'granting read-only anonymous access', - }) - }) - }) - - describe('user is owner of project', function () { - beforeEach(function (done) { - this.AuthorizationManager.promises.getPrivilegeLevelForProject.returns( - PrivilegeLevels.OWNER - ) - this.req.params = { token: this.token } - this.req.body = {} - this.res.callback = done - this.TokenAccessController.grantTokenAccessReadOnly( - this.req, - this.res, - done - ) - }) - it('checks token hash and includes log data', function () { - expect( - this.TokenAccessHandler.checkTokenHashPrefix - ).to.have.been.calledWith( - this.token, - undefined, - 'readOnly', - this.user._id, - { - projectId: this.project._id, - action: 'user already has higher or same privilege', - } - ) - }) - }) - - it('passes Errors.NotFoundError to next when token access is not enabled but still checks token hash', function (done) { - this.TokenAccessHandler.tokenAccessEnabledForProject.returns(false) - this.req.params = { token: this.token } - this.req.body = { tokenHashPrefix: '#prefix' } - this.TokenAccessController.grantTokenAccessReadOnly( - this.req, - this.res, - args => { - expect(args).to.be.instanceof(this.Errors.NotFoundError) - - expect( - this.TokenAccessHandler.checkTokenHashPrefix - ).to.have.been.calledWith( - this.token, - '#prefix', - 'readOnly', - this.user._id, - { - projectId: this.project._id, - action: 'token access not enabled', - } - ) - - done() - } - ) - }) - }) - - describe('ensureUserCanUseSharingUpdatesConsentPage', function () { - beforeEach(function () { - this.req.params = { Project_id: this.project._id } - }) - - describe('when not in link sharing changes test', function () { - beforeEach(function (done) { - this.AsyncFormHelper.redirect = sinon.stub().callsFake(() => done()) - this.TokenAccessController.ensureUserCanUseSharingUpdatesConsentPage( - this.req, - this.res, - done - ) - }) - - it('redirects to the project/editor', function () { - expect(this.AsyncFormHelper.redirect).to.have.been.calledWith( - this.req, - this.res, - `/project/${this.project._id}` - ) - }) - }) - - describe('when link sharing 
changes test active', function () { - beforeEach(function () { - this.SplitTestHandler.promises.getAssignmentForUser.resolves({ - variant: 'active', - }) - }) - - describe('when user is not an invited editor and is a read write token member', function () { - beforeEach(function (done) { - this.CollaboratorsGetter.promises.isUserInvitedReadWriteMemberOfProject.resolves( - false - ) - this.CollaboratorsGetter.promises.userIsReadWriteTokenMember.resolves( - true - ) - this.next.callsFake(() => done()) - this.TokenAccessController.ensureUserCanUseSharingUpdatesConsentPage( - this.req, - this.res, - this.next - ) - }) - - it('calls next', function () { - expect( - this.CollaboratorsGetter.promises - .isUserInvitedReadWriteMemberOfProject - ).to.have.been.calledWith(this.user._id, this.project._id) - expect( - this.CollaboratorsGetter.promises.userIsReadWriteTokenMember - ).to.have.been.calledWith(this.user._id, this.project._id) - expect(this.next).to.have.been.calledOnce - expect(this.next.firstCall.args[0]).to.not.exist - }) - }) - - describe('when user is already an invited editor', function () { - beforeEach(function (done) { - this.CollaboratorsGetter.promises.isUserInvitedReadWriteMemberOfProject.resolves( - true - ) - this.AsyncFormHelper.redirect = sinon.stub().callsFake(() => done()) - this.TokenAccessController.ensureUserCanUseSharingUpdatesConsentPage( - this.req, - this.res, - done - ) - }) - - it('redirects to the project/editor', function () { - expect(this.AsyncFormHelper.redirect).to.have.been.calledWith( - this.req, - this.res, - `/project/${this.project._id}` - ) - }) - }) - - describe('when user not a read write token member', function () { - beforeEach(function (done) { - this.CollaboratorsGetter.promises.userIsReadWriteTokenMember.resolves( - false - ) - this.AsyncFormHelper.redirect = sinon.stub().callsFake(() => done()) - this.TokenAccessController.ensureUserCanUseSharingUpdatesConsentPage( - this.req, - this.res, - done - ) - }) - - it('redirects to the project/editor', function () { - expect(this.AsyncFormHelper.redirect).to.have.been.calledWith( - this.req, - this.res, - `/project/${this.project._id}` - ) - }) - }) - }) - }) - - describe('moveReadWriteToCollaborators', function () { - beforeEach(function () { - this.req.params = { Project_id: this.project._id } - }) - - describe('when there are collaborator slots available', function () { - beforeEach(function () { - this.LimitationsManager.promises.canAcceptEditCollaboratorInvite.resolves( - true - ) - }) - - describe('previously joined token access user moving to named collaborator', function () { - beforeEach(function (done) { - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.resolves( - false - ) - this.res.callback = done - this.TokenAccessController.moveReadWriteToCollaborators( - this.req, - this.res, - done - ) - }) - - it('sets the privilege level to read and write for the invited viewer', function () { - expect( - this.TokenAccessHandler.promises.removeReadAndWriteUserFromProject - ).to.have.been.calledWith(this.user._id, this.project._id) - expect( - this.CollaboratorsHandler.promises.addUserIdToProject - ).to.have.been.calledWith( - this.project._id, - undefined, - this.user._id, - PrivilegeLevels.READ_AND_WRITE - ) - expect(this.res.sendStatus).to.have.been.calledWith(204) - }) - }) - }) - - describe('when there are no edit collaborator slots available', function () { - beforeEach(function () { - this.LimitationsManager.promises.canAcceptEditCollaboratorInvite.resolves( - false - ) - }) - - 
describe('previously joined token access user moving to named collaborator', function () { - beforeEach(function (done) { - this.CollaboratorsGetter.promises.isUserInvitedMemberOfProject.resolves( - false - ) - this.res.callback = done - this.TokenAccessController.moveReadWriteToCollaborators( - this.req, - this.res, - done - ) - }) - - it('sets the privilege level to read only for the invited viewer (pendingEditor)', function () { - expect( - this.TokenAccessHandler.promises.removeReadAndWriteUserFromProject - ).to.have.been.calledWith(this.user._id, this.project._id) - expect( - this.CollaboratorsHandler.promises.addUserIdToProject - ).to.have.been.calledWith( - this.project._id, - undefined, - this.user._id, - PrivilegeLevels.READ_ONLY, - { pendingEditor: true } - ) - expect(this.res.sendStatus).to.have.been.calledWith(204) - }) - }) - }) - }) - - describe('moveReadWriteToReadOnly', function () { - beforeEach(function () { - this.req.params = { Project_id: this.project._id } - }) - - describe('previously joined token access user moving to anonymous viewer', function () { - beforeEach(function (done) { - this.res.callback = done - this.TokenAccessController.moveReadWriteToReadOnly( - this.req, - this.res, - done - ) - }) - - it('removes them from read write token access refs and adds them to read only token access refs', function () { - expect( - this.TokenAccessHandler.promises.moveReadAndWriteUserToReadOnly - ).to.have.been.calledWith(this.user._id, this.project._id) - expect(this.res.sendStatus).to.have.been.calledWith(204) - }) - - it('writes a project audit log', function () { - expect( - this.ProjectAuditLogHandler.promises.addEntry - ).to.have.been.calledWith( - this.project._id, - 'readonly-via-sharing-updates', - this.user._id, - this.req.ip - ) - }) - }) - }) -}) diff --git a/services/web/test/unit/src/Uploads/ProjectUploadController.test.mjs b/services/web/test/unit/src/Uploads/ProjectUploadController.test.mjs new file mode 100644 index 0000000000..443578f747 --- /dev/null +++ b/services/web/test/unit/src/Uploads/ProjectUploadController.test.mjs @@ -0,0 +1,367 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
+/* + * decaffeinate suggestions: + * DS206: Consider reworking classes to avoid initClass + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import { expect, vi } from 'vitest' +import sinon from 'sinon' +import MockRequest from '../helpers/MockRequest.js' +import MockResponse from '../helpers/MockResponse.js' +import ArchiveErrors from '../../../../app/src/Features/Uploads/ArchiveErrors.js' + +const modulePath = + '../../../../app/src/Features/Uploads/ProjectUploadController.mjs' + +describe('ProjectUploadController', function () { + beforeEach(async function (ctx) { + let Timer + ctx.req = new MockRequest() + ctx.res = new MockResponse() + ctx.user_id = 'user-id-123' + ctx.metrics = { + Timer: (Timer = (function () { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub() + } + } + Timer.initClass() + return Timer + })()), + } + ctx.SessionManager = { + getLoggedInUserId: sinon.stub().returns(ctx.user_id), + } + ctx.ProjectLocator = { + promises: {}, + } + ctx.EditorController = { + promises: {}, + } + + vi.doMock('multer', () => ({ + default: sinon.stub(), + })) + + vi.doMock('@overleaf/settings', () => ({ + default: { path: {} }, + })) + + vi.doMock( + '../../../../app/src/Features/Uploads/ProjectUploadManager', + () => ({ + default: (ctx.ProjectUploadManager = {}), + }) + ) + + vi.doMock( + '../../../../app/src/Features/Uploads/FileSystemImportManager', + () => ({ + default: (ctx.FileSystemImportManager = {}), + }) + ) + + vi.doMock('@overleaf/metrics', () => ({ + default: ctx.metrics, + })) + + vi.doMock( + '../../../../app/src/Features/Authentication/SessionManager', + () => ({ + default: ctx.SessionManager, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Uploads/ArchiveErrors', + () => ArchiveErrors + ) + + vi.doMock('../../../../app/src/Features/Project/ProjectLocator', () => ({ + default: ctx.ProjectLocator, + })) + + vi.doMock('../../../../app/src/Features/Editor/EditorController', () => ({ + default: ctx.EditorController, + })) + + vi.doMock('fs', () => ({ + default: (ctx.fs = {}), + })) + + ctx.ProjectUploadController = (await import(modulePath)).default + }) + + describe('uploadProject', function () { + beforeEach(function (ctx) { + ctx.path = '/path/to/file/on/disk.zip' + ctx.fileName = 'filename.zip' + ctx.req.file = { + path: ctx.path, + } + ctx.req.body = { + name: ctx.fileName, + } + ctx.req.session = { + user: { + _id: ctx.user_id, + }, + } + ctx.project = { _id: (ctx.project_id = 'project-id-123') } + + ctx.fs.unlink = sinon.stub() + }) + + describe('successfully', function () { + beforeEach(function (ctx) { + ctx.ProjectUploadManager.createProjectFromZipArchive = sinon + .stub() + .callsArgWith(3, null, ctx.project) + ctx.ProjectUploadController.uploadProject(ctx.req, ctx.res) + }) + + it('should create a project owned by the logged in user', function (ctx) { + ctx.ProjectUploadManager.createProjectFromZipArchive + .calledWith(ctx.user_id) + .should.equal(true) + }) + + it('should create a project with the same name as the zip archive', function (ctx) { + ctx.ProjectUploadManager.createProjectFromZipArchive + .calledWith(sinon.match.any, 'filename', sinon.match.any) + .should.equal(true) + }) + + it('should create a project from the zip archive', function (ctx) { + ctx.ProjectUploadManager.createProjectFromZipArchive + .calledWith(sinon.match.any, sinon.match.any, ctx.path) + .should.equal(true) + }) + + it('should return a successful response to the FileUploader client', function 
(ctx) { + expect(ctx.res.body).to.deep.equal( + JSON.stringify({ + success: true, + project_id: ctx.project_id, + }) + ) + }) + + it('should record the time taken to do the upload', function (ctx) { + ctx.metrics.Timer.prototype.done.called.should.equal(true) + }) + + it('should remove the uploaded file', function (ctx) { + ctx.fs.unlink.calledWith(ctx.path).should.equal(true) + }) + }) + + describe('when ProjectUploadManager.createProjectFromZipArchive fails', function () { + beforeEach(function (ctx) { + ctx.ProjectUploadManager.createProjectFromZipArchive = sinon + .stub() + .callsArgWith(3, new Error('Something went wrong'), ctx.project) + ctx.ProjectUploadController.uploadProject(ctx.req, ctx.res) + }) + + it('should return a failed response to the FileUploader client', function (ctx) { + expect(ctx.res.body).to.deep.equal( + JSON.stringify({ success: false, error: 'upload_failed' }) + ) + }) + }) + + describe('when ProjectUploadManager.createProjectFromZipArchive reports the file as invalid', function () { + beforeEach(function (ctx) { + ctx.ProjectUploadManager.createProjectFromZipArchive = sinon + .stub() + .callsArgWith( + 3, + new ArchiveErrors.ZipContentsTooLargeError(), + ctx.project + ) + ctx.ProjectUploadController.uploadProject(ctx.req, ctx.res) + }) + + it('should return the reported error to the FileUploader client', function (ctx) { + expect(JSON.parse(ctx.res.body)).to.deep.equal({ + success: false, + error: 'zip_contents_too_large', + }) + }) + + it("should return an 'unprocessable entity' status code", function (ctx) { + expect(ctx.res.statusCode).to.equal(422) + }) + }) + }) + + describe('uploadFile', function () { + beforeEach(function (ctx) { + ctx.project_id = 'project-id-123' + ctx.folder_id = 'folder-id-123' + ctx.path = '/path/to/file/on/disk.png' + ctx.fileName = 'filename.png' + ctx.req.file = { + path: ctx.path, + } + ctx.req.body = { + name: ctx.fileName, + } + ctx.req.session = { + user: { + _id: ctx.user_id, + }, + } + ctx.req.params = { Project_id: ctx.project_id } + ctx.req.query = { folder_id: ctx.folder_id } + ctx.fs.unlink = sinon.stub() + }) + + describe('successfully', function () { + beforeEach(function (ctx) { + ctx.entity = { + _id: '1234', + type: 'file', + } + ctx.FileSystemImportManager.addEntity = sinon + .stub() + .callsArgWith(6, null, ctx.entity) + ctx.ProjectUploadController.uploadFile(ctx.req, ctx.res) + }) + + it('should insert the file', function (ctx) { + return ctx.FileSystemImportManager.addEntity + .calledWith( + ctx.user_id, + ctx.project_id, + ctx.folder_id, + ctx.fileName, + ctx.path + ) + .should.equal(true) + }) + + it('should return a successful response to the FileUploader client', function (ctx) { + expect(ctx.res.body).to.deep.equal( + JSON.stringify({ + success: true, + entity_id: ctx.entity._id, + entity_type: 'file', + }) + ) + }) + + it('should time the request', function (ctx) { + ctx.metrics.Timer.prototype.done.called.should.equal(true) + }) + + it('should remove the uploaded file', function (ctx) { + ctx.fs.unlink.calledWith(ctx.path).should.equal(true) + }) + }) + + describe('with folder structure', function () { + beforeEach(function (ctx) { + return new Promise(resolve => { + ctx.entity = { + _id: '1234', + type: 'file', + } + ctx.FileSystemImportManager.addEntity = sinon + .stub() + .callsArgWith(6, null, ctx.entity) + ctx.ProjectLocator.promises.findElement = sinon.stub().resolves({ + path: { fileSystem: '/test' }, + }) + ctx.EditorController.promises.mkdirp = sinon.stub().resolves({ + lastFolder: { _id: 
'folder-id' }, + }) + ctx.req.body.relativePath = 'foo/bar/' + ctx.fileName + ctx.res.json = data => { + expect(data.success).to.be.true + resolve() + } + ctx.ProjectUploadController.uploadFile(ctx.req, ctx.res) + }) + }) + + it('should insert the file', function (ctx) { + ctx.ProjectLocator.promises.findElement.should.be.calledOnceWithExactly( + { + project_id: ctx.project_id, + element_id: ctx.folder_id, + type: 'folder', + } + ) + + ctx.EditorController.promises.mkdirp.should.be.calledWith( + ctx.project_id, + '/test/foo/bar', + ctx.user_id + ) + + ctx.FileSystemImportManager.addEntity.should.be.calledOnceWith( + ctx.user_id, + ctx.project_id, + 'folder-id', + ctx.fileName, + ctx.path + ) + }) + }) + + describe('when FileSystemImportManager.addEntity returns a generic error', function () { + beforeEach(function (ctx) { + ctx.FileSystemImportManager.addEntity = sinon + .stub() + .callsArgWith(6, new Error('Sorry something went wrong')) + ctx.ProjectUploadController.uploadFile(ctx.req, ctx.res) + }) + + it('should return an unsuccessful response to the FileUploader client', function (ctx) { + expect(ctx.res.body).to.deep.equal( + JSON.stringify({ + success: false, + }) + ) + }) + }) + + describe('when FileSystemImportManager.addEntity returns a too many files error', function () { + beforeEach(function (ctx) { + ctx.FileSystemImportManager.addEntity = sinon + .stub() + .callsArgWith(6, new Error('project_has_too_many_files')) + ctx.ProjectUploadController.uploadFile(ctx.req, ctx.res) + }) + + it('should return an unsuccessful response to the FileUploader client', function (ctx) { + expect(ctx.res.body).to.deep.equal( + JSON.stringify({ + success: false, + error: 'project_has_too_many_files', + }) + ) + }) + }) + + describe('with an invalid filename', function () { + beforeEach(function (ctx) { + ctx.req.body.name = '' + ctx.ProjectUploadController.uploadFile(ctx.req, ctx.res) + }) + + it('should return a non-success response', function (ctx) { + expect(ctx.res.body).to.deep.equal( + JSON.stringify({ + success: false, + error: 'invalid_filename', + }) + ) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/Uploads/ProjectUploadControllerTests.mjs b/services/web/test/unit/src/Uploads/ProjectUploadControllerTests.mjs deleted file mode 100644 index 35682f346c..0000000000 --- a/services/web/test/unit/src/Uploads/ProjectUploadControllerTests.mjs +++ /dev/null @@ -1,335 +0,0 @@ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint.
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS206: Consider reworking classes to avoid initClass - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -import sinon from 'sinon' - -import { expect } from 'chai' - -import esmock from 'esmock' -import MockRequest from '../helpers/MockRequest.js' -import MockResponse from '../helpers/MockResponse.js' -import ArchiveErrors from '../../../../app/src/Features/Uploads/ArchiveErrors.js' - -const modulePath = - '../../../../app/src/Features/Uploads/ProjectUploadController.mjs' - -describe('ProjectUploadController', function () { - beforeEach(async function () { - let Timer - this.req = new MockRequest() - this.res = new MockResponse() - this.user_id = 'user-id-123' - this.metrics = { - Timer: (Timer = (function () { - Timer = class Timer { - static initClass() { - this.prototype.done = sinon.stub() - } - } - Timer.initClass() - return Timer - })()), - } - this.SessionManager = { - getLoggedInUserId: sinon.stub().returns(this.user_id), - } - this.ProjectLocator = { - promises: {}, - } - this.EditorController = { - promises: {}, - } - - return (this.ProjectUploadController = await esmock.strict(modulePath, { - multer: sinon.stub(), - '@overleaf/settings': { path: {} }, - '../../../../app/src/Features/Uploads/ProjectUploadManager': - (this.ProjectUploadManager = {}), - '../../../../app/src/Features/Uploads/FileSystemImportManager': - (this.FileSystemImportManager = {}), - '@overleaf/metrics': this.metrics, - '../../../../app/src/Features/Authentication/SessionManager': - this.SessionManager, - '../../../../app/src/Features/Uploads/ArchiveErrors': ArchiveErrors, - '../../../../app/src/Features/Project/ProjectLocator': - this.ProjectLocator, - '../../../../app/src/Features/Editor/EditorController': - this.EditorController, - fs: (this.fs = {}), - })) - }) - - describe('uploadProject', function () { - beforeEach(function () { - this.path = '/path/to/file/on/disk.zip' - this.name = 'filename.zip' - this.req.file = { - path: this.path, - } - this.req.body = { - name: this.name, - } - this.req.session = { - user: { - _id: this.user_id, - }, - } - this.project = { _id: (this.project_id = 'project-id-123') } - - return (this.fs.unlink = sinon.stub()) - }) - - describe('successfully', function () { - beforeEach(function () { - this.ProjectUploadManager.createProjectFromZipArchive = sinon - .stub() - .callsArgWith(3, null, this.project) - return this.ProjectUploadController.uploadProject(this.req, this.res) - }) - - it('should create a project owned by the logged in user', function () { - return this.ProjectUploadManager.createProjectFromZipArchive - .calledWith(this.user_id) - .should.equal(true) - }) - - it('should create a project with the same name as the zip archive', function () { - return this.ProjectUploadManager.createProjectFromZipArchive - .calledWith(sinon.match.any, 'filename', sinon.match.any) - .should.equal(true) - }) - - it('should create a project from the zip archive', function () { - return this.ProjectUploadManager.createProjectFromZipArchive - .calledWith(sinon.match.any, sinon.match.any, this.path) - .should.equal(true) - }) - - it('should return a successful response to the FileUploader client', function () { - return expect(this.res.body).to.deep.equal( - JSON.stringify({ - success: true, - project_id: this.project_id, - }) - ) - }) - - it('should record the time taken to do the upload', function () { - return 
this.metrics.Timer.prototype.done.called.should.equal(true) - }) - - it('should remove the uploaded file', function () { - return this.fs.unlink.calledWith(this.path).should.equal(true) - }) - }) - - describe('when ProjectUploadManager.createProjectFromZipArchive fails', function () { - beforeEach(function () { - this.ProjectUploadManager.createProjectFromZipArchive = sinon - .stub() - .callsArgWith(3, new Error('Something went wrong'), this.project) - return this.ProjectUploadController.uploadProject(this.req, this.res) - }) - - it('should return a failed response to the FileUploader client', function () { - return expect(this.res.body).to.deep.equal( - JSON.stringify({ success: false, error: 'upload_failed' }) - ) - }) - }) - - describe('when ProjectUploadManager.createProjectFromZipArchive reports the file as invalid', function () { - beforeEach(function () { - this.ProjectUploadManager.createProjectFromZipArchive = sinon - .stub() - .callsArgWith( - 3, - new ArchiveErrors.ZipContentsTooLargeError(), - this.project - ) - return this.ProjectUploadController.uploadProject(this.req, this.res) - }) - - it('should return the reported error to the FileUploader client', function () { - expect(JSON.parse(this.res.body)).to.deep.equal({ - success: false, - error: 'zip_contents_too_large', - }) - }) - - it("should return an 'unprocessable entity' status code", function () { - return expect(this.res.statusCode).to.equal(422) - }) - }) - }) - - describe('uploadFile', function () { - beforeEach(function () { - this.project_id = 'project-id-123' - this.folder_id = 'folder-id-123' - this.path = '/path/to/file/on/disk.png' - this.name = 'filename.png' - this.req.file = { - path: this.path, - } - this.req.body = { - name: this.name, - } - this.req.session = { - user: { - _id: this.user_id, - }, - } - this.req.params = { Project_id: this.project_id } - this.req.query = { folder_id: this.folder_id } - return (this.fs.unlink = sinon.stub()) - }) - - describe('successfully', function () { - beforeEach(function () { - this.entity = { - _id: '1234', - type: 'file', - } - this.FileSystemImportManager.addEntity = sinon - .stub() - .callsArgWith(6, null, this.entity) - return this.ProjectUploadController.uploadFile(this.req, this.res) - }) - - it('should insert the file', function () { - return this.FileSystemImportManager.addEntity - .calledWith( - this.user_id, - this.project_id, - this.folder_id, - this.name, - this.path - ) - .should.equal(true) - }) - - it('should return a successful response to the FileUploader client', function () { - return expect(this.res.body).to.deep.equal( - JSON.stringify({ - success: true, - entity_id: this.entity._id, - entity_type: 'file', - }) - ) - }) - - it('should time the request', function () { - return this.metrics.Timer.prototype.done.called.should.equal(true) - }) - - it('should remove the uploaded file', function () { - return this.fs.unlink.calledWith(this.path).should.equal(true) - }) - }) - - describe('with folder structure', function () { - beforeEach(function (done) { - this.entity = { - _id: '1234', - type: 'file', - } - this.FileSystemImportManager.addEntity = sinon - .stub() - .callsArgWith(6, null, this.entity) - this.ProjectLocator.promises.findElement = sinon.stub().resolves({ - path: { fileSystem: '/test' }, - }) - this.EditorController.promises.mkdirp = sinon.stub().resolves({ - lastFolder: { _id: 'folder-id' }, - }) - this.req.body.relativePath = 'foo/bar/' + this.name - this.res.json = data => { - expect(data.success).to.be.true - done() - } - 
this.ProjectUploadController.uploadFile(this.req, this.res) - }) - - it('should insert the file', function () { - this.ProjectLocator.promises.findElement.should.be.calledOnceWithExactly( - { - project_id: this.project_id, - element_id: this.folder_id, - type: 'folder', - } - ) - - this.EditorController.promises.mkdirp.should.be.calledWith( - this.project_id, - '/test/foo/bar', - this.user_id - ) - - this.FileSystemImportManager.addEntity.should.be.calledOnceWith( - this.user_id, - this.project_id, - 'folder-id', - this.name, - this.path - ) - }) - }) - - describe('when FileSystemImportManager.addEntity returns a generic error', function () { - beforeEach(function () { - this.FileSystemImportManager.addEntity = sinon - .stub() - .callsArgWith(6, new Error('Sorry something went wrong')) - return this.ProjectUploadController.uploadFile(this.req, this.res) - }) - - it('should return an unsuccessful response to the FileUploader client', function () { - return expect(this.res.body).to.deep.equal( - JSON.stringify({ - success: false, - }) - ) - }) - }) - - describe('when FileSystemImportManager.addEntity returns a too many files error', function () { - beforeEach(function () { - this.FileSystemImportManager.addEntity = sinon - .stub() - .callsArgWith(6, new Error('project_has_too_many_files')) - return this.ProjectUploadController.uploadFile(this.req, this.res) - }) - - it('should return an unsuccessful response to the FileUploader client', function () { - return expect(this.res.body).to.deep.equal( - JSON.stringify({ - success: false, - error: 'project_has_too_many_files', - }) - ) - }) - }) - - describe('with an invalid filename', function () { - beforeEach(function () { - this.req.body.name = '' - return this.ProjectUploadController.uploadFile(this.req, this.res) - }) - - it('should return a a non success response', function () { - return expect(this.res.body).to.deep.equal( - JSON.stringify({ - success: false, - error: 'invalid_filename', - }) - ) - }) - }) - }) -}) diff --git a/services/web/test/unit/src/User/UserDeleterTests.js b/services/web/test/unit/src/User/UserDeleterTests.js index 7ffaaede55..0c5e00c0f5 100644 --- a/services/web/test/unit/src/User/UserDeleterTests.js +++ b/services/web/test/unit/src/User/UserDeleterTests.js @@ -314,6 +314,15 @@ describe('UserDeleter', function () { ).to.have.been.calledWith('securityAlert', emailOptions) }) + it('should not email the user with skipEmail === true', async function () { + await this.UserDeleter.promises.deleteUser(this.userId, { + ipAddress: this.ipAddress, + skipEmail: true, + }) + expect(this.EmailHandler.promises.sendEmail).not.to.have.been + .called + }) + it('should fail when the email service fails', async function () { this.EmailHandler.promises.sendEmail = sinon .stub() diff --git a/services/web/test/unit/src/User/UserGetterTests.js b/services/web/test/unit/src/User/UserGetterTests.js index 0e0c170fd6..315a8073d6 100644 --- a/services/web/test/unit/src/User/UserGetterTests.js +++ b/services/web/test/unit/src/User/UserGetterTests.js @@ -119,6 +119,17 @@ describe('UserGetter', function () { }) }) + it('should not call mongo with empty list', function (done) { + const query = [] + const projection = { email: 1 } + this.UserGetter.getUsers(query, projection, (error, users) => { + expect(error).to.not.exist + expect(users).to.deep.equal([]) + expect(this.find).to.not.have.been.called + done() + }) + }) + it('should not allow null query', function (done) { this.UserGetter.getUser(null, {}, error => { error.should.exist diff --git 
a/services/web/test/unit/src/User/UserPagesController.test.mjs b/services/web/test/unit/src/User/UserPagesController.test.mjs new file mode 100644 index 0000000000..1fa908d1be --- /dev/null +++ b/services/web/test/unit/src/User/UserPagesController.test.mjs @@ -0,0 +1,542 @@ +import { expect, vi } from 'vitest' +import assert from 'assert' +import sinon from 'sinon' +import MockResponse from '../helpers/MockResponse.js' +import MockRequest from '../helpers/MockRequest.js' + +const modulePath = '../../../../app/src/Features/User/UserPagesController' + +describe('UserPagesController', function () { + beforeEach(async function (ctx) { + ctx.settings = { + apis: { + v1: { + url: 'some.host', + user: 'one', + pass: 'two', + }, + }, + } + ctx.user = { + _id: (ctx.user_id = 'kwjewkl'), + features: {}, + email: 'joe@example.com', + ip_address: '1.1.1.1', + session_created: 'timestamp', + thirdPartyIdentifiers: [ + { + providerId: 'google', + externalUserId: 'testId', + }, + ], + refProviders: { + mendeley: { encrypted: 'aaaa' }, + zotero: { encrypted: 'bbbb' }, + papers: { encrypted: 'cccc' }, + }, + } + ctx.adminEmail = 'group-admin-email@overleaf.com' + ctx.subscriptionViewModel = { + memberGroupSubscriptions: [], + } + + ctx.UserGetter = { + getUser: sinon.stub(), + promises: { getUser: sinon.stub() }, + } + ctx.UserSessionsManager = { getAllUserSessions: sinon.stub() } + ctx.dropboxStatus = {} + ctx.ErrorController = { notFound: sinon.stub() } + ctx.SessionManager = { + getLoggedInUserId: sinon.stub().returns(ctx.user._id), + getSessionUser: sinon.stub().returns(ctx.user), + } + ctx.NewsletterManager = { + subscribed: sinon.stub().yields(), + } + ctx.AuthenticationController = { + getRedirectFromSession: sinon.stub(), + setRedirectInSession: sinon.stub(), + } + ctx.Features = { + hasFeature: sinon.stub().returns(false), + } + ctx.PersonalAccessTokenManager = { + listTokens: sinon.stub().returns([]), + } + ctx.SubscriptionLocator = { + promises: { + getAdminEmail: sinon.stub().returns(ctx.adminEmail), + getMemberSubscriptions: sinon.stub().resolves(), + }, + } + ctx.SplitTestHandler = { + promises: { + getAssignment: sinon.stub().returns('default'), + }, + } + ctx.Modules = { + promises: { + hooks: { + fire: sinon.stub().resolves(), + }, + }, + } + + vi.doMock('@overleaf/settings', () => ({ + default: ctx.settings, + })) + + vi.doMock('../../../../app/src/Features/User/UserGetter', () => ({ + default: ctx.UserGetter, + })) + + vi.doMock('../../../../app/src/Features/User/UserSessionsManager', () => ({ + default: ctx.UserSessionsManager, + })) + + vi.doMock( + '../../../../app/src/Features/Newsletter/NewsletterManager', + () => ({ + default: ctx.NewsletterManager, + }) + ) + + vi.doMock('../../../../app/src/Features/Errors/ErrorController', () => ({ + default: ctx.ErrorController, + })) + + vi.doMock( + '../../../../app/src/Features/Authentication/AuthenticationController', + () => ({ + default: ctx.AuthenticationController, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Subscription/SubscriptionLocator', + () => ({ + default: ctx.SubscriptionLocator, + }) + ) + + vi.doMock('../../../../app/src/infrastructure/Features', () => ({ + default: ctx.Features, + })) + + vi.doMock( + '../../../../modules/oauth2-server/app/src/OAuthPersonalAccessTokenManager', + () => ({ + default: ctx.PersonalAccessTokenManager, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Authentication/SessionManager', + () => ({ + default: ctx.SessionManager, + }) + ) + + vi.doMock( + 
'../../../../app/src/Features/SplitTests/SplitTestHandler', + () => ({ + default: ctx.SplitTestHandler, + }) + ) + + vi.doMock('../../../../app/src/infrastructure/Modules', () => ({ + default: ctx.Modules, + })) + ctx.request = sinon.stub() + vi.doMock('request', () => ({ + default: ctx.request, + })) + + ctx.UserPagesController = (await import(modulePath)).default + ctx.req = new MockRequest() + ctx.req.session.user = ctx.user + ctx.res = new MockResponse() + }) + + describe('registerPage', function () { + it('should render the register page', function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + ctx.res.renderedTemplate.should.equal('user/register') + resolve() + } + ctx.UserPagesController.registerPage(ctx.req, ctx.res, resolve) + }) + }) + + it('should set sharedProjectData', function (ctx) { + return new Promise(resolve => { + ctx.req.session.sharedProjectData = { + project_name: 'myProject', + user_first_name: 'user_first_name_here', + } + + ctx.res.callback = () => { + ctx.res.renderedVariables.sharedProjectData.project_name.should.equal( + 'myProject' + ) + ctx.res.renderedVariables.sharedProjectData.user_first_name.should.equal( + 'user_first_name_here' + ) + resolve() + } + ctx.UserPagesController.registerPage(ctx.req, ctx.res, resolve) + }) + }) + + it('should set newTemplateData', function (ctx) { + return new Promise(resolve => { + ctx.req.session.templateData = { templateName: 'templateName' } + + ctx.res.callback = () => { + ctx.res.renderedVariables.newTemplateData.templateName.should.equal( + 'templateName' + ) + resolve() + } + ctx.UserPagesController.registerPage(ctx.req, ctx.res, resolve) + }) + }) + + it('should not set the newTemplateData if there is nothing in the session', function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + assert.equal( + ctx.res.renderedVariables.newTemplateData.templateName, + undefined + ) + resolve() + } + ctx.UserPagesController.registerPage(ctx.req, ctx.res, resolve) + }) + }) + }) + + describe('loginForm', function () { + it('should render the login page', function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + ctx.res.renderedTemplate.should.equal('user/login') + resolve() + } + ctx.UserPagesController.loginPage(ctx.req, ctx.res, resolve) + }) + }) + + describe('when an explicit redirect is set via query string', function () { + beforeEach(function (ctx) { + ctx.AuthenticationController.getRedirectFromSession = sinon + .stub() + .returns(null) + ctx.AuthenticationController.setRedirectInSession = sinon.stub() + ctx.req.query.redir = '/somewhere/in/particular' + }) + + it('should set a redirect', function (ctx) { + return new Promise(resolve => { + ctx.res.callback = page => { + ctx.AuthenticationController.setRedirectInSession.callCount.should.equal( + 1 + ) + expect( + ctx.AuthenticationController.setRedirectInSession.lastCall.args[1] + ).to.equal(ctx.req.query.redir) + resolve() + } + ctx.UserPagesController.loginPage(ctx.req, ctx.res, resolve) + }) + }) + }) + }) + + describe('sessionsPage', function () { + beforeEach(function (ctx) { + ctx.UserSessionsManager.getAllUserSessions.callsArgWith(2, null, []) + }) + + it('should render user/sessions', function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + ctx.res.renderedTemplate.should.equal('user/sessions') + resolve() + } + ctx.UserPagesController.sessionsPage(ctx.req, ctx.res, resolve) + }) + }) + + it('should include current session data in the view', function (ctx) { + return 
new Promise(resolve => { + ctx.res.callback = () => { + expect(ctx.res.renderedVariables.currentSession).to.deep.equal({ + ip_address: '1.1.1.1', + session_created: 'timestamp', + }) + resolve() + } + ctx.UserPagesController.sessionsPage(ctx.req, ctx.res, resolve) + }) + }) + + it('should have called getAllUserSessions', function (ctx) { + return new Promise(resolve => { + ctx.res.callback = page => { + ctx.UserSessionsManager.getAllUserSessions.callCount.should.equal(1) + resolve() + } + ctx.UserPagesController.sessionsPage(ctx.req, ctx.res, resolve) + }) + }) + + describe('when getAllUserSessions produces an error', function () { + beforeEach(function (ctx) { + ctx.UserSessionsManager.getAllUserSessions.callsArgWith( + 2, + new Error('woops') + ) + }) + + it('should call next with an error', function (ctx) { + return new Promise(resolve => { + ctx.next = err => { + assert(err !== null) + assert(err instanceof Error) + resolve() + } + ctx.UserPagesController.sessionsPage(ctx.req, ctx.res, ctx.next) + }) + }) + }) + }) + + describe('emailPreferencesPage', function () { + beforeEach(function (ctx) { + ctx.UserGetter.getUser = sinon.stub().yields(null, ctx.user) + }) + + it('render page with subscribed status', function (ctx) { + return new Promise(resolve => { + ctx.NewsletterManager.subscribed.yields(null, true) + ctx.res.callback = () => { + ctx.res.renderedTemplate.should.equal('user/email-preferences') + ctx.res.renderedVariables.title.should.equal('newsletter_info_title') + ctx.res.renderedVariables.subscribed.should.equal(true) + resolve() + } + ctx.UserPagesController.emailPreferencesPage(ctx.req, ctx.res, resolve) + }) + }) + + it('render page with unsubscribed status', function (ctx) { + return new Promise(resolve => { + ctx.NewsletterManager.subscribed.yields(null, false) + ctx.res.callback = () => { + ctx.res.renderedTemplate.should.equal('user/email-preferences') + ctx.res.renderedVariables.title.should.equal('newsletter_info_title') + ctx.res.renderedVariables.subscribed.should.equal(false) + resolve() + } + ctx.UserPagesController.emailPreferencesPage(ctx.req, ctx.res, resolve) + }) + }) + }) + + describe('settingsPage', function () { + beforeEach(function (ctx) { + ctx.request.get = sinon + .stub() + .callsArgWith(1, null, { statusCode: 200 }, { has_password: true }) + ctx.UserGetter.promises.getUser = sinon.stub().resolves(ctx.user) + }) + + it('should render user/settings', function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + ctx.res.renderedTemplate.should.equal('user/settings') + resolve() + } + ctx.UserPagesController.settingsPage(ctx.req, ctx.res, resolve) + }) + }) + + it('should send user', function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + ctx.res.renderedVariables.user.id.should.equal(ctx.user._id) + ctx.res.renderedVariables.user.email.should.equal(ctx.user.email) + resolve() + } + ctx.UserPagesController.settingsPage(ctx.req, ctx.res, resolve) + }) + }) + + it("should set 'shouldAllowEditingDetails' to true", function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + ctx.res.renderedVariables.shouldAllowEditingDetails.should.equal(true) + resolve() + } + ctx.UserPagesController.settingsPage(ctx.req, ctx.res, resolve) + }) + }) + + it('should restructure thirdPartyIdentifiers data for template use', function (ctx) { + return new Promise(resolve => { + const expectedResult = { + google: 'testId', + } + ctx.res.callback = () => { + 
expect(ctx.res.renderedVariables.thirdPartyIds).to.include( + expectedResult + ) + resolve() + } + ctx.UserPagesController.settingsPage(ctx.req, ctx.res, resolve) + }) + }) + + it("should set and clear 'projectSyncSuccessMessage'", function (ctx) { + return new Promise(resolve => { + ctx.req.session.projectSyncSuccessMessage = 'Some Sync Success' + ctx.res.callback = () => { + ctx.res.renderedVariables.projectSyncSuccessMessage.should.equal( + 'Some Sync Success' + ) + expect(ctx.req.session.projectSyncSuccessMessage).to.not.exist + resolve() + } + ctx.UserPagesController.settingsPage(ctx.req, ctx.res, resolve) + }) + }) + + it('should cast refProviders to booleans', function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + expect(ctx.res.renderedVariables.user.refProviders).to.deep.equal({ + mendeley: true, + papers: true, + zotero: true, + }) + resolve() + } + ctx.UserPagesController.settingsPage(ctx.req, ctx.res, resolve) + }) + }) + + it('should send the correct managed user admin email', function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + expect( + ctx.res.renderedVariables.currentManagedUserAdminEmail + ).to.equal(ctx.adminEmail) + resolve() + } + ctx.UserPagesController.settingsPage(ctx.req, ctx.res, resolve) + }) + }) + + it('should send info for groups with SSO enabled', function (ctx) { + return new Promise(resolve => { + ctx.user.enrollment = { + sso: [ + { + groupId: 'abc123abc123', + primary: true, + linkedAt: new Date(), + }, + ], + } + const group1 = { + _id: 'abc123abc123', + teamName: 'Group SSO Rulz', + admin_id: { + email: 'admin.email@ssolove.com', + }, + linked: true, + } + const group2 = { + _id: 'def456def456', + admin_id: { + email: 'someone.else@noname.co.uk', + }, + linked: false, + } + + ctx.Modules.promises.hooks.fire + .withArgs('getUserGroupsSSOEnrollmentStatus') + .resolves([[group1, group2]]) + + ctx.res.callback = () => { + expect( + ctx.res.renderedVariables.memberOfSSOEnabledGroups + ).to.deep.equal([ + { + groupId: 'abc123abc123', + groupName: 'Group SSO Rulz', + adminEmail: 'admin.email@ssolove.com', + linked: true, + }, + { + groupId: 'def456def456', + groupName: undefined, + adminEmail: 'someone.else@noname.co.uk', + linked: false, + }, + ]) + resolve() + } + + ctx.UserPagesController.settingsPage(ctx.req, ctx.res, resolve) + }) + }) + + describe('when ldap.updateUserDetailsOnLogin is true', function () { + beforeEach(function (ctx) { + ctx.settings.ldap = { updateUserDetailsOnLogin: true } + }) + + afterEach(function (ctx) { + delete ctx.settings.ldap + }) + + it('should set "shouldAllowEditingDetails" to false', function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + ctx.res.renderedVariables.shouldAllowEditingDetails.should.equal( + false + ) + resolve() + } + ctx.UserPagesController.settingsPage(ctx.req, ctx.res, resolve) + }) + }) + }) + + describe('when saml.updateUserDetailsOnLogin is true', function () { + beforeEach(function (ctx) { + ctx.settings.saml = { updateUserDetailsOnLogin: true } + }) + + afterEach(function (ctx) { + delete ctx.settings.saml + }) + + it('should set "shouldAllowEditingDetails" to false', function (ctx) { + return new Promise(resolve => { + ctx.res.callback = () => { + ctx.res.renderedVariables.shouldAllowEditingDetails.should.equal( + false + ) + resolve() + } + ctx.UserPagesController.settingsPage(ctx.req, ctx.res, resolve) + }) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/User/UserPagesControllerTests.mjs 
b/services/web/test/unit/src/User/UserPagesControllerTests.mjs deleted file mode 100644 index 6b19ef03f5..0000000000 --- a/services/web/test/unit/src/User/UserPagesControllerTests.mjs +++ /dev/null @@ -1,453 +0,0 @@ -import esmock from 'esmock' -import assert from 'assert' -import sinon from 'sinon' -import { expect } from 'chai' -import MockResponse from '../helpers/MockResponse.js' -import MockRequest from '../helpers/MockRequest.js' - -const modulePath = new URL( - '../../../../app/src/Features/User/UserPagesController', - import.meta.url -).pathname - -describe('UserPagesController', function () { - beforeEach(async function () { - this.settings = { - apis: { - v1: { - url: 'some.host', - user: 'one', - pass: 'two', - }, - }, - } - this.user = { - _id: (this.user_id = 'kwjewkl'), - features: {}, - email: 'joe@example.com', - ip_address: '1.1.1.1', - session_created: 'timestamp', - thirdPartyIdentifiers: [ - { - providerId: 'google', - externalUserId: 'testId', - }, - ], - refProviders: { - mendeley: { encrypted: 'aaaa' }, - zotero: { encrypted: 'bbbb' }, - papers: { encrypted: 'cccc' }, - }, - } - this.adminEmail = 'group-admin-email@overleaf.com' - this.subscriptionViewModel = { - memberGroupSubscriptions: [], - } - - this.UserGetter = { - getUser: sinon.stub(), - promises: { getUser: sinon.stub() }, - } - this.UserSessionsManager = { getAllUserSessions: sinon.stub() } - this.dropboxStatus = {} - this.ErrorController = { notFound: sinon.stub() } - this.SessionManager = { - getLoggedInUserId: sinon.stub().returns(this.user._id), - getSessionUser: sinon.stub().returns(this.user), - } - this.NewsletterManager = { - subscribed: sinon.stub().yields(), - } - this.AuthenticationController = { - getRedirectFromSession: sinon.stub(), - setRedirectInSession: sinon.stub(), - } - this.Features = { - hasFeature: sinon.stub().returns(false), - } - this.PersonalAccessTokenManager = { - listTokens: sinon.stub().returns([]), - } - this.SubscriptionLocator = { - promises: { - getAdminEmail: sinon.stub().returns(this.adminEmail), - getMemberSubscriptions: sinon.stub().resolves(), - }, - } - this.SplitTestHandler = { - promises: { - getAssignment: sinon.stub().returns('default'), - }, - } - this.Modules = { - promises: { - hooks: { - fire: sinon.stub().resolves(), - }, - }, - } - this.UserPagesController = await esmock.strict(modulePath, { - '@overleaf/settings': this.settings, - '../../../../app/src/Features/User/UserGetter': this.UserGetter, - '../../../../app/src/Features/User/UserSessionsManager': - this.UserSessionsManager, - '../../../../app/src/Features/Newsletter/NewsletterManager': - this.NewsletterManager, - '../../../../app/src/Features/Errors/ErrorController': - this.ErrorController, - '../../../../app/src/Features/Authentication/AuthenticationController': - this.AuthenticationController, - '../../../../app/src/Features/Subscription/SubscriptionLocator': - this.SubscriptionLocator, - '../../../../app/src/infrastructure/Features': this.Features, - '../../../../modules/oauth2-server/app/src/OAuthPersonalAccessTokenManager': - this.PersonalAccessTokenManager, - '../../../../app/src/Features/Authentication/SessionManager': - this.SessionManager, - '../../../../app/src/Features/SplitTests/SplitTestHandler': - this.SplitTestHandler, - '../../../../app/src/infrastructure/Modules': this.Modules, - request: (this.request = sinon.stub()), - }) - this.req = new MockRequest() - this.req.session.user = this.user - this.res = new MockResponse() - }) - - describe('registerPage', function () { - it('should 
render the register page', function (done) { - this.res.callback = () => { - this.res.renderedTemplate.should.equal('user/register') - done() - } - this.UserPagesController.registerPage(this.req, this.res, done) - }) - - it('should set sharedProjectData', function (done) { - this.req.session.sharedProjectData = { - project_name: 'myProject', - user_first_name: 'user_first_name_here', - } - - this.res.callback = () => { - this.res.renderedVariables.sharedProjectData.project_name.should.equal( - 'myProject' - ) - this.res.renderedVariables.sharedProjectData.user_first_name.should.equal( - 'user_first_name_here' - ) - done() - } - this.UserPagesController.registerPage(this.req, this.res, done) - }) - - it('should set newTemplateData', function (done) { - this.req.session.templateData = { templateName: 'templateName' } - - this.res.callback = () => { - this.res.renderedVariables.newTemplateData.templateName.should.equal( - 'templateName' - ) - done() - } - this.UserPagesController.registerPage(this.req, this.res, done) - }) - - it('should not set the newTemplateData if there is nothing in the session', function (done) { - this.res.callback = () => { - assert.equal( - this.res.renderedVariables.newTemplateData.templateName, - undefined - ) - done() - } - this.UserPagesController.registerPage(this.req, this.res, done) - }) - }) - - describe('loginForm', function () { - it('should render the login page', function (done) { - this.res.callback = () => { - this.res.renderedTemplate.should.equal('user/login') - done() - } - this.UserPagesController.loginPage(this.req, this.res, done) - }) - - describe('when an explicit redirect is set via query string', function () { - beforeEach(function () { - this.AuthenticationController.getRedirectFromSession = sinon - .stub() - .returns(null) - this.AuthenticationController.setRedirectInSession = sinon.stub() - this.req.query.redir = '/somewhere/in/particular' - }) - - it('should set a redirect', function (done) { - this.res.callback = page => { - this.AuthenticationController.setRedirectInSession.callCount.should.equal( - 1 - ) - expect( - this.AuthenticationController.setRedirectInSession.lastCall.args[1] - ).to.equal(this.req.query.redir) - done() - } - this.UserPagesController.loginPage(this.req, this.res, done) - }) - }) - }) - - describe('sessionsPage', function () { - beforeEach(function () { - this.UserSessionsManager.getAllUserSessions.callsArgWith(2, null, []) - }) - - it('should render user/sessions', function (done) { - this.res.callback = () => { - this.res.renderedTemplate.should.equal('user/sessions') - done() - } - this.UserPagesController.sessionsPage(this.req, this.res, done) - }) - - it('should include current session data in the view', function (done) { - this.res.callback = () => { - expect(this.res.renderedVariables.currentSession).to.deep.equal({ - ip_address: '1.1.1.1', - session_created: 'timestamp', - }) - done() - } - this.UserPagesController.sessionsPage(this.req, this.res, done) - }) - - it('should have called getAllUserSessions', function (done) { - this.res.callback = page => { - this.UserSessionsManager.getAllUserSessions.callCount.should.equal(1) - done() - } - this.UserPagesController.sessionsPage(this.req, this.res, done) - }) - - describe('when getAllUserSessions produces an error', function () { - beforeEach(function () { - this.UserSessionsManager.getAllUserSessions.callsArgWith( - 2, - new Error('woops') - ) - }) - - it('should call next with an error', function (done) { - this.next = err => { - assert(err !== null) - 
assert(err instanceof Error) - done() - } - this.UserPagesController.sessionsPage(this.req, this.res, this.next) - }) - }) - }) - - describe('emailPreferencesPage', function () { - beforeEach(function () { - this.UserGetter.getUser = sinon.stub().yields(null, this.user) - }) - - it('render page with subscribed status', function (done) { - this.NewsletterManager.subscribed.yields(null, true) - this.res.callback = () => { - this.res.renderedTemplate.should.equal('user/email-preferences') - this.res.renderedVariables.title.should.equal('newsletter_info_title') - this.res.renderedVariables.subscribed.should.equal(true) - done() - } - this.UserPagesController.emailPreferencesPage(this.req, this.res, done) - }) - - it('render page with unsubscribed status', function (done) { - this.NewsletterManager.subscribed.yields(null, false) - this.res.callback = () => { - this.res.renderedTemplate.should.equal('user/email-preferences') - this.res.renderedVariables.title.should.equal('newsletter_info_title') - this.res.renderedVariables.subscribed.should.equal(false) - done() - } - this.UserPagesController.emailPreferencesPage(this.req, this.res, done) - }) - }) - - describe('settingsPage', function () { - beforeEach(function () { - this.request.get = sinon - .stub() - .callsArgWith(1, null, { statusCode: 200 }, { has_password: true }) - this.UserGetter.promises.getUser = sinon.stub().resolves(this.user) - }) - - it('should render user/settings', function (done) { - this.res.callback = () => { - this.res.renderedTemplate.should.equal('user/settings') - done() - } - this.UserPagesController.settingsPage(this.req, this.res, done) - }) - - it('should send user', function (done) { - this.res.callback = () => { - this.res.renderedVariables.user.id.should.equal(this.user._id) - this.res.renderedVariables.user.email.should.equal(this.user.email) - done() - } - this.UserPagesController.settingsPage(this.req, this.res, done) - }) - - it("should set 'shouldAllowEditingDetails' to true", function (done) { - this.res.callback = () => { - this.res.renderedVariables.shouldAllowEditingDetails.should.equal(true) - done() - } - this.UserPagesController.settingsPage(this.req, this.res, done) - }) - - it('should restructure thirdPartyIdentifiers data for template use', function (done) { - const expectedResult = { - google: 'testId', - } - this.res.callback = () => { - expect(this.res.renderedVariables.thirdPartyIds).to.include( - expectedResult - ) - done() - } - this.UserPagesController.settingsPage(this.req, this.res, done) - }) - - it("should set and clear 'projectSyncSuccessMessage'", function (done) { - this.req.session.projectSyncSuccessMessage = 'Some Sync Success' - this.res.callback = () => { - this.res.renderedVariables.projectSyncSuccessMessage.should.equal( - 'Some Sync Success' - ) - expect(this.req.session.projectSyncSuccessMessage).to.not.exist - done() - } - this.UserPagesController.settingsPage(this.req, this.res, done) - }) - - it('should cast refProviders to booleans', function (done) { - this.res.callback = () => { - expect(this.res.renderedVariables.user.refProviders).to.deep.equal({ - mendeley: true, - papers: true, - zotero: true, - }) - done() - } - this.UserPagesController.settingsPage(this.req, this.res, done) - }) - - it('should send the correct managed user admin email', function (done) { - this.res.callback = () => { - expect( - this.res.renderedVariables.currentManagedUserAdminEmail - ).to.equal(this.adminEmail) - done() - } - this.UserPagesController.settingsPage(this.req, this.res, done) - }) - 
- it('should send info for groups with SSO enabled', function (done) { - this.user.enrollment = { - sso: [ - { - groupId: 'abc123abc123', - primary: true, - linkedAt: new Date(), - }, - ], - } - const group1 = { - _id: 'abc123abc123', - teamName: 'Group SSO Rulz', - admin_id: { - email: 'admin.email@ssolove.com', - }, - linked: true, - } - const group2 = { - _id: 'def456def456', - admin_id: { - email: 'someone.else@noname.co.uk', - }, - linked: false, - } - - this.Modules.promises.hooks.fire - .withArgs('getUserGroupsSSOEnrollmentStatus') - .resolves([[group1, group2]]) - - this.res.callback = () => { - expect( - this.res.renderedVariables.memberOfSSOEnabledGroups - ).to.deep.equal([ - { - groupId: 'abc123abc123', - groupName: 'Group SSO Rulz', - adminEmail: 'admin.email@ssolove.com', - linked: true, - }, - { - groupId: 'def456def456', - groupName: undefined, - adminEmail: 'someone.else@noname.co.uk', - linked: false, - }, - ]) - done() - } - - this.UserPagesController.settingsPage(this.req, this.res, done) - }) - - describe('when ldap.updateUserDetailsOnLogin is true', function () { - beforeEach(function () { - this.settings.ldap = { updateUserDetailsOnLogin: true } - }) - - afterEach(function () { - delete this.settings.ldap - }) - - it('should set "shouldAllowEditingDetails" to false', function (done) { - this.res.callback = () => { - this.res.renderedVariables.shouldAllowEditingDetails.should.equal( - false - ) - done() - } - this.UserPagesController.settingsPage(this.req, this.res, done) - }) - }) - - describe('when saml.updateUserDetailsOnLogin is true', function () { - beforeEach(function () { - this.settings.saml = { updateUserDetailsOnLogin: true } - }) - - afterEach(function () { - delete this.settings.saml - }) - - it('should set "shouldAllowEditingDetails" to false', function (done) { - this.res.callback = () => { - this.res.renderedVariables.shouldAllowEditingDetails.should.equal( - false - ) - done() - } - this.UserPagesController.settingsPage(this.req, this.res, done) - }) - }) - }) -}) diff --git a/services/web/test/unit/src/User/UserUpdaterTests.js b/services/web/test/unit/src/User/UserUpdaterTests.js index 5832bc4656..2803e6d6f2 100644 --- a/services/web/test/unit/src/User/UserUpdaterTests.js +++ b/services/web/test/unit/src/User/UserUpdaterTests.js @@ -59,11 +59,6 @@ describe('UserUpdater', function () { changeEmail: sinon.stub().resolves(), }, } - this.RecurlyWrapper = { - promises: { - updateAccountEmailAddress: sinon.stub().resolves(), - }, - } this.AnalyticsManager = { recordEventForUserInBackground: sinon.stub(), } @@ -264,9 +259,11 @@ describe('UserUpdater', function () { expect( this.NewsletterManager.promises.changeEmail ).to.have.been.calledWith(this.user, this.newEmail) - expect( - this.RecurlyWrapper.promises.updateAccountEmailAddress - ).to.have.been.calledWith(this.user._id, this.newEmail) + expect(this.Modules.promises.hooks.fire).to.have.been.calledWith( + 'updateAccountEmailAddress', + this.user._id, + this.newEmail + ) }) it('validates email', async function () { @@ -615,9 +612,11 @@ describe('UserUpdater', function () { expect( this.NewsletterManager.promises.changeEmail ).to.have.been.calledWith(this.user, this.newEmail) - expect( - this.RecurlyWrapper.promises.updateAccountEmailAddress - ).to.have.been.calledWith(this.user._id, this.newEmail) + expect(this.Modules.promises.hooks.fire).to.have.been.calledWith( + 'updateAccountEmailAddress', + this.user._id, + this.newEmail + ) }) it('handles Mongo errors', async function () { diff --git 
a/services/web/test/unit/src/UserMembership/UserMembershipController.test.mjs b/services/web/test/unit/src/UserMembership/UserMembershipController.test.mjs new file mode 100644 index 0000000000..18e2d8526b --- /dev/null +++ b/services/web/test/unit/src/UserMembership/UserMembershipController.test.mjs @@ -0,0 +1,448 @@ +import { expect, vi } from 'vitest' +import sinon from 'sinon' +import MockRequest from '../helpers/MockRequest.js' +import MockResponse from '../helpers/MockResponse.js' +import EntityConfigs from '../../../../app/src/Features/UserMembership/UserMembershipEntityConfigs.js' +import Errors from '../../../../app/src/Features/Errors/Errors.js' +import { + UserIsManagerError, + UserNotFoundError, + UserAlreadyAddedError, +} from '../../../../app/src/Features/UserMembership/UserMembershipErrors.js' +const assertCalledWith = sinon.assert.calledWith + +const modulePath = + '../../../../app/src/Features/UserMembership/UserMembershipController.mjs' + +vi.mock( + '../../../../app/src/Features/UserMembership/UserMembershipErrors.js', + () => + vi.importActual( + '../../../../app/src/Features/UserMembership/UserMembershipErrors.js' + ) +) + +vi.mock('../../../../app/src/Features/Errors/Errors.js', () => + vi.importActual('../../../../app/src/Features/Errors/Errors.js') +) + +describe('UserMembershipController', function () { + beforeEach(async function (ctx) { + ctx.req = new MockRequest() + ctx.req.params.id = 'mock-entity-id' + ctx.user = { _id: 'mock-user-id' } + ctx.newUser = { _id: 'mock-new-user-id', email: 'new-user-email@foo.bar' } + ctx.subscription = { + _id: 'mock-subscription-id', + admin_id: 'mock-admin-id', + fetchV1Data: callback => callback(null, ctx.subscription), + } + ctx.institution = { + _id: 'mock-institution-id', + v1Id: 123, + fetchV1Data: callback => { + const institution = Object.assign({}, ctx.institution) + institution.name = 'Test Institution Name' + callback(null, institution) + }, + } + ctx.users = [ + { + _id: 'mock-member-id-1', + email: 'mock-email-1@foo.com', + last_logged_in_at: '2020-08-09T12:43:11.467Z', + last_active_at: '2021-08-09T12:43:11.467Z', + }, + { + _id: 'mock-member-id-2', + email: 'mock-email-2@foo.com', + last_logged_in_at: '2020-05-20T10:41:11.407Z', + last_active_at: '2021-05-20T10:41:11.407Z', + }, + ] + + ctx.Settings = { + managedUsers: { + enabled: false, + }, + } + + ctx.SessionManager = { + getSessionUser: sinon.stub().returns(ctx.user), + getLoggedInUserId: sinon.stub().returns(ctx.user._id), + } + ctx.SSOConfig = { + findById: sinon + .stub() + .returns({ exec: sinon.stub().resolves({ enabled: true }) }), + } + ctx.UserMembershipHandler = { + getEntity: sinon.stub().yields(null, ctx.subscription), + createEntity: sinon.stub().yields(null, ctx.institution), + getUsers: sinon.stub().yields(null, ctx.users), + addUser: sinon.stub().yields(null, ctx.newUser), + removeUser: sinon.stub().yields(null), + promises: { + getUsers: sinon.stub().resolves(ctx.users), + }, + } + ctx.SplitTestHandler = { + promises: { + getAssignment: sinon.stub().resolves({ variant: 'default' }), + }, + getAssignment: sinon.stub().yields(null, { variant: 'default' }), + } + ctx.RecurlyClient = { + promises: { + getSubscription: sinon.stub().resolves({}), + }, + } + + vi.doMock( + '../../../../app/src/Features/UserMembership/UserMembershipErrors', + () => ({ + UserIsManagerError, + UserNotFoundError, + UserAlreadyAddedError, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Authentication/SessionManager', + () => ({ + default: ctx.SessionManager, + }) 
+ ) + + vi.doMock( + '../../../../app/src/Features/SplitTests/SplitTestHandler', + () => ({ + default: ctx.SplitTestHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/UserMembership/UserMembershipHandler', + () => ({ + default: ctx.UserMembershipHandler, + }) + ) + + vi.doMock( + '../../../../app/src/Features/Subscription/RecurlyClient', + () => ({ + default: ctx.RecurlyClient, + }) + ) + + vi.doMock('@overleaf/settings', () => ({ + default: ctx.Settings, + })) + + vi.doMock('../../../../app/src/models/SSOConfig', () => ({ + SSOConfig: ctx.SSOConfig, + })) + + ctx.UserMembershipController = (await import(modulePath)).default + }) + + describe('index', function () { + beforeEach(function (ctx) { + ctx.req.entity = ctx.subscription + ctx.req.entityConfig = EntityConfigs.group + }) + + it('get users', async function (ctx) { + await ctx.UserMembershipController.manageGroupMembers(ctx.req, { + render: () => { + sinon.assert.calledWithMatch( + ctx.UserMembershipHandler.promises.getUsers, + ctx.subscription, + { modelName: 'Subscription' } + ) + }, + }) + }) + + it('render group view', async function (ctx) { + ctx.subscription.managedUsersEnabled = false + await ctx.UserMembershipController.manageGroupMembers(ctx.req, { + render: (viewPath, viewParams) => { + expect(viewPath).to.equal('user_membership/group-members-react') + expect(viewParams.users).to.deep.equal(ctx.users) + expect(viewParams.groupSize).to.equal(ctx.subscription.membersLimit) + expect(viewParams.managedUsersActive).to.equal(false) + }, + }) + }) + + it('render group view with managed users', async function (ctx) { + ctx.subscription.managedUsersEnabled = true + await ctx.UserMembershipController.manageGroupMembers(ctx.req, { + render: (viewPath, viewParams) => { + expect(viewPath).to.equal('user_membership/group-members-react') + expect(viewParams.users).to.deep.equal(ctx.users) + expect(viewParams.groupSize).to.equal(ctx.subscription.membersLimit) + expect(viewParams.managedUsersActive).to.equal(true) + expect(viewParams.isUserGroupManager).to.equal(false) + }, + }) + }) + + it('render group managers view', async function (ctx) { + ctx.req.entityConfig = EntityConfigs.groupManagers + await ctx.UserMembershipController.manageGroupManagers(ctx.req, { + render: (viewPath, viewParams) => { + expect(viewPath).to.equal('user_membership/group-managers-react') + expect(viewParams.groupSize).to.equal(undefined) + }, + }) + }) + + it('render institution view', async function (ctx) { + ctx.req.entity = ctx.institution + ctx.req.entityConfig = EntityConfigs.institution + await ctx.UserMembershipController.manageInstitutionManagers(ctx.req, { + render: (viewPath, viewParams) => { + expect(viewPath).to.equal( + 'user_membership/institution-managers-react' + ) + expect(viewParams.name).to.equal('Test Institution Name') + expect(viewParams.groupSize).to.equal(undefined) + }, + }) + }) + }) + + describe('add', function () { + beforeEach(function (ctx) { + ctx.req.body.email = ctx.newUser.email + ctx.req.entity = ctx.subscription + ctx.req.entityConfig = EntityConfigs.groupManagers + }) + + it('add user', function (ctx) { + return new Promise(resolve => { + ctx.UserMembershipController.add(ctx.req, { + json: () => { + sinon.assert.calledWithMatch( + ctx.UserMembershipHandler.addUser, + ctx.subscription, + { modelName: 'Subscription' }, + ctx.newUser.email + ) + resolve() + }, + }) + }) + }) + + it('return user object', function (ctx) { + return new Promise(resolve => { + ctx.UserMembershipController.add(ctx.req, { + json: payload 
=> { + payload.user.should.equal(ctx.newUser) + resolve() + }, + }) + }) + }) + + it('handle readOnly entity', function (ctx) { + return new Promise(resolve => { + ctx.req.entityConfig = EntityConfigs.group + ctx.UserMembershipController.add(ctx.req, null, error => { + expect(error).to.exist + expect(error).to.be.an.instanceof(Errors.NotFoundError) + resolve() + }) + }) + }) + + it('handle user already added', function (ctx) { + return new Promise(resolve => { + ctx.UserMembershipHandler.addUser.yields(new UserAlreadyAddedError()) + ctx.UserMembershipController.add(ctx.req, { + status: () => ({ + json: payload => { + expect(payload.error.code).to.equal('user_already_added') + resolve() + }, + }), + }) + }) + }) + + it('handle user not found', function (ctx) { + return new Promise(resolve => { + ctx.UserMembershipHandler.addUser.yields(new UserNotFoundError()) + ctx.UserMembershipController.add(ctx.req, { + status: () => ({ + json: payload => { + expect(payload.error.code).to.equal('user_not_found') + resolve() + }, + }), + }) + }) + }) + + it('handle invalid email', function (ctx) { + return new Promise(resolve => { + ctx.req.body.email = 'not_valid_email' + ctx.UserMembershipController.add(ctx.req, { + status: () => ({ + json: payload => { + expect(payload.error.code).to.equal('invalid_email') + resolve() + }, + }), + }) + }) + }) + }) + + describe('remove', function () { + beforeEach(function (ctx) { + ctx.req.params.userId = ctx.newUser._id + ctx.req.entity = ctx.subscription + ctx.req.entityConfig = EntityConfigs.groupManagers + }) + + it('remove user', function (ctx) { + return new Promise(resolve => { + ctx.UserMembershipController.remove(ctx.req, { + sendStatus: () => { + sinon.assert.calledWithMatch( + ctx.UserMembershipHandler.removeUser, + ctx.subscription, + { modelName: 'Subscription' }, + ctx.newUser._id + ) + resolve() + }, + }) + }) + }) + + it('handle readOnly entity', function (ctx) { + return new Promise(resolve => { + ctx.req.entityConfig = EntityConfigs.group + ctx.UserMembershipController.remove(ctx.req, null, error => { + expect(error).to.exist + expect(error).to.be.an.instanceof(Errors.NotFoundError) + resolve() + }) + }) + }) + + it('prevent self removal', function (ctx) { + return new Promise(resolve => { + ctx.req.params.userId = ctx.user._id + ctx.UserMembershipController.remove(ctx.req, { + status: () => ({ + json: payload => { + expect(payload.error.code).to.equal('managers_cannot_remove_self') + resolve() + }, + }), + }) + }) + }) + + it('prevent admin removal', function (ctx) { + return new Promise(resolve => { + ctx.UserMembershipHandler.removeUser.yields(new UserIsManagerError()) + ctx.UserMembershipController.remove(ctx.req, { + status: () => ({ + json: payload => { + expect(payload.error.code).to.equal( + 'managers_cannot_remove_admin' + ) + resolve() + }, + }), + }) + }) + }) + }) + + describe('exportCsv', function () { + beforeEach(function (ctx) { + ctx.req.entity = ctx.subscription + ctx.req.entityConfig = EntityConfigs.groupManagers + ctx.res = new MockResponse() + ctx.UserMembershipController.exportCsv(ctx.req, ctx.res) + }) + + it('get users', function (ctx) { + sinon.assert.calledWithMatch( + ctx.UserMembershipHandler.getUsers, + ctx.subscription, + { modelName: 'Subscription' } + ) + }) + + it('should set the correct content type on the request', function (ctx) { + assertCalledWith(ctx.res.contentType, 'text/csv; charset=utf-8') + }) + + it('should name the exported csv file', function (ctx) { + assertCalledWith( + ctx.res.header, + 
'Content-Disposition', + 'attachment; filename="Group.csv"' + ) + }) + + it('should export the correct csv', function (ctx) { + assertCalledWith( + ctx.res.send, + '"email","last_logged_in_at","last_active_at"\n"mock-email-1@foo.com","2020-08-09T12:43:11.467Z","2021-08-09T12:43:11.467Z"\n"mock-email-2@foo.com","2020-05-20T10:41:11.407Z","2021-05-20T10:41:11.407Z"' + ) + }) + }) + + describe('new', function () { + beforeEach(function (ctx) { + ctx.req.params.name = 'publisher' + ctx.req.params.id = 'abc' + }) + + it('renders view', function (ctx) { + return new Promise(resolve => { + ctx.UserMembershipController.new(ctx.req, { + render: (viewPath, data) => { + expect(data.entityName).to.eq('publisher') + expect(data.entityId).to.eq('abc') + resolve() + }, + }) + }) + }) + }) + + describe('create', function () { + beforeEach(function (ctx) { + ctx.req.params.name = 'institution' + ctx.req.entityConfig = EntityConfigs.institution + ctx.req.params.id = 123 + }) + + it('creates institution', function (ctx) { + return new Promise(resolve => { + ctx.UserMembershipController.create(ctx.req, { + redirect: path => { + expect(path).to.eq(EntityConfigs.institution.pathsFor(123).index) + sinon.assert.calledWithMatch( + ctx.UserMembershipHandler.createEntity, + 123, + { modelName: 'Institution' } + ) + resolve() + }, + }) + }) + }) + }) +}) diff --git a/services/web/test/unit/src/UserMembership/UserMembershipControllerTests.mjs b/services/web/test/unit/src/UserMembership/UserMembershipControllerTests.mjs deleted file mode 100644 index f6dedf2097..0000000000 --- a/services/web/test/unit/src/UserMembership/UserMembershipControllerTests.mjs +++ /dev/null @@ -1,380 +0,0 @@ -import sinon from 'sinon' -import { expect } from 'chai' -import esmock from 'esmock' -import MockRequest from '../helpers/MockRequest.js' -import MockResponse from '../helpers/MockResponse.js' -import EntityConfigs from '../../../../app/src/Features/UserMembership/UserMembershipEntityConfigs.js' -import Errors from '../../../../app/src/Features/Errors/Errors.js' -import { - UserIsManagerError, - UserNotFoundError, - UserAlreadyAddedError, -} from '../../../../app/src/Features/UserMembership/UserMembershipErrors.js' -const assertCalledWith = sinon.assert.calledWith - -const modulePath = - '../../../../app/src/Features/UserMembership/UserMembershipController.mjs' - -describe('UserMembershipController', function () { - beforeEach(async function () { - this.req = new MockRequest() - this.req.params.id = 'mock-entity-id' - this.user = { _id: 'mock-user-id' } - this.newUser = { _id: 'mock-new-user-id', email: 'new-user-email@foo.bar' } - this.subscription = { - _id: 'mock-subscription-id', - admin_id: 'mock-admin-id', - fetchV1Data: callback => callback(null, this.subscription), - } - this.institution = { - _id: 'mock-institution-id', - v1Id: 123, - fetchV1Data: callback => { - const institution = Object.assign({}, this.institution) - institution.name = 'Test Institution Name' - callback(null, institution) - }, - } - this.users = [ - { - _id: 'mock-member-id-1', - email: 'mock-email-1@foo.com', - last_logged_in_at: '2020-08-09T12:43:11.467Z', - last_active_at: '2021-08-09T12:43:11.467Z', - }, - { - _id: 'mock-member-id-2', - email: 'mock-email-2@foo.com', - last_logged_in_at: '2020-05-20T10:41:11.407Z', - last_active_at: '2021-05-20T10:41:11.407Z', - }, - ] - - this.Settings = { - managedUsers: { - enabled: false, - }, - } - - this.SessionManager = { - getSessionUser: sinon.stub().returns(this.user), - getLoggedInUserId: 
sinon.stub().returns(this.user._id), - } - this.SSOConfig = { - findById: sinon - .stub() - .returns({ exec: sinon.stub().resolves({ enabled: true }) }), - } - this.UserMembershipHandler = { - getEntity: sinon.stub().yields(null, this.subscription), - createEntity: sinon.stub().yields(null, this.institution), - getUsers: sinon.stub().yields(null, this.users), - addUser: sinon.stub().yields(null, this.newUser), - removeUser: sinon.stub().yields(null), - promises: { - getUsers: sinon.stub().resolves(this.users), - }, - } - this.SplitTestHandler = { - promises: { - getAssignment: sinon.stub().resolves({ variant: 'default' }), - }, - getAssignment: sinon.stub().yields(null, { variant: 'default' }), - } - this.RecurlyClient = { - promises: { - getSubscription: sinon.stub().resolves({}), - }, - } - this.UserMembershipController = await esmock.strict(modulePath, { - '../../../../app/src/Features/UserMembership/UserMembershipErrors': { - UserIsManagerError, - UserNotFoundError, - UserAlreadyAddedError, - }, - '../../../../app/src/Features/Authentication/SessionManager': - this.SessionManager, - '../../../../app/src/Features/SplitTests/SplitTestHandler': - this.SplitTestHandler, - '../../../../app/src/Features/UserMembership/UserMembershipHandler': - this.UserMembershipHandler, - '../../../../app/src/Features/Subscription/RecurlyClient': - this.RecurlyClient, - '@overleaf/settings': this.Settings, - '../../../../app/src/models/SSOConfig': { SSOConfig: this.SSOConfig }, - }) - }) - - describe('index', function () { - beforeEach(function () { - this.req.entity = this.subscription - this.req.entityConfig = EntityConfigs.group - }) - - it('get users', async function () { - await this.UserMembershipController.manageGroupMembers(this.req, { - render: () => { - sinon.assert.calledWithMatch( - this.UserMembershipHandler.promises.getUsers, - this.subscription, - { modelName: 'Subscription' } - ) - }, - }) - }) - - it('render group view', async function () { - this.subscription.managedUsersEnabled = false - await this.UserMembershipController.manageGroupMembers(this.req, { - render: (viewPath, viewParams) => { - expect(viewPath).to.equal('user_membership/group-members-react') - expect(viewParams.users).to.deep.equal(this.users) - expect(viewParams.groupSize).to.equal(this.subscription.membersLimit) - expect(viewParams.managedUsersActive).to.equal(false) - }, - }) - }) - - it('render group view with managed users', async function () { - this.subscription.managedUsersEnabled = true - await this.UserMembershipController.manageGroupMembers(this.req, { - render: (viewPath, viewParams) => { - expect(viewPath).to.equal('user_membership/group-members-react') - expect(viewParams.users).to.deep.equal(this.users) - expect(viewParams.groupSize).to.equal(this.subscription.membersLimit) - expect(viewParams.managedUsersActive).to.equal(true) - }, - }) - }) - - it('render group managers view', async function () { - this.req.entityConfig = EntityConfigs.groupManagers - await this.UserMembershipController.manageGroupManagers(this.req, { - render: (viewPath, viewParams) => { - expect(viewPath).to.equal('user_membership/group-managers-react') - expect(viewParams.groupSize).to.equal(undefined) - }, - }) - }) - - it('render institution view', async function () { - this.req.entity = this.institution - this.req.entityConfig = EntityConfigs.institution - await this.UserMembershipController.manageInstitutionManagers(this.req, { - render: (viewPath, viewParams) => { - expect(viewPath).to.equal( - 
'user_membership/institution-managers-react' - ) - expect(viewParams.name).to.equal('Test Institution Name') - expect(viewParams.groupSize).to.equal(undefined) - }, - }) - }) - }) - - describe('add', function () { - beforeEach(function () { - this.req.body.email = this.newUser.email - this.req.entity = this.subscription - this.req.entityConfig = EntityConfigs.groupManagers - }) - - it('add user', function (done) { - this.UserMembershipController.add(this.req, { - json: () => { - sinon.assert.calledWithMatch( - this.UserMembershipHandler.addUser, - this.subscription, - { modelName: 'Subscription' }, - this.newUser.email - ) - done() - }, - }) - }) - - it('return user object', function (done) { - this.UserMembershipController.add(this.req, { - json: payload => { - payload.user.should.equal(this.newUser) - done() - }, - }) - }) - - it('handle readOnly entity', function (done) { - this.req.entityConfig = EntityConfigs.group - this.UserMembershipController.add(this.req, null, error => { - expect(error).to.exist - expect(error).to.be.an.instanceof(Errors.NotFoundError) - done() - }) - }) - - it('handle user already added', function (done) { - this.UserMembershipHandler.addUser.yields(new UserAlreadyAddedError()) - this.UserMembershipController.add(this.req, { - status: () => ({ - json: payload => { - expect(payload.error.code).to.equal('user_already_added') - done() - }, - }), - }) - }) - - it('handle user not found', function (done) { - this.UserMembershipHandler.addUser.yields(new UserNotFoundError()) - this.UserMembershipController.add(this.req, { - status: () => ({ - json: payload => { - expect(payload.error.code).to.equal('user_not_found') - done() - }, - }), - }) - }) - - it('handle invalid email', function (done) { - this.req.body.email = 'not_valid_email' - this.UserMembershipController.add(this.req, { - status: () => ({ - json: payload => { - expect(payload.error.code).to.equal('invalid_email') - done() - }, - }), - }) - }) - }) - - describe('remove', function () { - beforeEach(function () { - this.req.params.userId = this.newUser._id - this.req.entity = this.subscription - this.req.entityConfig = EntityConfigs.groupManagers - }) - - it('remove user', function (done) { - this.UserMembershipController.remove(this.req, { - sendStatus: () => { - sinon.assert.calledWithMatch( - this.UserMembershipHandler.removeUser, - this.subscription, - { modelName: 'Subscription' }, - this.newUser._id - ) - done() - }, - }) - }) - - it('handle readOnly entity', function (done) { - this.req.entityConfig = EntityConfigs.group - this.UserMembershipController.remove(this.req, null, error => { - expect(error).to.exist - expect(error).to.be.an.instanceof(Errors.NotFoundError) - done() - }) - }) - - it('prevent self removal', function (done) { - this.req.params.userId = this.user._id - this.UserMembershipController.remove(this.req, { - status: () => ({ - json: payload => { - expect(payload.error.code).to.equal('managers_cannot_remove_self') - done() - }, - }), - }) - }) - - it('prevent admin removal', function (done) { - this.UserMembershipHandler.removeUser.yields(new UserIsManagerError()) - this.UserMembershipController.remove(this.req, { - status: () => ({ - json: payload => { - expect(payload.error.code).to.equal('managers_cannot_remove_admin') - done() - }, - }), - }) - }) - }) - - describe('exportCsv', function () { - beforeEach(function () { - this.req.entity = this.subscription - this.req.entityConfig = EntityConfigs.groupManagers - this.res = new MockResponse() - 
this.UserMembershipController.exportCsv(this.req, this.res) - }) - - it('get users', function () { - sinon.assert.calledWithMatch( - this.UserMembershipHandler.getUsers, - this.subscription, - { modelName: 'Subscription' } - ) - }) - - it('should set the correct content type on the request', function () { - assertCalledWith(this.res.contentType, 'text/csv; charset=utf-8') - }) - - it('should name the exported csv file', function () { - assertCalledWith( - this.res.header, - 'Content-Disposition', - 'attachment; filename="Group.csv"' - ) - }) - - it('should export the correct csv', function () { - assertCalledWith( - this.res.send, - '"email","last_logged_in_at","last_active_at"\n"mock-email-1@foo.com","2020-08-09T12:43:11.467Z","2021-08-09T12:43:11.467Z"\n"mock-email-2@foo.com","2020-05-20T10:41:11.407Z","2021-05-20T10:41:11.407Z"' - ) - }) - }) - - describe('new', function () { - beforeEach(function () { - this.req.params.name = 'publisher' - this.req.params.id = 'abc' - }) - - it('renders view', function (done) { - this.UserMembershipController.new(this.req, { - render: (viewPath, data) => { - expect(data.entityName).to.eq('publisher') - expect(data.entityId).to.eq('abc') - done() - }, - }) - }) - }) - - describe('create', function () { - beforeEach(function () { - this.req.params.name = 'institution' - this.req.entityConfig = EntityConfigs.institution - this.req.params.id = 123 - }) - - it('creates institution', function (done) { - this.UserMembershipController.create(this.req, { - redirect: path => { - expect(path).to.eq(EntityConfigs.institution.pathsFor(123).index) - sinon.assert.calledWithMatch( - this.UserMembershipHandler.createEntity, - 123, - { modelName: 'Institution' } - ) - done() - }, - }) - }) - }) -}) diff --git a/services/web/test/unit/src/helpers/models/DeletedFile.js b/services/web/test/unit/src/helpers/models/DeletedFile.js deleted file mode 100644 index 8e0b6a43b8..0000000000 --- a/services/web/test/unit/src/helpers/models/DeletedFile.js +++ /dev/null @@ -1,3 +0,0 @@ -const mockModel = require('../MockModel') - -module.exports = mockModel('DeletedFile') diff --git a/services/web/test/unit/src/infrastructure/ServeStaticWrapper.test.mjs b/services/web/test/unit/src/infrastructure/ServeStaticWrapper.test.mjs new file mode 100644 index 0000000000..619fe74a2b --- /dev/null +++ b/services/web/test/unit/src/infrastructure/ServeStaticWrapper.test.mjs @@ -0,0 +1,62 @@ +import { expect, vi } from 'vitest' +import Path from 'node:path' +import sinon from 'sinon' +import MockResponse from '../helpers/MockResponse.js' +import MockRequest from '../helpers/MockRequest.js' + +const modulePath = Path.join( + import.meta.dirname, + '../../../../app/src/infrastructure/ServeStaticWrapper' +) + +describe('ServeStaticWrapperTests', function () { + let error = null + + beforeEach(async function (ctx) { + ctx.req = new MockRequest() + ctx.res = new MockResponse() + ctx.express = { + static: () => (req, res, next) => { + if (error) { + next(error) + } else { + next() + } + }, + } + + vi.doMock('express', () => ({ + default: ctx.express, + })) + + ctx.serveStaticWrapper = (await import(modulePath)).default + }) + + afterEach(() => { + error = null + }) + + it('Premature close error thrown', async function (ctx) { + error = new Error() + error.code = 'ERR_STREAM_PREMATURE_CLOSE' + const middleware = ctx.serveStaticWrapper('test_folder', {}) + const next = sinon.stub() + middleware(ctx.req, ctx.res, next) + expect(next.called).to.be.false + }) + + it('No error thrown', async function (ctx) { + 
const middleware = ctx.serveStaticWrapper('test_folder', {}) + const next = sinon.stub() + middleware(ctx.req, ctx.res, next) + expect(next).to.be.calledWith() + }) + + it('Other error thrown', async function (ctx) { + error = new Error() + const middleware = ctx.serveStaticWrapper('test_folder', {}) + const next = sinon.stub() + middleware(ctx.req, ctx.res, next) + expect(next).to.be.calledWith(error) + }) +}) diff --git a/services/web/test/unit/src/infrastructure/ServeStaticWrapperTests.mjs b/services/web/test/unit/src/infrastructure/ServeStaticWrapperTests.mjs deleted file mode 100644 index 01fe5d7a0d..0000000000 --- a/services/web/test/unit/src/infrastructure/ServeStaticWrapperTests.mjs +++ /dev/null @@ -1,62 +0,0 @@ -import { strict as esmock } from 'esmock' -import { expect } from 'chai' -import Path from 'node:path' -import { fileURLToPath } from 'node:url' -import sinon from 'sinon' -import MockResponse from '../helpers/MockResponse.js' -import MockRequest from '../helpers/MockRequest.js' - -const __dirname = fileURLToPath(new URL('.', import.meta.url)) -const modulePath = Path.join( - __dirname, - '../../../../app/src/infrastructure/ServeStaticWrapper' -) - -describe('ServeStaticWrapperTests', function () { - let error = null - - beforeEach(async function () { - this.req = new MockRequest() - this.res = new MockResponse() - this.express = { - static: () => (req, res, next) => { - if (error) { - next(error) - } else { - next() - } - }, - } - this.serveStaticWrapper = await esmock(modulePath, { - express: this.express, - }) - }) - - this.afterEach(() => { - error = null - }) - - it('Premature close error thrown', async function () { - error = new Error() - error.code = 'ERR_STREAM_PREMATURE_CLOSE' - const middleware = this.serveStaticWrapper('test_folder', {}) - const next = sinon.stub() - middleware(this.req, this.res, next) - expect(next.called).to.be.false - }) - - it('No error thrown', async function () { - const middleware = this.serveStaticWrapper('test_folder', {}) - const next = sinon.stub() - middleware(this.req, this.res, next) - expect(next).to.be.calledWith() - }) - - it('Other error thrown', async function () { - error = new Error() - const middleware = this.serveStaticWrapper('test_folder', {}) - const next = sinon.stub() - middleware(this.req, this.res, next) - expect(next).to.be.calledWith(error) - }) -}) diff --git a/services/web/test/unit/vitest_bootstrap.mjs b/services/web/test/unit/vitest_bootstrap.mjs new file mode 100644 index 0000000000..5a39b2d587 --- /dev/null +++ b/services/web/test/unit/vitest_bootstrap.mjs @@ -0,0 +1,46 @@ +import { chai, vi } from 'vitest' +import './common_bootstrap.js' +import sinon from 'sinon' +import logger from '@overleaf/logger' +import sinonChai from 'sinon-chai' +import chaiAsPromised from 'chai-as-promised' + +/* + * Chai configuration + */ + +// add chai.should() +chai.should() + +// Load sinon-chai assertions so expect(stubFn).to.have.been.calledWith('abc') +// has a nicer failure messages +chai.use(sinonChai) + +// Load promise support for chai +chai.use(chaiAsPromised) + +// Do not truncate assertion errors +chai.config.truncateThreshold = 0 +vi.mock('@overleaf/logger', async () => { + return { + default: { + debug: vi.fn(), + info: vi.fn(), + log: vi.fn(), + warn: vi.fn(), + err: vi.fn(), + error: vi.fn(), + fatal: vi.fn(), + }, + } +}) + +beforeEach(ctx => { + ctx.logger = logger +}) + +afterEach(() => { + vi.restoreAllMocks() + vi.resetModules() + sinon.restore() +}) diff --git a/services/web/types/admin/subscription.ts 
b/services/web/types/admin/subscription.ts index bbcdd3b953..811ebf54bf 100644 --- a/services/web/types/admin/subscription.ts +++ b/services/web/types/admin/subscription.ts @@ -1,7 +1,15 @@ -import { GroupPolicy } from '../subscription/dashboard/subscription' +import { + GroupPolicy, + PaymentProvider, +} from '../subscription/dashboard/subscription' import { SSOConfig } from '../subscription/sso' import { TeamInvite } from '../team-invite' +type RecurlyAdminClientPaymentProvider = Record +type StripeAdminClientPaymentProvider = PaymentProvider & { + service: 'stripe-us' | 'stripe-uk' +} + export type Subscription = { _id: string teamInvites: TeamInvite[] @@ -13,4 +21,8 @@ export type Subscription = { managedUsersEnabled: boolean v1_id: number salesforce_id: string + recurlySubscription_id?: string + paymentProvider: + | RecurlyAdminClientPaymentProvider + | StripeAdminClientPaymentProvider } diff --git a/services/web/types/group-management/group-audit-log.ts b/services/web/types/group-management/group-audit-log.ts new file mode 100644 index 0000000000..c96c12e7cd --- /dev/null +++ b/services/web/types/group-management/group-audit-log.ts @@ -0,0 +1,7 @@ +export type GroupAuditLog = { + groupId: string + operation: string + ipAddress?: string + initiatorId?: string + info?: object +} diff --git a/services/web/types/onboarding.ts b/services/web/types/onboarding.ts new file mode 100644 index 0000000000..11ae3e51d0 --- /dev/null +++ b/services/web/types/onboarding.ts @@ -0,0 +1,25 @@ +export type UsedLatex = 'never' | 'occasionally' | 'often' +export type Occupation = + | 'university' + | 'company' + | 'nonprofitngo' + | 'government' + | 'other' + +export type OnboardingFormData = { + firstName: string + lastName: string + primaryOccupation: Occupation | null + usedLatex: UsedLatex | null + companyDivisionDepartment: string + companyJobTitle: string + governmentJobTitle: string + institutionName: string + otherJobTitle: string + nonprofitDivisionDepartment: string + nonprofitJobTitle: string + role: string + subjectArea: string + updatedAt?: Date + shouldReceiveUpdates?: boolean +} diff --git a/services/web/types/project.ts b/services/web/types/project.ts index 83a28533b3..0fc21533e2 100644 --- a/services/web/types/project.ts +++ b/services/web/types/project.ts @@ -25,7 +25,8 @@ export type Project = { owner: MongoUser members: ProjectMember[] invites: ProjectInvite[] - rootDoc_id?: string + // `rootDoc_id` in the backend; `rootDocId` in the frontend + rootDocId?: string rootFolder?: Folder[] deletedByExternalDataSource?: boolean } diff --git a/services/web/types/project/dashboard/subscription.ts b/services/web/types/project/dashboard/subscription.ts index e8b595c49f..c8f8835b34 100644 --- a/services/web/types/project/dashboard/subscription.ts +++ b/services/web/types/project/dashboard/subscription.ts @@ -1,4 +1,7 @@ -import { SubscriptionState } from '../../subscription/dashboard/subscription' +import { + SubscriptionState, + PaymentProvider, +} from '../../subscription/dashboard/subscription' type SubscriptionBase = { featuresPageURL: string @@ -22,6 +25,7 @@ type PaidSubscriptionBase = { teamName?: string name: string recurlyStatus?: RecurlyStatus + paymentProvider?: PaymentProvider } } & SubscriptionBase diff --git a/services/web/types/share-doc.ts b/services/web/types/share-doc.ts index d071c97f28..7c75e6d0de 100644 --- a/services/web/types/share-doc.ts +++ b/services/web/types/share-doc.ts @@ -1,9 +1,23 @@ import EventEmitter from 'events' +import { StringFileData } from 
'overleaf-editor-core' // type for the Doc class in vendor/libs/sharejs.js -export interface ShareDoc extends EventEmitter { +export interface ShareLatexOTShareDoc extends EventEmitter { + otType: 'sharejs-text-ot' + snapshot: string detach_cm6?: () => void getText: () => string insert: (pos: number, insert: string, fromUndo: boolean) => void del: (pos: number, length: number, fromUndo: boolean) => void + submitOp(op: any[]): void } + +export interface HistoryOTShareDoc extends EventEmitter { + otType: 'history-ot' + snapshot: StringFileData + detach_cm6?: () => void + getText: () => string + submitOp(op: any[]): void +} + +export type ShareDoc = ShareLatexOTShareDoc | HistoryOTShareDoc diff --git a/services/web/types/subscription/currency.ts b/services/web/types/subscription/currency.ts index 8d6b88dc0b..d63d71be74 100644 --- a/services/web/types/subscription/currency.ts +++ b/services/web/types/subscription/currency.ts @@ -20,3 +20,4 @@ export const currencies = { type Currency = typeof currencies export type CurrencyCode = keyof Currency +export type StripeCurrencyCode = Lowercase<CurrencyCode> diff --git a/services/web/types/subscription/dashboard/subscription.ts b/services/web/types/subscription/dashboard/subscription.ts index a1ee934423..db17b25684 100644 --- a/services/web/types/subscription/dashboard/subscription.ts +++ b/services/web/types/subscription/dashboard/subscription.ts @@ -64,7 +64,6 @@ export type Subscription = { membersLimit: number teamInvites: object[] planCode: string - recurlySubscription_id: string plan: Plan pendingPlan?: PendingPaymentProviderPlan addOns?: AddOn[] @@ -103,7 +102,7 @@ export type MemberGroupSubscription = Omit<Subscription, 'admin_id'> & { admin_id: User } -type PaymentProviderService = 'stripe' | 'recurly' +type PaymentProviderService = 'stripe-us' | 'stripe-uk' | 'recurly' export type PaymentProvider = { service: PaymentProviderService diff --git a/services/web/types/subscription/plan.ts b/services/web/types/subscription/plan.ts index c5e8f7e820..d6f3008a19 100644 --- a/services/web/types/subscription/plan.ts +++ b/services/web/types/subscription/plan.ts @@ -1,3 +1,5 @@ +import { StripeCurrencyCode } from './currency' + type Features = { collaborators: number compileGroup: string @@ -60,6 +62,7 @@ export type Plan = { name: string planCode: string price_in_cents: number + canUseFlexibleLicensing?: boolean } export type PriceForDisplayData = { @@ -85,15 +88,27 @@ export type RecurlyPlanCode = | 'group_professional_educational' | 'group_collaborator' | 'group_collaborator_educational' + | 'assistant' + | 'assistant-annual' -export type StripeLookupKey = +export type RecurlyAddOnCode = 'assistant' + +export type StripeBaseLookupKey = | 'standard_monthly' | 'standard_annual' | 'professional_monthly' | 'professional_annual' | 'student_monthly' | 'student_annual' + | 'assistant_annual' + | 'assistant_monthly' + // TODO: change all group plans' lookup_keys to match the UK account after they have been added | 'group_standard_enterprise' | 'group_professional_enterprise' | 'group_standard_educational' | 'group_professional_educational' + +export type StripeLookupKeyVersion = 'jun2025' + +export type StripeLookupKey = + `${StripeBaseLookupKey}_${StripeLookupKeyVersion}_${StripeCurrencyCode}` diff --git a/services/web/types/subscription/sso.ts b/services/web/types/subscription/sso.ts index cf869ec741..6500817407 100644 --- a/services/web/types/subscription/sso.ts +++ b/services/web/types/subscription/sso.ts @@ -13,6 +13,7 @@ export type SSOConfig = { userLastNameAttribute?: string validated?:
boolean enabled?: boolean + useSettingsUKAMF?: boolean } export type GroupSSOLinkingStatus = { diff --git a/services/web/types/user-settings.ts b/services/web/types/user-settings.ts index 3e748d937e..add460edfa 100644 --- a/services/web/types/user-settings.ts +++ b/services/web/types/user-settings.ts @@ -17,4 +17,5 @@ export type UserSettings = { mathPreview: boolean referencesSearchMode: 'advanced' | 'simple' enableNewEditor: boolean + breadcrumbs: boolean } diff --git a/services/web/types/user.ts b/services/web/types/user.ts index 8d00ea803f..2fce1ce46b 100644 --- a/services/web/types/user.ts +++ b/services/web/types/user.ts @@ -39,7 +39,7 @@ export type User = { isAdmin?: boolean email: string allowedFreeTrial?: boolean - hasRecurlySubscription?: boolean + hasPaidSubscription?: boolean first_name?: string last_name?: string alphaProgram?: boolean diff --git a/services/web/types/window.ts b/services/web/types/window.ts index 1150bf1e50..d2856e7179 100644 --- a/services/web/types/window.ts +++ b/services/web/types/window.ts @@ -1,20 +1,11 @@ import 'recurly__recurly-js' import { ScopeValueStore } from './ide/scope-value-store' import { MetaAttributesCache } from '@/utils/meta' -import { Socket } from '@/features/ide-react/connection/types/socket' declare global { // eslint-disable-next-line no-unused-vars interface Window { metaAttributesCache: MetaAttributesCache - _ide: Record & { - $scope: Record & { - pdf?: { - logEntryAnnotations: Record - } - } - socket: Socket - } MathJax: Record // For react-google-recaptcha recaptchaOptions?: { diff --git a/services/web/vitest.config.js b/services/web/vitest.config.js new file mode 100644 index 0000000000..51f4ed811f --- /dev/null +++ b/services/web/vitest.config.js @@ -0,0 +1,13 @@ +const { defineConfig } = require('vitest/config') + +module.exports = defineConfig({ + test: { + include: [ + 'modules/*/test/unit/**/*.test.mjs', + 'test/unit/src/**/*.test.mjs', + ], + setupFiles: ['./test/unit/vitest_bootstrap.mjs'], + globals: true, + isolate: false, + }, +})