Compare commits


No commits in common. "09f47e75e02568154c5edb474738bce46ca11a0e" and "52f1e463431b6f8f484f920bf7c1129b14687a2b" have entirely different histories.

802 changed files with 22126 additions and 39983 deletions

View file

@@ -1,19 +1,10 @@
----
-name: Bug report
-about: Report a bug
-title: ''
-labels: type:bug
-assignees: ''
----
 <!--
   Note: If you are using www.overleaf.com and have a problem,
   or if you would like to request a new feature please contact
   the support team at support@overleaf.com

   This form should only be used to report bugs in the
   Community Edition release of Overleaf.
-->

View file

@@ -25,10 +25,10 @@ services:
     env_file:
       - dev.env
     environment:
+      - DOCKER_RUNNER=true
       - TEXLIVE_IMAGE=texlive-full # docker build texlive -t texlive-full
-      - SANDBOXED_COMPILES=true
-      - SANDBOXED_COMPILES_HOST_DIR_COMPILES=${PWD}/compiles
-      - SANDBOXED_COMPILES_HOST_DIR_OUTPUT=${PWD}/output
+      - COMPILES_HOST_DIR=${PWD}/compiles
+      - OUTPUT_HOST_DIR=${PWD}/output
     user: root
     volumes:
       - ${PWD}/compiles:/overleaf/services/clsi/compiles

View file

@@ -1,6 +1,6 @@
 access-token-encryptor
 --dependencies=None
---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
+--docker-repos=gcr.io/overleaf-ops
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 fetch-utils
 --dependencies=None
---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
+--docker-repos=gcr.io/overleaf-ops
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 logger
 --dependencies=None
---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
+--docker-repos=gcr.io/overleaf-ops
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 metrics
 --dependencies=None
---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
+--docker-repos=gcr.io/overleaf-ops
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -35,7 +35,6 @@ let BATCHED_UPDATE_RUNNING = false
  * @property {string} [BATCH_RANGE_START]
  * @property {string} [BATCH_SIZE]
  * @property {string} [VERBOSE_LOGGING]
- * @property {(progress: string) => Promise<void>} [trackProgress]
  */

 /**
@@ -211,7 +210,7 @@ async function batchedUpdate(
   update,
   projection,
   findOptions,
-  batchedUpdateOptions = {}
+  batchedUpdateOptions
 ) {
   // only a single batchedUpdate can run at a time due to global variables
   if (BATCHED_UPDATE_RUNNING) {
@@ -227,8 +226,6 @@ async function batchedUpdate(
     return 0
   }
   refreshGlobalOptionsForBatchedUpdate(batchedUpdateOptions)
-  const { trackProgress = async progress => console.warn(progress) } =
-    batchedUpdateOptions

   findOptions = findOptions || {}
   findOptions.readPreference = READ_PREFERENCE_SECONDARY
@@ -258,10 +255,9 @@ async function batchedUpdate(
           nextBatch.map(entry => entry._id)
         )}`
       )
+    } else {
+      console.error(`Running update on batch ending ${renderObjectId(end)}`)
     }
-    await trackProgress(
-      `Running update on batch ending ${renderObjectId(end)}`
-    )

     if (typeof update === 'function') {
       await update(nextBatch)
@@ -269,7 +265,7 @@ async function batchedUpdate(
       await performUpdate(collection, nextBatch, update)
     }
   }
-  await trackProgress(`Completed batch ending ${renderObjectId(end)}`)
+  console.error(`Completed batch ending ${renderObjectId(end)}`)
   start = end
 }
 return updated
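Note: the left-hand revision threads an optional trackProgress callback through batchedUpdateOptions (defaulting to async progress => console.warn(progress)), where the right-hand revision logs progress unconditionally with console.error. A minimal sketch of a call site using that option, assuming the signature shown in this hunk; the parameters before `update` are not visible here, so collection and query are placeholders:

await batchedUpdate(
  collection, // placeholder: the leading parameters are not shown in this hunk
  query, // placeholder
  { $set: { migrated: true } }, // update
  null, // projection
  null, // findOptions
  { trackProgress: async progress => console.warn(progress) } // default from the removed lines
)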

View file

@@ -1,6 +1,6 @@
 mongo-utils
 --dependencies=None
---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
+--docker-repos=gcr.io/overleaf-ops
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 o-error
 --dependencies=None
---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
+--docker-repos=gcr.io/overleaf-ops
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 object-persistor
 --dependencies=None
---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
+--docker-repos=gcr.io/overleaf-ops
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 overleaf-editor-core
 --dependencies=None
---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
+--docker-repos=gcr.io/overleaf-ops
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,7 +1,7 @@
 // @ts-check

 /**
- * @import { ClearTrackingPropsRawData, TrackingDirective } from '../types'
+ * @import { ClearTrackingPropsRawData } from '../types'
  */

 class ClearTrackingProps {
@@ -11,27 +11,12 @@ class ClearTrackingProps {
   /**
    * @param {any} other
-   * @returns {other is ClearTrackingProps}
+   * @returns {boolean}
    */
   equals(other) {
     return other instanceof ClearTrackingProps
   }

-  /**
-   * @param {TrackingDirective} other
-   * @returns {other is ClearTrackingProps}
-   */
-  canMergeWith(other) {
-    return other instanceof ClearTrackingProps
-  }
-
-  /**
-   * @param {TrackingDirective} other
-   */
-  mergeWith(other) {
-    return this
-  }
-
   /**
    * @returns {ClearTrackingPropsRawData}
    */

View file

@@ -11,7 +11,7 @@ const EditOperation = require('../operation/edit_operation')
 const EditOperationBuilder = require('../operation/edit_operation_builder')

 /**
- * @import { BlobStore, ReadonlyBlobStore, RangesBlob, RawHashFileData, RawLazyStringFileData } from '../types'
+ * @import { BlobStore, ReadonlyBlobStore, RangesBlob, RawFileData, RawLazyStringFileData } from '../types'
  */

 class LazyStringFileData extends FileData {
@@ -159,11 +159,11 @@ class LazyStringFileData extends FileData {
   /** @inheritdoc
    * @param {BlobStore} blobStore
-   * @return {Promise<RawHashFileData>}
+   * @return {Promise<RawFileData>}
    */
   async store(blobStore) {
     if (this.operations.length === 0) {
-      /** @type RawHashFileData */
+      /** @type RawFileData */
       const raw = { hash: this.hash }
       if (this.rangesHash) {
         raw.rangesHash = this.rangesHash
@@ -171,11 +171,9 @@ class LazyStringFileData extends FileData {
       return raw
     }
     const eager = await this.toEager(blobStore)
-    const raw = await eager.store(blobStore)
-    this.hash = raw.hash
-    this.rangesHash = raw.rangesHash
     this.operations.length = 0
-    return raw
+    /** @type RawFileData */
+    return await eager.store(blobStore)
   }
 }

View file

@@ -8,7 +8,7 @@ const CommentList = require('./comment_list')
 const TrackedChangeList = require('./tracked_change_list')

 /**
- * @import { StringFileRawData, RawHashFileData, BlobStore, CommentRawData } from "../types"
+ * @import { StringFileRawData, RawFileData, BlobStore, CommentRawData } from "../types"
  * @import { TrackedChangeRawData, RangesBlob } from "../types"
  * @import EditOperation from "../operation/edit_operation"
  */
@@ -139,7 +139,7 @@ class StringFileData extends FileData {
   /**
    * @inheritdoc
    * @param {BlobStore} blobStore
-   * @return {Promise<RawHashFileData>}
+   * @return {Promise<RawFileData>}
    */
   async store(blobStore) {
     const blob = await blobStore.putString(this.content)

View file

@@ -84,21 +84,6 @@ class TrackedChange {
       )
     )
   }
-
-  /**
-   * Return an equivalent tracked change whose extent is limited to the given
-   * range
-   *
-   * @param {Range} range
-   * @returns {TrackedChange | null} - the result or null if the intersection is empty
-   */
-  intersectRange(range) {
-    const intersection = this.range.intersect(range)
-    if (intersection == null) {
-      return null
-    }
-    return new TrackedChange(intersection, this.tracking)
-  }
 }

 module.exports = TrackedChange

View file

@@ -2,11 +2,9 @@
 const Range = require('../range')
 const TrackedChange = require('./tracked_change')
 const TrackingProps = require('../file_data/tracking_props')
-const { InsertOp, RemoveOp, RetainOp } = require('../operation/scan_op')

 /**
  * @import { TrackingDirective, TrackedChangeRawData } from "../types"
- * @import TextOperation from "../operation/text_operation"
  */

 class TrackedChangeList {
@@ -60,22 +58,6 @@ class TrackedChangeList {
     return this._trackedChanges.filter(change => range.contains(change.range))
   }

-  /**
-   * Returns tracked changes that overlap with the given range
-   * @param {Range} range
-   * @returns {TrackedChange[]}
-   */
-  intersectRange(range) {
-    const changes = []
-    for (const change of this._trackedChanges) {
-      const intersection = change.intersectRange(range)
-      if (intersection != null) {
-        changes.push(intersection)
-      }
-    }
-    return changes
-  }
-
   /**
    * Returns the tracking props for a given range.
    * @param {Range} range
@@ -107,8 +89,6 @@ class TrackedChangeList {
   /**
    * Collapses consecutive (and compatible) ranges
-   *
-   * @private
    * @returns {void}
    */
   _mergeRanges() {
@@ -137,28 +117,12 @@ class TrackedChangeList {
   }

   /**
-   * Apply an insert operation
    *
    * @param {number} cursor
    * @param {string} insertedText
    * @param {{tracking?: TrackingProps}} opts
    */
   applyInsert(cursor, insertedText, opts = {}) {
-    this._applyInsert(cursor, insertedText, opts)
-    this._mergeRanges()
-  }
-
-  /**
-   * Apply an insert operation
-   *
-   * This method will not merge ranges at the end
-   *
-   * @private
-   * @param {number} cursor
-   * @param {string} insertedText
-   * @param {{tracking?: TrackingProps}} [opts]
-   */
-  _applyInsert(cursor, insertedText, opts = {}) {
     const newTrackedChanges = []
     for (const trackedChange of this._trackedChanges) {
       if (
@@ -207,29 +171,15 @@ class TrackedChangeList {
       newTrackedChanges.push(newTrackedChange)
     }
     this._trackedChanges = newTrackedChanges
+    this._mergeRanges()
   }

   /**
-   * Apply a delete operation to the list of tracked changes
    *
    * @param {number} cursor
    * @param {number} length
    */
   applyDelete(cursor, length) {
-    this._applyDelete(cursor, length)
-    this._mergeRanges()
-  }
-
-  /**
-   * Apply a delete operation to the list of tracked changes
-   *
-   * This method will not merge ranges at the end
-   *
-   * @private
-   * @param {number} cursor
-   * @param {number} length
-   */
-  _applyDelete(cursor, length) {
     const newTrackedChanges = []
     for (const trackedChange of this._trackedChanges) {
       const deletedRange = new Range(cursor, length)
@@ -255,31 +205,15 @@ class TrackedChangeList {
       }
     }
     this._trackedChanges = newTrackedChanges
-  }
-
-  /**
-   * Apply a retain operation to the list of tracked changes
-   *
-   * @param {number} cursor
-   * @param {number} length
-   * @param {{tracking?: TrackingDirective}} [opts]
-   */
-  applyRetain(cursor, length, opts = {}) {
-    this._applyRetain(cursor, length, opts)
     this._mergeRanges()
   }

   /**
-   * Apply a retain operation to the list of tracked changes
-   *
-   * This method will not merge ranges at the end
-   *
-   * @private
    * @param {number} cursor
    * @param {number} length
    * @param {{tracking?: TrackingDirective}} opts
    */
-  _applyRetain(cursor, length, opts = {}) {
+  applyRetain(cursor, length, opts = {}) {
     // If there's no tracking info, leave everything as-is
     if (!opts.tracking) {
       return
@@ -335,31 +269,6 @@ class TrackedChangeList {
       newTrackedChanges.push(newTrackedChange)
     }
     this._trackedChanges = newTrackedChanges
-  }
-
-  /**
-   * Apply a text operation to the list of tracked changes
-   *
-   * Ranges are merged only once at the end, for performance and to avoid
-   * problematic edge cases where intermediate ranges get incorrectly merged.
-   *
-   * @param {TextOperation} operation
-   */
-  applyTextOperation(operation) {
-    // this cursor tracks the destination document that gets modified as
-    // operations are applied to it.
-    let cursor = 0
-    for (const op of operation.ops) {
-      if (op instanceof InsertOp) {
-        this._applyInsert(cursor, op.insertion, { tracking: op.tracking })
-        cursor += op.insertion.length
-      } else if (op instanceof RemoveOp) {
-        this._applyDelete(cursor, op.length)
-      } else if (op instanceof RetainOp) {
-        this._applyRetain(cursor, op.length, { tracking: op.tracking })
-        cursor += op.length
-      }
-    }
     this._mergeRanges()
   }
 }
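Note: in the left-hand revision the public applyInsert/applyDelete/applyRetain delegate to private _apply* variants, and the new applyTextOperation walks a whole TextOperation so ranges are merged only once at the end (per the removed comment, for performance and to avoid intermediate ranges being merged incorrectly); in the right-hand revision each public apply* call merges ranges itself. A sketch of the two calling patterns implied by this diff:

// Left-hand revision: one pass over a TextOperation, one merge at the end
file.trackedChanges.applyTextOperation(operation)

// Right-hand revision: the caller drives each op, and _mergeRanges() runs
// after every public apply* call
file.trackedChanges.applyInsert(cursor, op.insertion, { tracking: op.tracking })
file.trackedChanges.applyDelete(cursor, op.length)
file.trackedChanges.applyRetain(cursor, op.length, { tracking: op.tracking })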

View file

@@ -62,35 +62,6 @@ class TrackingProps {
       this.ts.getTime() === other.ts.getTime()
     )
   }
-
-  /**
-   * Are these tracking props compatible with the other tracking props for merging
-   * ranges?
-   *
-   * @param {TrackingDirective} other
-   * @returns {other is TrackingProps}
-   */
-  canMergeWith(other) {
-    if (!(other instanceof TrackingProps)) {
-      return false
-    }
-    return this.type === other.type && this.userId === other.userId
-  }
-
-  /**
-   * Merge two tracking props
-   *
-   * Assumes that `canMerge(other)` returns true
-   *
-   * @param {TrackingDirective} other
-   */
-  mergeWith(other) {
-    if (!this.canMergeWith(other)) {
-      throw new Error('Cannot merge with incompatible tracking props')
-    }
-    const ts = this.ts <= other.ts ? this.ts : other.ts
-    return new TrackingProps(this.type, this.userId, ts)
-  }
 }

 module.exports = TrackingProps
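Note: the removed canMergeWith treats two TrackingProps as mergeable when their type and userId match (timestamps may differ), and mergeWith keeps the earlier of the two timestamps. A small sketch grounded in the removed lines:

const a = new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:01.000Z'))
const b = new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
a.canMergeWith(b) // true: same type and userId
a.mergeWith(b).ts // equals b.ts, the earlier timestamp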

View file

@@ -175,7 +175,7 @@ class InsertOp extends ScanOp {
       return false
     }
     if (this.tracking) {
-      if (!other.tracking || !this.tracking.canMergeWith(other.tracking)) {
+      if (!this.tracking.equals(other.tracking)) {
         return false
       }
     } else if (other.tracking) {
@@ -198,10 +198,7 @@ class InsertOp extends ScanOp {
       throw new Error('Cannot merge with incompatible operation')
     }
     this.insertion += other.insertion
-    if (this.tracking != null && other.tracking != null) {
-      this.tracking = this.tracking.mergeWith(other.tracking)
-    }
-    // We already have the same commentIds
+    // We already have the same tracking info and commentIds
   }

 /**
@@ -309,13 +306,9 @@ class RetainOp extends ScanOp {
       return false
     }
     if (this.tracking) {
-      if (!other.tracking || !this.tracking.canMergeWith(other.tracking)) {
-        return false
-      }
-    } else if (other.tracking) {
-      return false
+      return this.tracking.equals(other.tracking)
     }
-    return true
+    return !other.tracking
   }

 /**
@@ -326,9 +319,6 @@ class RetainOp extends ScanOp {
       throw new Error('Cannot merge with incompatible operation')
     }
     this.length += other.length
-    if (this.tracking != null && other.tracking != null) {
-      this.tracking = this.tracking.mergeWith(other.tracking)
-    }
   }

 /**
/** /**

View file

@@ -314,18 +314,25 @@ class TextOperation extends EditOperation {
             str
           )
         }
+        file.trackedChanges.applyRetain(result.length, op.length, {
+          tracking: op.tracking,
+        })
         result += str.slice(inputCursor, inputCursor + op.length)
         inputCursor += op.length
       } else if (op instanceof InsertOp) {
         if (containsNonBmpChars(op.insertion)) {
           throw new InvalidInsertionError(str, op.toJSON())
         }
+        file.trackedChanges.applyInsert(result.length, op.insertion, {
+          tracking: op.tracking,
+        })
         file.comments.applyInsert(
           new Range(result.length, op.insertion.length),
           { commentIds: op.commentIds }
         )
         result += op.insertion
       } else if (op instanceof RemoveOp) {
+        file.trackedChanges.applyDelete(result.length, op.length)
         file.comments.applyDelete(new Range(result.length, op.length))
         inputCursor += op.length
       } else {
@@ -345,8 +352,6 @@ class TextOperation extends EditOperation {
       throw new TextOperation.TooLongError(operation, result.length)
     }

-    file.trackedChanges.applyTextOperation(this)
-
     file.content = result
   }

@@ -395,36 +400,44 @@ class TextOperation extends EditOperation {
     for (let i = 0, l = ops.length; i < l; i++) {
       const op = ops[i]
       if (op instanceof RetainOp) {
-        if (op.tracking) {
-          // Where we need to end up after the retains
-          const target = strIndex + op.length
-          // A previous retain could have overriden some tracking info. Now we
-          // need to restore it.
-          const previousChanges = previousState.trackedChanges.intersectRange(
-            new Range(strIndex, op.length)
-          )
-          for (const change of previousChanges) {
-            if (strIndex < change.range.start) {
-              inverse.retain(change.range.start - strIndex, {
-                tracking: new ClearTrackingProps(),
-              })
-              strIndex = change.range.start
-            }
-            inverse.retain(change.range.length, {
-              tracking: change.tracking,
-            })
-            strIndex += change.range.length
-          }
-          if (strIndex < target) {
-            inverse.retain(target - strIndex, {
-              tracking: new ClearTrackingProps(),
-            })
-            strIndex = target
-          }
-        } else {
-          inverse.retain(op.length)
-          strIndex += op.length
+        // Where we need to end up after the retains
+        const target = strIndex + op.length
+        // A previous retain could have overriden some tracking info. Now we
+        // need to restore it.
+        const previousRanges = previousState.trackedChanges.inRange(
+          new Range(strIndex, op.length)
+        )
+
+        let removeTrackingInfoIfNeeded
+        if (op.tracking) {
+          removeTrackingInfoIfNeeded = new ClearTrackingProps()
+        }
+
+        for (const trackedChange of previousRanges) {
+          if (strIndex < trackedChange.range.start) {
+            inverse.retain(trackedChange.range.start - strIndex, {
+              tracking: removeTrackingInfoIfNeeded,
+            })
+            strIndex = trackedChange.range.start
+          }
+          if (trackedChange.range.end < strIndex + op.length) {
+            inverse.retain(trackedChange.range.length, {
+              tracking: trackedChange.tracking,
+            })
+            strIndex = trackedChange.range.end
+          }
+          if (trackedChange.range.end !== strIndex) {
+            // No need to split the range at the end
+            const [left] = trackedChange.range.splitAt(strIndex)
+            inverse.retain(left.length, { tracking: trackedChange.tracking })
+            strIndex = left.end
+          }
+        }
+        if (strIndex < target) {
+          inverse.retain(target - strIndex, {
+            tracking: removeTrackingInfoIfNeeded,
+          })
+          strIndex = target
         }
       } else if (op instanceof InsertOp) {
         inverse.remove(op.insertion.length)
View file

@@ -86,32 +86,10 @@ class Range {
   }

   /**
-   * Does this range overlap another range?
-   *
-   * Overlapping means that the two ranges have at least one character in common
-   *
-   * @param {Range} other - the other range
+   * @param {Range} range
    */
-  overlaps(other) {
-    return this.start < other.end && this.end > other.start
-  }
-
-  /**
-   * Does this range overlap the start of another range?
-   *
-   * @param {Range} other - the other range
-   */
-  overlapsStart(other) {
-    return this.start <= other.start && this.end > other.start
-  }
-
-  /**
-   * Does this range overlap the end of another range?
-   *
-   * @param {Range} other - the other range
-   */
-  overlapsEnd(other) {
-    return this.start < other.end && this.end >= other.end
+  overlaps(range) {
+    return this.start < range.end && this.end > range.start
   }

   /**
@@ -249,26 +227,6 @@ class Range {
     )
     return [rangeUpToCursor, rangeAfterCursor]
   }
-
-  /**
-   * Returns the intersection of this range with another range
-   *
-   * @param {Range} other - the other range
-   * @return {Range | null} the intersection or null if the intersection is empty
-   */
-  intersect(other) {
-    if (this.contains(other)) {
-      return other
-    } else if (other.contains(this)) {
-      return this
-    } else if (other.overlapsStart(this)) {
-      return new Range(this.pos, other.end - this.start)
-    } else if (other.overlapsEnd(this)) {
-      return new Range(other.pos, this.end - other.start)
-    } else {
-      return null
-    }
-  }
 }

 module.exports = Range
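Note: the removed intersect covers the four non-empty cases (containment in either direction, plus overlap at the start or end via the also-removed overlapsStart/overlapsEnd helpers) and returns null for disjoint ranges. The Range test removals later in this diff exercise exactly these cases, for example:

new Range(5, 10).intersect(new Range(3, 6)) // Range(5, 4): partial overlap
new Range(5, 10).intersect(new Range(7, 2)) // Range(7, 2): nested range
new Range(5, 10).intersect(new Range(20, 30)) // null: disconnected ranges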

View file

@@ -193,13 +193,4 @@ describe('LazyStringFileData', function () {
     expect(fileData.getStringLength()).to.equal(longString.length)
     expect(fileData.getOperations()).to.have.length(1)
   })
-
-  it('truncates its operations after being stored', async function () {
-    const testHash = File.EMPTY_FILE_HASH
-    const fileData = new LazyStringFileData(testHash, undefined, 0)
-    fileData.edit(new TextOperation().insert('abc'))
-    const stored = await fileData.store(this.blobStore)
-    expect(fileData.hash).to.equal(stored.hash)
-    expect(fileData.operations).to.deep.equal([])
-  })
 })

View file

@@ -1,3 +1,4 @@
+// @ts-check
 'use strict'

 const { expect } = require('chai')
@@ -448,44 +449,4 @@ describe('Range', function () {
       expect(() => range.insertAt(16, 3)).to.throw()
     })
   })
-
-  describe('intersect', function () {
-    it('should handle partially overlapping ranges', function () {
-      const range1 = new Range(5, 10)
-      const range2 = new Range(3, 6)
-      const intersection1 = range1.intersect(range2)
-      expect(intersection1.pos).to.equal(5)
-      expect(intersection1.length).to.equal(4)
-      const intersection2 = range2.intersect(range1)
-      expect(intersection2.pos).to.equal(5)
-      expect(intersection2.length).to.equal(4)
-    })
-
-    it('should intersect with itself', function () {
-      const range = new Range(5, 10)
-      const intersection = range.intersect(range)
-      expect(intersection.pos).to.equal(5)
-      expect(intersection.length).to.equal(10)
-    })
-
-    it('should handle nested ranges', function () {
-      const range1 = new Range(5, 10)
-      const range2 = new Range(7, 2)
-      const intersection1 = range1.intersect(range2)
-      expect(intersection1.pos).to.equal(7)
-      expect(intersection1.length).to.equal(2)
-      const intersection2 = range2.intersect(range1)
-      expect(intersection2.pos).to.equal(7)
-      expect(intersection2.length).to.equal(2)
-    })
-
-    it('should handle disconnected ranges', function () {
-      const range1 = new Range(5, 10)
-      const range2 = new Range(20, 30)
-      const intersection1 = range1.intersect(range2)
-      expect(intersection1).to.be.null
-      const intersection2 = range2.intersect(range1)
-      expect(intersection2).to.be.null
-    })
-  })
 })

View file

@@ -107,7 +107,7 @@ describe('RetainOp', function () {
     expect(op1.equals(new RetainOp(3))).to.be.true
   })

-  it('cannot merge with another RetainOp if the tracking user is different', function () {
+  it('cannot merge with another RetainOp if tracking info is different', function () {
     const op1 = new RetainOp(
       4,
       new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
@@ -120,14 +120,14 @@ describe('RetainOp', function () {
     expect(() => op1.mergeWith(op2)).to.throw(Error)
   })

-  it('can merge with another RetainOp if the tracking user is the same', function () {
+  it('can merge with another RetainOp if tracking info is the same', function () {
     const op1 = new RetainOp(
       4,
       new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
     )
     const op2 = new RetainOp(
       4,
-      new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:01.000Z'))
+      new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
     )
     op1.mergeWith(op2)
     expect(
@@ -310,7 +310,7 @@ describe('InsertOp', function () {
     expect(() => op1.mergeWith(op2)).to.throw(Error)
   })

-  it('cannot merge with another InsertOp if tracking user is different', function () {
+  it('cannot merge with another InsertOp if tracking info is different', function () {
     const op1 = new InsertOp(
       'a',
       new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
@@ -323,7 +323,7 @@ describe('InsertOp', function () {
     expect(() => op1.mergeWith(op2)).to.throw(Error)
   })

-  it('can merge with another InsertOp if tracking user and comment info is the same', function () {
+  it('can merge with another InsertOp if tracking and comment info is the same', function () {
     const op1 = new InsertOp(
       'a',
       new TrackingProps(
@@ -338,7 +338,7 @@ describe('InsertOp', function () {
       new TrackingProps(
         'insert',
         'user1',
-        new Date('2024-01-01T00:00:01.000Z')
+        new Date('2024-01-01T00:00:00.000Z')
       ),
       ['1', '2']
     )

View file

@@ -322,47 +322,6 @@ describe('TextOperation', function () {
       new TextOperation().retain(4).remove(4).retain(3)
     )
   })
-
-  it('undoing a tracked delete restores the tracked changes', function () {
-    expectInverseToLeadToInitialState(
-      new StringFileData(
-        'the quick brown fox jumps over the lazy dog',
-        undefined,
-        [
-          {
-            range: { pos: 5, length: 5 },
-            tracking: {
-              ts: '2023-01-01T00:00:00.000Z',
-              type: 'insert',
-              userId: 'user1',
-            },
-          },
-          {
-            range: { pos: 12, length: 3 },
-            tracking: {
-              ts: '2023-01-01T00:00:00.000Z',
-              type: 'delete',
-              userId: 'user1',
-            },
-          },
-          {
-            range: { pos: 18, length: 5 },
-            tracking: {
-              ts: '2023-01-01T00:00:00.000Z',
-              type: 'insert',
-              userId: 'user1',
-            },
-          },
-        ]
-      ),
-      new TextOperation()
-        .retain(7)
-        .retain(13, {
-          tracking: new TrackingProps('delete', 'user1', new Date()),
-        })
-        .retain(23)
-    )
-  })
 })

 describe('compose', function () {
describe('compose', function () { describe('compose', function () {

View file

@@ -1,6 +1,6 @@
 promise-utils
 --dependencies=None
---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
+--docker-repos=gcr.io/overleaf-ops
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 ranges-tracker
 --dependencies=None
---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
+--docker-repos=gcr.io/overleaf-ops
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 redis-wrapper
 --dependencies=None
---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
+--docker-repos=gcr.io/overleaf-ops
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 settings
 --dependencies=None
---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
+--docker-repos=gcr.io/overleaf-ops
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 stream-utils
 --dependencies=None
---docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
+--docker-repos=gcr.io/overleaf-ops
 --env-add=
 --env-pass-through=
 --esmock-loader=False

package-lock.json (generated, 2454 lines changed)

File diff suppressed because it is too large

View file

@@ -37,7 +37,7 @@
     },
     "swagger-tools": {
       "body-parser": "1.20.3",
-      "multer": "2.0.1",
+      "multer": "2.0.0",
       "path-to-regexp": "3.3.0",
       "qs": "6.13.0"
     }

View file

@@ -140,7 +140,6 @@ const settings = {
   api: redisConfig,
   pubsub: redisConfig,
   project_history: redisConfig,
-  references: redisConfig,

   project_history_migration: {
     host: redisConfig.host,

View file

@@ -1,28 +0,0 @@
FROM sharelatex/sharelatex:5.5.0
# fix tls configuration in redis for history-v1
COPY pr_25168.patch .
RUN patch -p1 < pr_25168.patch && rm pr_25168.patch
# improve logging in history system
COPY pr_26086.patch .
RUN patch -p1 < pr_26086.patch && rm pr_26086.patch
# fix create-user.mjs script
COPY pr_26152.patch .
RUN patch -p1 < pr_26152.patch && rm pr_26152.patch
# check mongo featureCompatibilityVersion
COPY pr_26091.patch .
RUN patch -p1 < pr_26091.patch && rm pr_26091.patch
# update multer and tar-fs
RUN sed -i 's/"multer": "2.0.0"/"multer": "2.0.1"/g' package.json
RUN sed -i 's/"dockerode": "^4.0.5"/"dockerode": "^4.0.7"/g' services/clsi/package.json
RUN sed -i 's/"tar-fs": "^3.0.4"/"tar-fs": "^3.0.9"/g' services/clsi/package.json
RUN sed -i 's/199c5ff05bd375c508f4074498237baead7f5148/4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23/g' services/web/package.json
COPY package-lock.json.diff .
RUN patch package-lock.json < package-lock.json.diff
RUN npm install --omit=dev
RUN npm install @paralleldrive/cuid2@2.2.2 -w services/history-v1

File diff suppressed because it is too large

View file

@@ -1,19 +0,0 @@
--- a/services/history-v1/config/custom-environment-variables.json
+++ b/services/history-v1/config/custom-environment-variables.json
@@ -50,12 +50,14 @@
"history": {
"host": "OVERLEAF_REDIS_HOST",
"password": "OVERLEAF_REDIS_PASS",
- "port": "OVERLEAF_REDIS_PORT"
+ "port": "OVERLEAF_REDIS_PORT",
+ "tls": "OVERLEAF_REDIS_TLS"
},
"lock": {
"host": "OVERLEAF_REDIS_HOST",
"password": "OVERLEAF_REDIS_PASS",
- "port": "OVERLEAF_REDIS_PORT"
+ "port": "OVERLEAF_REDIS_PORT",
+ "tls": "OVERLEAF_REDIS_TLS"
}
}
}

View file

@@ -1,200 +0,0 @@
--- a/services/history-v1/api/controllers/project_import.js
+++ b/services/history-v1/api/controllers/project_import.js
@@ -35,6 +35,7 @@ async function importSnapshot(req, res) {
try {
snapshot = Snapshot.fromRaw(rawSnapshot)
} catch (err) {
+ logger.warn({ err, projectId }, 'failed to import snapshot')
return render.unprocessableEntity(res)
}
@@ -43,6 +44,7 @@ async function importSnapshot(req, res) {
historyId = await chunkStore.initializeProject(projectId, snapshot)
} catch (err) {
if (err instanceof chunkStore.AlreadyInitialized) {
+ logger.warn({ err, projectId }, 'already initialized')
return render.conflict(res)
} else {
throw err
--- a/services/history-v1/api/controllers/projects.js
+++ b/services/history-v1/api/controllers/projects.js
@@ -34,6 +34,7 @@ async function initializeProject(req, res, next) {
res.status(HTTPStatus.OK).json({ projectId })
} catch (err) {
if (err instanceof chunkStore.AlreadyInitialized) {
+ logger.warn({ err, projectId }, 'failed to initialize')
render.conflict(res)
} else {
throw err
@@ -242,11 +243,15 @@ async function createProjectBlob(req, res, next) {
const sizeLimit = new StreamSizeLimit(maxUploadSize)
await pipeline(req, sizeLimit, fs.createWriteStream(tmpPath))
if (sizeLimit.sizeLimitExceeded) {
+ logger.warn(
+ { projectId, expectedHash, maxUploadSize },
+ 'blob exceeds size threshold'
+ )
return render.requestEntityTooLarge(res)
}
const hash = await blobHash.fromFile(tmpPath)
if (hash !== expectedHash) {
- logger.debug({ hash, expectedHash }, 'Hash mismatch')
+ logger.warn({ projectId, hash, expectedHash }, 'Hash mismatch')
return render.conflict(res, 'File hash mismatch')
}
@@ -343,6 +348,10 @@ async function copyProjectBlob(req, res, next) {
targetBlobStore.getBlob(blobHash),
])
if (!sourceBlob) {
+ logger.warn(
+ { sourceProjectId, targetProjectId, blobHash },
+ 'missing source blob when copying across projects'
+ )
return render.notFound(res)
}
// Exit early if the blob exists in the target project.
--- a/services/history-v1/app.js
+++ b/services/history-v1/app.js
@@ -100,11 +100,13 @@ function setupErrorHandling() {
})
}
if (err.code === 'ENUM_MISMATCH') {
+ logger.warn({ err, projectId }, err.message)
return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({
message: 'invalid enum value: ' + err.paramName,
})
}
if (err.code === 'REQUIRED') {
+ logger.warn({ err, projectId }, err.message)
return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({
message: err.message,
})
--- a/services/project-history/app/js/HistoryStoreManager.js
+++ b/services/project-history/app/js/HistoryStoreManager.js
@@ -35,7 +35,10 @@ class StringStream extends stream.Readable {
_mocks.getMostRecentChunk = (projectId, historyId, callback) => {
const path = `projects/${historyId}/latest/history`
logger.debug({ projectId, historyId }, 'getting chunk from history service')
- _requestChunk({ path, json: true }, callback)
+ _requestChunk({ path, json: true }, (err, chunk) => {
+ if (err) return callback(OError.tag(err))
+ callback(null, chunk)
+ })
}
/**
@@ -54,7 +57,10 @@ export function getChunkAtVersion(projectId, historyId, version, callback) {
{ projectId, historyId, version },
'getting chunk from history service for version'
)
- _requestChunk({ path, json: true }, callback)
+ _requestChunk({ path, json: true }, (err, chunk) => {
+ if (err) return callback(OError.tag(err))
+ callback(null, chunk)
+ })
}
export function getMostRecentVersion(projectId, historyId, callback) {
@@ -68,8 +74,10 @@ export function getMostRecentVersion(projectId, historyId, callback) {
_.sortBy(chunk.chunk.history.changes || [], x => x.timestamp)
)
// find the latest project and doc versions in the chunk
- _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) =>
+ _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) => {
+ if (err1) err1 = OError.tag(err1)
_getLatestV2DocVersions(projectId, chunk, (err2, v2DocVersions) => {
+ if (err2) err2 = OError.tag(err2)
// return the project and doc versions
const projectStructureAndDocVersions = {
project: projectVersion,
@@ -83,7 +91,7 @@ export function getMostRecentVersion(projectId, historyId, callback) {
chunk
)
})
- )
+ })
})
}
@@ -211,7 +219,10 @@ export function getProjectBlob(historyId, blobHash, callback) {
logger.debug({ historyId, blobHash }, 'getting blob from history service')
_requestHistoryService(
{ path: `projects/${historyId}/blobs/${blobHash}` },
- callback
+ (err, blob) => {
+ if (err) return callback(OError.tag(err))
+ callback(null, blob)
+ }
)
}
@@ -277,7 +288,10 @@ function createBlobFromString(historyId, data, fileId, callback) {
(fsPath, cb) => {
_createBlob(historyId, fsPath, cb)
},
- callback
+ (err, hash) => {
+ if (err) return callback(OError.tag(err))
+ callback(null, hash)
+ }
)
}
@@ -330,7 +344,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
try {
ranges = HistoryBlobTranslator.createRangeBlobDataFromUpdate(update)
} catch (error) {
- return callback(error)
+ return callback(OError.tag(error))
}
createBlobFromString(
historyId,
@@ -338,7 +352,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
`project-${projectId}-doc-${update.doc}`,
(err, fileHash) => {
if (err) {
- return callback(err)
+ return callback(OError.tag(err))
}
if (ranges) {
createBlobFromString(
@@ -347,7 +361,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
`project-${projectId}-doc-${update.doc}-ranges`,
(err, rangesHash) => {
if (err) {
- return callback(err)
+ return callback(OError.tag(err))
}
logger.debug(
{ fileHash, rangesHash },
@@ -415,7 +429,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
},
(err, fileHash) => {
if (err) {
- return callback(err)
+ return callback(OError.tag(err))
}
if (update.hash && update.hash !== fileHash) {
logger.warn(
@@ -447,7 +461,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
},
(err, fileHash) => {
if (err) {
- return callback(err)
+ return callback(OError.tag(err))
}
logger.debug({ fileHash }, 'created empty blob for file')
callback(null, { file: fileHash })
@@ -520,7 +534,10 @@ export function initializeProject(historyId, callback) {
export function deleteProject(projectId, callback) {
_requestHistoryService(
{ method: 'DELETE', path: `projects/${projectId}` },
- callback
+ err => {
+ if (err) return callback(OError.tag(err))
+ callback(null)
+ }
)
}

View file

@@ -1,60 +0,0 @@
--- a/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
+++ b/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
@@ -7,6 +7,7 @@ import {
const { ObjectId } = mongodb
const MIN_MONGO_VERSION = [6, 0]
+const MIN_MONGO_FEATURE_COMPATIBILITY_VERSION = [6, 0]
async function main() {
let mongoClient
@@ -18,6 +19,7 @@ async function main() {
}
await checkMongoVersion(mongoClient)
+ await checkFeatureCompatibilityVersion(mongoClient)
try {
await testTransactions(mongoClient)
@@ -53,6 +55,41 @@ async function checkMongoVersion(mongoClient) {
}
}
+async function checkFeatureCompatibilityVersion(mongoClient) {
+ const {
+ featureCompatibilityVersion: { version },
+ } = await mongoClient
+ .db()
+ .admin()
+ .command({ getParameter: 1, featureCompatibilityVersion: 1 })
+ const [major, minor] = version.split('.').map(v => parseInt(v))
+ const [minMajor, minMinor] = MIN_MONGO_FEATURE_COMPATIBILITY_VERSION
+
+ if (major < minMajor || (major === minMajor && minor < minMinor)) {
+ const minVersion = MIN_MONGO_FEATURE_COMPATIBILITY_VERSION.join('.')
+ console.error(`
+The MongoDB server has featureCompatibilityVersion=${version}, but Overleaf requires at least version ${minVersion}.
+
+Open a mongo shell:
+- Overleaf Toolkit deployments: $ bin/mongo
+- Legacy docker-compose.yml deployments: $ docker exec -it mongo mongosh localhost/sharelatex
+
+In the mongo shell:
+> db.adminCommand( { setFeatureCompatibilityVersion: "${minMajor}.${minMinor}" } )
+
+Verify the new value:
+> db.adminCommand( { getParameter: 1, featureCompatibilityVersion: 1 } )
+ ...
+ {
+ featureCompatibilityVersion: { version: ${minMajor}.${minMinor}' },
+...
+
+Aborting.
+`)
+ process.exit(1)
+ }
+}
+
main()
.then(() => {
console.error('Mongodb is up.')

View file

@@ -1,16 +0,0 @@
--- a/services/web/modules/server-ce-scripts/scripts/create-user.mjs
+++ b/services/web/modules/server-ce-scripts/scripts/create-user.mjs
@@ -48,3 +48,13 @@ Please visit the following URL to set a password for ${email} and log in:
)
})
}
+
+if (filename === process.argv[1]) {
+ try {
+ await main()
+ process.exit(0)
+ } catch (error) {
+ console.error({ error })
+ process.exit(1)
+ }
+}

View file

@@ -6,8 +6,8 @@ all: test-e2e
 # Notable the container labels com.docker.compose.project.working_dir and com.docker.compose.project.config_files need to match when creating containers from the docker host (how you started things) and from host-admin (how tests reconfigure the instance).
 export PWD = $(shell pwd)

-export TEX_LIVE_DOCKER_IMAGE ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2023.1
-export ALL_TEX_LIVE_DOCKER_IMAGES ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2023.1,us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2022.1
+export TEX_LIVE_DOCKER_IMAGE ?= gcr.io/overleaf-ops/texlive-full:2023.1
+export ALL_TEX_LIVE_DOCKER_IMAGES ?= gcr.io/overleaf-ops/texlive-full:2023.1,gcr.io/overleaf-ops/texlive-full:2022.1
 export IMAGE_TAG_PRO ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/pro:latest
 export CYPRESS_SHARD ?=
 export COMPOSE_PROJECT_NAME ?= test
@@ -20,7 +20,6 @@ test-e2e-native:
	npm run cypress:open

 test-e2e:
-	docker compose build host-admin
	docker compose up --no-log-prefix --exit-code-from=e2e e2e

 test-e2e-open:
@@ -46,7 +45,7 @@ prefetch_custom_compose_pull:
 prefetch_custom: prefetch_custom_texlive
 prefetch_custom_texlive:
	echo "$$ALL_TEX_LIVE_DOCKER_IMAGES" | tr ',' '\n' | xargs -I% \
-	sh -exc 'tag=%; re_tag=quay.io/sharelatex/$${tag#*/*/*/}; docker pull $$tag; docker tag $$tag $$re_tag'
+	sh -exc 'tag=%; re_tag=quay.io/sharelatex/$${tag#*/*/}; docker pull $$tag; docker tag $$tag $$re_tag'

 prefetch_custom: prefetch_old
 prefetch_old:

View file

@@ -179,21 +179,6 @@ describe('admin panel', function () {
       cy.get('nav').findByText('Manage Users').click()
     })

-    it('displays expected tabs', () => {
-      const tabs = ['Users', 'License Usage']
-      cy.get('[role="tab"]').each((el, index) => {
-        cy.wrap(el).findByText(tabs[index]).click()
-      })
-      cy.get('[role="tab"]').should('have.length', tabs.length)
-    })
-
-    it('license usage tab', () => {
-      cy.get('a').contains('License Usage').click()
-      cy.findByText(
-        'An active user is one who has opened a project in this Server Pro instance in the last 12 months.'
-      )
-    })
-
     describe('create users', () => {
       beforeEach(() => {
         cy.get('a').contains('New User').click()

View file

@@ -131,7 +131,7 @@ services:

   saml:
     restart: always
-    image: us-east1-docker.pkg.dev/overleaf-ops/ol-docker/saml-test
+    image: gcr.io/overleaf-ops/saml-test
     environment:
       SAML_TEST_SP_ENTITY_ID: 'sharelatex-test-saml'
       SAML_BASE_URL_PATH: 'http://saml/simplesaml/'

View file

@@ -107,7 +107,7 @@ describe('git-bridge', function () {
         cy.get('code').contains(`git clone ${gitURL(id.toString())}`)
       })
       cy.findByText('Generate token').should('not.exist')
-      cy.findByText(/generate a new one in Account settings/)
+      cy.findByText(/generate a new one in Account settings/i)
       cy.findByText('Go to settings')
         .should('have.attr', 'target', '_blank')
         .and('have.attr', 'href', '/user/settings')

View file

@@ -37,8 +37,7 @@ export function createProject(
   }
   cy.findAllByRole('button').contains(newProjectButtonMatcher).click()
   // FIXME: This should only look in the left menu
-  // The upgrading tests create projects in older versions of Server Pro which used different casing of the project type. Use case-insensitive match.
-  cy.findAllByText(type, { exact: false }).first().click()
+  cy.findAllByText(new RegExp(type, 'i')).first().click()
   cy.findByRole('dialog').within(() => {
     cy.get('input').type(name)
     cy.findByText('Create').click()

View file

@@ -24,13 +24,10 @@ services:
       MOCHA_GREP: ${MOCHA_GREP}
       NODE_ENV: test
       NODE_OPTIONS: "--unhandled-rejections=strict"
-    volumes:
-      - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
     depends_on:
       mongo:
         condition: service_started
     user: node
-    entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
     command: npm run test:acceptance

View file

@@ -26,7 +26,6 @@ services:
       - .:/overleaf/services/chat
       - ../../node_modules:/overleaf/node_modules
       - ../../libraries:/overleaf/libraries
-      - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
     working_dir: /overleaf/services/chat
     environment:
       ELASTIC_SEARCH_DSN: es:9200
@@ -40,7 +39,6 @@ services:
     depends_on:
       mongo:
         condition: service_started
-    entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
     command: npm run --silent test:acceptance

   mongo:

View file

@@ -19,18 +19,18 @@ The CLSI can be configured through the following environment variables:
 * `ALLOWED_IMAGES` - Space separated list of allowed Docker TeX Live images
 * `CATCH_ERRORS` - Set to `true` to log uncaught exceptions
 * `COMPILE_GROUP_DOCKER_CONFIGS` - JSON string of Docker configs for compile groups
-* `SANDBOXED_COMPILES` - Set to true to use sibling containers
-* `SANDBOXED_COMPILES_HOST_DIR_COMPILES` - Working directory for LaTeX compiles
-* `SANDBOXED_COMPILES_HOST_DIR_OUTPUT` - Output directory for LaTeX compiles
+* `COMPILES_HOST_DIR` - Working directory for LaTeX compiles
+* `OUTPUT_HOST_DIR` - Output directory for LaTeX compiles
 * `COMPILE_SIZE_LIMIT` - Sets the body-parser [limit](https://github.com/expressjs/body-parser#limit)
+* `DOCKER_RUNNER` - Set to true to use sibling containers
 * `DOCKER_RUNTIME` -
 * `FILESTORE_DOMAIN_OVERRIDE` - The url for the filestore service e.g.`http://$FILESTORE_HOST:3009`
 * `FILESTORE_PARALLEL_FILE_DOWNLOADS` - Number of parallel file downloads
 * `LISTEN_ADDRESS` - The address for the RESTful service to listen on. Set to `0.0.0.0` to listen on all network interfaces
 * `PROCESS_LIFE_SPAN_LIMIT_MS` - Process life span limit in milliseconds
 * `SMOKE_TEST` - Whether to run smoke tests
-* `TEXLIVE_IMAGE` - The TeX Live Docker image to use for sibling containers, e.g. `us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2017.1`
-* `TEX_LIVE_IMAGE_NAME_OVERRIDE` - The name of the registry for the Docker image e.g. `us-east1-docker.pkg.dev/overleaf-ops/ol-docker`
+* `TEXLIVE_IMAGE` - The TeX Live Docker image to use for sibling containers, e.g. `gcr.io/overleaf-ops/texlive-full:2017.1`
+* `TEX_LIVE_IMAGE_NAME_OVERRIDE` - The name of the registry for the Docker image e.g. `gcr.io/overleaf-ops`
 * `TEXLIVE_IMAGE_USER` - When using sibling containers, the user to run as in the TeX Live image. Defaults to `tex`
 * `TEXLIVE_OPENOUT_ANY` - Sets the `openout_any` environment variable for TeX Live (see the `\openout` primitive [documentation](http://tug.org/texinfohtml/web2c.html#tex-invocation))
@@ -63,10 +63,10 @@ Then start the Docker container:
 docker run --rm \
   -p 127.0.0.1:3013:3013 \
   -e LISTEN_ADDRESS=0.0.0.0 \
-  -e SANDBOXED_COMPILES=true \
+  -e DOCKER_RUNNER=true \
   -e TEXLIVE_IMAGE=texlive/texlive \
   -e TEXLIVE_IMAGE_USER=root \
-  -e SANDBOXED_COMPILES_HOST_DIR_COMPILES="$PWD/compiles" \
+  -e COMPILES_HOST_DIR="$PWD/compiles" \
   -v "$PWD/compiles:/overleaf/services/clsi/compiles" \
   -v "$PWD/cache:/overleaf/services/clsi/cache" \
   -v /var/run/docker.sock:/var/run/docker.sock \
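Note: this README hunk is one instance of a rename that runs through the whole compare: SANDBOXED_COMPILES corresponds to DOCKER_RUNNER, SANDBOXED_COMPILES_HOST_DIR_COMPILES to COMPILES_HOST_DIR, and SANDBOXED_COMPILES_HOST_DIR_OUTPUT to OUTPUT_HOST_DIR. The accepted values are unchanged; only the variable names differ (compare the docker-compose and build-script hunks elsewhere in this diff, and note that the settings file below still reads both DOCKER_RUNNER and SANDBOXED_COMPILES).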

View file

@@ -2,7 +2,7 @@ clsi
 --data-dirs=cache,compiles,output
 --dependencies=
 --docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
---env-add=ENABLE_PDF_CACHING="true",PDF_CACHING_ENABLE_WORKER_POOL="true",ALLOWED_IMAGES=quay.io/sharelatex/texlive-full:2017.1,TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1,TEX_LIVE_IMAGE_NAME_OVERRIDE=us-east1-docker.pkg.dev/overleaf-ops/ol-docker,TEXLIVE_IMAGE_USER="tex",SANDBOXED_COMPILES="true",SANDBOXED_COMPILES_HOST_DIR_COMPILES=$PWD/compiles,SANDBOXED_COMPILES_HOST_DIR_OUTPUT=$PWD/output
+--env-add=ENABLE_PDF_CACHING="true",PDF_CACHING_ENABLE_WORKER_POOL="true",ALLOWED_IMAGES=quay.io/sharelatex/texlive-full:2017.1,TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1,TEX_LIVE_IMAGE_NAME_OVERRIDE=us-east1-docker.pkg.dev/overleaf-ops/ol-docker,TEXLIVE_IMAGE_USER="tex",DOCKER_RUNNER="true",COMPILES_HOST_DIR=$PWD/compiles,OUTPUT_HOST_DIR=$PWD/output
 --env-pass-through=
 --esmock-loader=False
 --node-version=22.15.1

View file

@@ -141,11 +141,9 @@ if ((process.env.DOCKER_RUNNER || process.env.SANDBOXED_COMPILES) === 'true') {
   let seccompProfilePath
   try {
     seccompProfilePath = Path.resolve(__dirname, '../seccomp/clsi-profile.json')
-    module.exports.clsi.docker.seccomp_profile =
-      process.env.SECCOMP_PROFILE ||
-      JSON.stringify(
-        JSON.parse(require('node:fs').readFileSync(seccompProfilePath))
-      )
+    module.exports.clsi.docker.seccomp_profile = JSON.stringify(
+      JSON.parse(require('node:fs').readFileSync(seccompProfilePath))
+    )
   } catch (error) {
     console.error(
       error,

View file

@@ -29,9 +29,9 @@ services:
       TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
       TEX_LIVE_IMAGE_NAME_OVERRIDE: us-east1-docker.pkg.dev/overleaf-ops/ol-docker
       TEXLIVE_IMAGE_USER: "tex"
-      SANDBOXED_COMPILES: "true"
-      SANDBOXED_COMPILES_HOST_DIR_COMPILES: $PWD/compiles
-      SANDBOXED_COMPILES_HOST_DIR_OUTPUT: $PWD/output
+      DOCKER_RUNNER: "true"
+      COMPILES_HOST_DIR: $PWD/compiles
+      OUTPUT_HOST_DIR: $PWD/output
     volumes:
       - ./compiles:/overleaf/services/clsi/compiles
       - /var/run/docker.sock:/var/run/docker.sock

View file

@@ -47,8 +47,8 @@ services:
       TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
       TEX_LIVE_IMAGE_NAME_OVERRIDE: us-east1-docker.pkg.dev/overleaf-ops/ol-docker
       TEXLIVE_IMAGE_USER: "tex"
-      SANDBOXED_COMPILES: "true"
-      SANDBOXED_COMPILES_HOST_DIR_COMPILES: $PWD/compiles
-      SANDBOXED_COMPILES_HOST_DIR_OUTPUT: $PWD/output
+      DOCKER_RUNNER: "true"
+      COMPILES_HOST_DIR: $PWD/compiles
+      OUTPUT_HOST_DIR: $PWD/output
     command: npm run --silent test:acceptance

View file

@@ -27,13 +27,13 @@
     "async": "^3.2.5",
     "body-parser": "^1.20.3",
     "bunyan": "^1.8.15",
-    "dockerode": "^4.0.7",
+    "dockerode": "^4.0.5",
     "express": "^4.21.2",
     "lodash": "^4.17.21",
     "p-limit": "^3.1.0",
     "request": "^2.88.2",
     "send": "^0.19.0",
-    "tar-fs": "^3.0.9",
+    "tar-fs": "^3.0.4",
     "workerpool": "^6.1.5"
   },
   "devDependencies": {

View file

@@ -24,13 +24,10 @@ services:
       MOCHA_GREP: ${MOCHA_GREP}
       NODE_ENV: test
       NODE_OPTIONS: "--unhandled-rejections=strict"
-    volumes:
-      - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
     depends_on:
       mongo:
         condition: service_started
     user: node
-    entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
     command: npm run test:acceptance

View file

@@ -26,7 +26,6 @@ services:
       - .:/overleaf/services/contacts
       - ../../node_modules:/overleaf/node_modules
       - ../../libraries:/overleaf/libraries
-      - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
     working_dir: /overleaf/services/contacts
     environment:
       ELASTIC_SEARCH_DSN: es:9200
@@ -40,7 +39,6 @@ services:
     depends_on:
       mongo:
         condition: service_started
-    entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
     command: npm run --silent test:acceptance

   mongo:

View file

@ -6,9 +6,9 @@
"main": "app.js", "main": "app.js",
"scripts": { "scripts": {
"start": "node app.js", "start": "node app.js",
"test:acceptance:_run": "mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", "test:acceptance:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js",
"test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP", "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
"test:unit:_run": "mocha --loader=esmock --recursive --reporter spec $@ test/unit/js", "test:unit:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec $@ test/unit/js",
"test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
"nodemon": "node --watch app.js", "nodemon": "node --watch app.js",
"lint": "eslint --max-warnings 0 --format unix .", "lint": "eslint --max-warnings 0 --format unix .",

View file

@ -50,14 +50,6 @@ app.param('doc_id', function (req, res, next, docId) {
app.get('/project/:project_id/doc-deleted', HttpController.getAllDeletedDocs) app.get('/project/:project_id/doc-deleted', HttpController.getAllDeletedDocs)
app.get('/project/:project_id/doc', HttpController.getAllDocs) app.get('/project/:project_id/doc', HttpController.getAllDocs)
app.get('/project/:project_id/ranges', HttpController.getAllRanges) app.get('/project/:project_id/ranges', HttpController.getAllRanges)
app.get(
'/project/:project_id/comment-thread-ids',
HttpController.getCommentThreadIds
)
app.get(
'/project/:project_id/tracked-changes-user-ids',
HttpController.getTrackedChangesUserIds
)
app.get('/project/:project_id/has-ranges', HttpController.projectHasRanges) app.get('/project/:project_id/has-ranges', HttpController.projectHasRanges)
app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc) app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc)
app.get('/project/:project_id/doc/:doc_id/deleted', HttpController.isDocDeleted) app.get('/project/:project_id/doc/:doc_id/deleted', HttpController.isDocDeleted)

View file

@ -1,4 +1,5 @@
const MongoManager = require('./MongoManager') const { callbackify } = require('node:util')
const MongoManager = require('./MongoManager').promises
const Errors = require('./Errors') const Errors = require('./Errors')
const logger = require('@overleaf/logger') const logger = require('@overleaf/logger')
const Settings = require('@overleaf/settings') const Settings = require('@overleaf/settings')
@ -7,12 +8,29 @@ const { ReadableString } = require('@overleaf/stream-utils')
const RangeManager = require('./RangeManager') const RangeManager = require('./RangeManager')
const PersistorManager = require('./PersistorManager') const PersistorManager = require('./PersistorManager')
const pMap = require('p-map') const pMap = require('p-map')
const { streamToBuffer } = require('./StreamToBuffer') const { streamToBuffer } = require('./StreamToBuffer').promises
const { BSON } = require('mongodb-legacy') const { BSON } = require('mongodb-legacy')
const PARALLEL_JOBS = Settings.parallelArchiveJobs const PARALLEL_JOBS = Settings.parallelArchiveJobs
const UN_ARCHIVE_BATCH_SIZE = Settings.unArchiveBatchSize const UN_ARCHIVE_BATCH_SIZE = Settings.unArchiveBatchSize
module.exports = {
archiveAllDocs: callbackify(archiveAllDocs),
archiveDoc: callbackify(archiveDoc),
unArchiveAllDocs: callbackify(unArchiveAllDocs),
unarchiveDoc: callbackify(unarchiveDoc),
destroyProject: callbackify(destroyProject),
getDoc: callbackify(getDoc),
promises: {
archiveAllDocs,
archiveDoc,
unArchiveAllDocs,
unarchiveDoc,
destroyProject,
getDoc,
},
}
async function archiveAllDocs(projectId) { async function archiveAllDocs(projectId) {
if (!_isArchivingEnabled()) { if (!_isArchivingEnabled()) {
return return
@ -44,8 +62,6 @@ async function archiveDoc(projectId, docId) {
throw new Error('doc has no lines') throw new Error('doc has no lines')
} }
RangeManager.fixCommentIds(doc)
// warn about any oversized docs already in mongo // warn about any oversized docs already in mongo
const linesSize = BSON.calculateObjectSize(doc.lines || {}) const linesSize = BSON.calculateObjectSize(doc.lines || {})
const rangesSize = BSON.calculateObjectSize(doc.ranges || {}) const rangesSize = BSON.calculateObjectSize(doc.ranges || {})
@ -209,12 +225,3 @@ function _isArchivingEnabled() {
return true return true
} }
module.exports = {
archiveAllDocs,
archiveDoc,
unArchiveAllDocs,
unarchiveDoc,
destroyProject,
getDoc,
}
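The right-hand column converts DocArchiveManager to the dual-export pattern used across these services: each async function is wrapped with callbackify from node:util for legacy callers, while the originals remain reachable under a promises namespace. A minimal standalone sketch of the pattern (the getDoc body is a placeholder):

const { callbackify } = require('node:util')

async function getDoc(projectId, docId) {
  return { _id: docId, projectId, lines: [] } // placeholder body
}

module.exports = {
  // callback style: getDoc(projectId, docId, (err, doc) => { ... })
  getDoc: callbackify(getDoc),
  // promise style: await module.exports.promises.getDoc(projectId, docId)
  promises: { getDoc },
}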

View file

@ -5,6 +5,7 @@ const _ = require('lodash')
const DocArchive = require('./DocArchiveManager') const DocArchive = require('./DocArchiveManager')
const RangeManager = require('./RangeManager') const RangeManager = require('./RangeManager')
const Settings = require('@overleaf/settings') const Settings = require('@overleaf/settings')
const { callbackifyAll } = require('@overleaf/promise-utils')
const { setTimeout } = require('node:timers/promises') const { setTimeout } = require('node:timers/promises')
/** /**
@ -28,7 +29,7 @@ const DocManager = {
throw new Error('must include inS3 when getting doc') throw new Error('must include inS3 when getting doc')
} }
const doc = await MongoManager.findDoc(projectId, docId, filter) const doc = await MongoManager.promises.findDoc(projectId, docId, filter)
if (doc == null) { if (doc == null) {
throw new Errors.NotFoundError( throw new Errors.NotFoundError(
@ -37,19 +38,15 @@ const DocManager = {
} }
if (doc.inS3) { if (doc.inS3) {
await DocArchive.unarchiveDoc(projectId, docId) await DocArchive.promises.unarchiveDoc(projectId, docId)
return await DocManager._getDoc(projectId, docId, filter) return await DocManager._getDoc(projectId, docId, filter)
} }
if (filter.ranges) {
RangeManager.fixCommentIds(doc)
}
return doc return doc
}, },
async isDocDeleted(projectId, docId) { async isDocDeleted(projectId, docId) {
const doc = await MongoManager.findDoc(projectId, docId, { const doc = await MongoManager.promises.findDoc(projectId, docId, {
deleted: true, deleted: true,
}) })
@ -77,7 +74,7 @@ const DocManager = {
// returns the doc without any version information // returns the doc without any version information
async _peekRawDoc(projectId, docId) { async _peekRawDoc(projectId, docId) {
const doc = await MongoManager.findDoc(projectId, docId, { const doc = await MongoManager.promises.findDoc(projectId, docId, {
lines: true, lines: true,
rev: true, rev: true,
deleted: true, deleted: true,
@ -94,7 +91,7 @@ const DocManager = {
if (doc.inS3) { if (doc.inS3) {
// skip the unarchiving to mongo when getting a doc // skip the unarchiving to mongo when getting a doc
const archivedDoc = await DocArchive.getDoc(projectId, docId) const archivedDoc = await DocArchive.promises.getDoc(projectId, docId)
Object.assign(doc, archivedDoc) Object.assign(doc, archivedDoc)
} }
@ -105,7 +102,7 @@ const DocManager = {
// without unarchiving it (avoids unnecessary writes to mongo) // without unarchiving it (avoids unnecessary writes to mongo)
async peekDoc(projectId, docId) { async peekDoc(projectId, docId) {
const doc = await DocManager._peekRawDoc(projectId, docId) const doc = await DocManager._peekRawDoc(projectId, docId)
await MongoManager.checkRevUnchanged(doc) await MongoManager.promises.checkRevUnchanged(doc)
return doc return doc
}, },
@ -114,18 +111,16 @@ const DocManager = {
lines: true, lines: true,
inS3: true, inS3: true,
}) })
if (!doc) throw new Errors.NotFoundError() return doc
if (!Array.isArray(doc.lines)) throw new Errors.DocWithoutLinesError()
return doc.lines.join('\n')
}, },
async getAllDeletedDocs(projectId, filter) { async getAllDeletedDocs(projectId, filter) {
return await MongoManager.getProjectsDeletedDocs(projectId, filter) return await MongoManager.promises.getProjectsDeletedDocs(projectId, filter)
}, },
async getAllNonDeletedDocs(projectId, filter) { async getAllNonDeletedDocs(projectId, filter) {
await DocArchive.unArchiveAllDocs(projectId) await DocArchive.promises.unArchiveAllDocs(projectId)
const docs = await MongoManager.getProjectsDocs( const docs = await MongoManager.promises.getProjectsDocs(
projectId, projectId,
{ include_deleted: false }, { include_deleted: false },
filter filter
@ -133,46 +128,15 @@ const DocManager = {
if (docs == null) { if (docs == null) {
throw new Errors.NotFoundError(`No docs for project ${projectId}`) throw new Errors.NotFoundError(`No docs for project ${projectId}`)
} }
if (filter.ranges) {
for (const doc of docs) {
RangeManager.fixCommentIds(doc)
}
}
return docs return docs
}, },
async getCommentThreadIds(projectId) {
const docs = await DocManager.getAllNonDeletedDocs(projectId, {
_id: true,
ranges: true,
})
const byDoc = new Map()
for (const doc of docs) {
const ids = new Set()
for (const comment of doc.ranges?.comments || []) {
ids.add(comment.op.t)
}
if (ids.size > 0) byDoc.set(doc._id.toString(), Array.from(ids))
}
return Object.fromEntries(byDoc.entries())
},
async getTrackedChangesUserIds(projectId) {
const docs = await DocManager.getAllNonDeletedDocs(projectId, {
ranges: true,
})
const userIds = new Set()
for (const doc of docs) {
for (const change of doc.ranges?.changes || []) {
if (change.metadata.user_id === 'anonymous-user') continue
userIds.add(change.metadata.user_id)
}
}
return Array.from(userIds)
},
async projectHasRanges(projectId) { async projectHasRanges(projectId) {
const docs = await MongoManager.getProjectsDocs(projectId, {}, { _id: 1 }) const docs = await MongoManager.promises.getProjectsDocs(
projectId,
{},
{ _id: 1 }
)
const docIds = docs.map(doc => doc._id) const docIds = docs.map(doc => doc._id)
for (const docId of docIds) { for (const docId of docIds) {
const doc = await DocManager.peekDoc(projectId, docId) const doc = await DocManager.peekDoc(projectId, docId)
@ -283,7 +247,7 @@ const DocManager = {
} }
modified = true modified = true
await MongoManager.upsertIntoDocCollection( await MongoManager.promises.upsertIntoDocCollection(
projectId, projectId,
docId, docId,
doc?.rev, doc?.rev,
@ -298,7 +262,11 @@ const DocManager = {
async patchDoc(projectId, docId, meta) { async patchDoc(projectId, docId, meta) {
const projection = { _id: 1, deleted: true } const projection = { _id: 1, deleted: true }
const doc = await MongoManager.findDoc(projectId, docId, projection) const doc = await MongoManager.promises.findDoc(
projectId,
docId,
projection
)
if (!doc) { if (!doc) {
throw new Errors.NotFoundError( throw new Errors.NotFoundError(
`No such project/doc to delete: ${projectId}/${docId}` `No such project/doc to delete: ${projectId}/${docId}`
@ -307,7 +275,7 @@ const DocManager = {
if (meta.deleted && Settings.docstore.archiveOnSoftDelete) { if (meta.deleted && Settings.docstore.archiveOnSoftDelete) {
// The user will not read this doc anytime soon. Flush it out of mongo. // The user will not read this doc anytime soon. Flush it out of mongo.
DocArchive.archiveDoc(projectId, docId).catch(err => { DocArchive.promises.archiveDoc(projectId, docId).catch(err => {
logger.warn( logger.warn(
{ projectId, docId, err }, { projectId, docId, err },
'archiving a single doc in the background failed' 'archiving a single doc in the background failed'
@ -315,8 +283,15 @@ const DocManager = {
}) })
} }
await MongoManager.patchDoc(projectId, docId, meta) await MongoManager.promises.patchDoc(projectId, docId, meta)
}, },
} }
module.exports = DocManager module.exports = {
...callbackifyAll(DocManager, {
multiResult: {
updateDoc: ['modified', 'rev'],
},
}),
promises: DocManager,
}
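For DocManager, the right-hand column reaches for callbackifyAll from @overleaf/promise-utils rather than wrapping each function by hand. The multiResult option, as used above, appears to spread a resolved object across several callback arguments; a sketch assuming exactly that behaviour:

const { callbackifyAll } = require('@overleaf/promise-utils')

const DocManager = {
  async updateDoc(/* projectId, docId, lines, version, ranges */) {
    return { modified: true, rev: 7 }
  },
}

const callbackApi = callbackifyAll(DocManager, {
  multiResult: { updateDoc: ['modified', 'rev'] },
})

// callers receive (err, modified, rev) instead of (err, { modified, rev })
callbackApi.updateDoc((err, modified, rev) => {
  if (err) throw err
  console.log(modified, rev) // true 7
})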

View file

@ -10,13 +10,10 @@ class DocRevValueError extends OError {}
class DocVersionDecrementedError extends OError {} class DocVersionDecrementedError extends OError {}
class DocWithoutLinesError extends OError {}
module.exports = { module.exports = {
Md5MismatchError, Md5MismatchError,
DocModifiedError, DocModifiedError,
DocRevValueError, DocRevValueError,
DocVersionDecrementedError, DocVersionDecrementedError,
DocWithoutLinesError,
...Errors, ...Errors,
} }

View file

@ -1,35 +1,67 @@
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const { db, ObjectId } = require('./mongodb') const { db, ObjectId } = require('./mongodb')
const request = require('request')
const async = require('async')
const _ = require('lodash') const _ = require('lodash')
const crypto = require('node:crypto') const crypto = require('node:crypto')
const settings = require('@overleaf/settings') const settings = require('@overleaf/settings')
const { port } = settings.internal.docstore const { port } = settings.internal.docstore
const logger = require('@overleaf/logger') const logger = require('@overleaf/logger')
const { fetchNothing, fetchJson } = require('@overleaf/fetch-utils')
async function check() {
const docId = new ObjectId()
const projectId = new ObjectId(settings.docstore.healthCheck.project_id)
const url = `http://127.0.0.1:${port}/project/${projectId}/doc/${docId}`
const lines = [
'smoke test - delete me',
`${crypto.randomBytes(32).toString('hex')}`,
]
logger.debug({ lines, url, docId, projectId }, 'running health check')
let body
try {
await fetchNothing(url, {
method: 'POST',
json: { lines, version: 42, ranges: {} },
signal: AbortSignal.timeout(3_000),
})
body = await fetchJson(url, { signal: AbortSignal.timeout(3_000) })
} finally {
await db.docs.deleteOne({ _id: docId, project_id: projectId })
}
if (!_.isEqual(body?.lines, lines)) {
throw new Error(`health check lines not equal ${body.lines} != ${lines}`)
}
}
module.exports = { module.exports = {
check, check(callback) {
const docId = new ObjectId()
const projectId = new ObjectId(settings.docstore.healthCheck.project_id)
const url = `http://127.0.0.1:${port}/project/${projectId}/doc/${docId}`
const lines = [
'smoke test - delete me',
`${crypto.randomBytes(32).toString('hex')}`,
]
const getOpts = () => ({
url,
timeout: 3000,
})
logger.debug({ lines, url, docId, projectId }, 'running health check')
const jobs = [
function (cb) {
const opts = getOpts()
opts.json = { lines, version: 42, ranges: {} }
return request.post(opts, cb)
},
function (cb) {
const opts = getOpts()
opts.json = true
return request.get(opts, function (err, res, body) {
if (err != null) {
logger.err({ err }, 'docstore returned an error in health check get')
return cb(err)
} else if (res == null) {
return cb(new Error('no response from docstore with get check'))
} else if ((res != null ? res.statusCode : undefined) !== 200) {
return cb(new Error(`status code not 200, its ${res.statusCode}`))
} else if (
_.isEqual(body != null ? body.lines : undefined, lines) &&
(body != null ? body._id : undefined) === docId.toString()
) {
return cb()
} else {
return cb(
new Error(
`health check lines not equal ${body.lines} != ${lines}`
)
)
}
})
},
cb => db.docs.deleteOne({ _id: docId, project_id: projectId }, cb),
]
return async.series(jobs, callback)
},
} }
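The left-hand HealthChecker drives the same POST-then-GET smoke test through @overleaf/fetch-utils with AbortSignal timeouts, while the right-hand one keeps request plus async.series. For comparison, a sketch of the same round trip with Node 18+'s built-in fetch (url and lines as in the code above):

async function smokeTest(url, lines) {
  const post = await fetch(url, {
    method: 'POST',
    headers: { 'content-type': 'application/json' },
    body: JSON.stringify({ lines, version: 42, ranges: {} }),
    signal: AbortSignal.timeout(3000),
  })
  if (!post.ok) throw new Error(`POST failed with status ${post.status}`)

  const get = await fetch(url, { signal: AbortSignal.timeout(3000) })
  const body = await get.json()
  if (JSON.stringify(body.lines) !== JSON.stringify(lines)) {
    throw new Error(`health check lines not equal ${body.lines} != ${lines}`)
  }
}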

View file

@ -4,104 +4,143 @@ const DocArchive = require('./DocArchiveManager')
const HealthChecker = require('./HealthChecker') const HealthChecker = require('./HealthChecker')
const Errors = require('./Errors') const Errors = require('./Errors')
const Settings = require('@overleaf/settings') const Settings = require('@overleaf/settings')
const { expressify } = require('@overleaf/promise-utils')
async function getDoc(req, res) { function getDoc(req, res, next) {
const { doc_id: docId, project_id: projectId } = req.params const { doc_id: docId, project_id: projectId } = req.params
const includeDeleted = req.query.include_deleted === 'true' const includeDeleted = req.query.include_deleted === 'true'
logger.debug({ projectId, docId }, 'getting doc') logger.debug({ projectId, docId }, 'getting doc')
const doc = await DocManager.getFullDoc(projectId, docId) DocManager.getFullDoc(projectId, docId, function (error, doc) {
logger.debug({ docId, projectId }, 'got doc') if (error) {
if (doc.deleted && !includeDeleted) { return next(error)
res.sendStatus(404) }
} else { logger.debug({ docId, projectId }, 'got doc')
res.json(_buildDocView(doc)) if (doc == null) {
} res.sendStatus(404)
} else if (doc.deleted && !includeDeleted) {
res.sendStatus(404)
} else {
res.json(_buildDocView(doc))
}
})
} }
async function peekDoc(req, res) { function peekDoc(req, res, next) {
const { doc_id: docId, project_id: projectId } = req.params const { doc_id: docId, project_id: projectId } = req.params
logger.debug({ projectId, docId }, 'peeking doc') logger.debug({ projectId, docId }, 'peeking doc')
const doc = await DocManager.peekDoc(projectId, docId) DocManager.peekDoc(projectId, docId, function (error, doc) {
res.setHeader('x-doc-status', doc.inS3 ? 'archived' : 'active') if (error) {
res.json(_buildDocView(doc)) return next(error)
}
if (doc == null) {
res.sendStatus(404)
} else {
res.setHeader('x-doc-status', doc.inS3 ? 'archived' : 'active')
res.json(_buildDocView(doc))
}
})
} }
async function isDocDeleted(req, res) { function isDocDeleted(req, res, next) {
const { doc_id: docId, project_id: projectId } = req.params const { doc_id: docId, project_id: projectId } = req.params
const deleted = await DocManager.isDocDeleted(projectId, docId) DocManager.isDocDeleted(projectId, docId, function (error, deleted) {
res.json({ deleted }) if (error) {
return next(error)
}
res.json({ deleted })
})
} }
async function getRawDoc(req, res) { function getRawDoc(req, res, next) {
const { doc_id: docId, project_id: projectId } = req.params const { doc_id: docId, project_id: projectId } = req.params
logger.debug({ projectId, docId }, 'getting raw doc') logger.debug({ projectId, docId }, 'getting raw doc')
const content = await DocManager.getDocLines(projectId, docId) DocManager.getDocLines(projectId, docId, function (error, doc) {
res.setHeader('content-type', 'text/plain') if (error) {
res.send(content) return next(error)
}
if (doc == null) {
res.sendStatus(404)
} else {
res.setHeader('content-type', 'text/plain')
res.send(_buildRawDocView(doc))
}
})
} }
async function getAllDocs(req, res) { function getAllDocs(req, res, next) {
const { project_id: projectId } = req.params const { project_id: projectId } = req.params
logger.debug({ projectId }, 'getting all docs') logger.debug({ projectId }, 'getting all docs')
const docs = await DocManager.getAllNonDeletedDocs(projectId, { DocManager.getAllNonDeletedDocs(
lines: true, projectId,
rev: true, { lines: true, rev: true },
}) function (error, docs) {
const docViews = _buildDocsArrayView(projectId, docs) if (docs == null) {
for (const docView of docViews) { docs = []
if (!docView.lines) { }
logger.warn({ projectId, docId: docView._id }, 'missing doc lines') if (error) {
docView.lines = [] return next(error)
}
const docViews = _buildDocsArrayView(projectId, docs)
for (const docView of docViews) {
if (!docView.lines) {
logger.warn({ projectId, docId: docView._id }, 'missing doc lines')
docView.lines = []
}
}
res.json(docViews)
} }
}
res.json(docViews)
}
async function getAllDeletedDocs(req, res) {
const { project_id: projectId } = req.params
logger.debug({ projectId }, 'getting all deleted docs')
const docs = await DocManager.getAllDeletedDocs(projectId, {
name: true,
deletedAt: true,
})
res.json(
docs.map(doc => ({
_id: doc._id.toString(),
name: doc.name,
deletedAt: doc.deletedAt,
}))
) )
} }
async function getAllRanges(req, res) { function getAllDeletedDocs(req, res, next) {
const { project_id: projectId } = req.params
logger.debug({ projectId }, 'getting all deleted docs')
DocManager.getAllDeletedDocs(
projectId,
{ name: true, deletedAt: true },
function (error, docs) {
if (error) {
return next(error)
}
res.json(
docs.map(doc => ({
_id: doc._id.toString(),
name: doc.name,
deletedAt: doc.deletedAt,
}))
)
}
)
}
function getAllRanges(req, res, next) {
const { project_id: projectId } = req.params const { project_id: projectId } = req.params
logger.debug({ projectId }, 'getting all ranges') logger.debug({ projectId }, 'getting all ranges')
const docs = await DocManager.getAllNonDeletedDocs(projectId, { DocManager.getAllNonDeletedDocs(
ranges: true, projectId,
{ ranges: true },
function (error, docs) {
if (docs == null) {
docs = []
}
if (error) {
return next(error)
}
res.json(_buildDocsArrayView(projectId, docs))
}
)
}
function projectHasRanges(req, res, next) {
const { project_id: projectId } = req.params
DocManager.projectHasRanges(projectId, (err, projectHasRanges) => {
if (err) {
return next(err)
}
res.json({ projectHasRanges })
}) })
res.json(_buildDocsArrayView(projectId, docs))
} }
async function getCommentThreadIds(req, res) { function updateDoc(req, res, next) {
const { project_id: projectId } = req.params
const threadIds = await DocManager.getCommentThreadIds(projectId)
res.json(threadIds)
}
async function getTrackedChangesUserIds(req, res) {
const { project_id: projectId } = req.params
const userIds = await DocManager.getTrackedChangesUserIds(projectId)
res.json(userIds)
}
async function projectHasRanges(req, res) {
const { project_id: projectId } = req.params
const projectHasRanges = await DocManager.projectHasRanges(projectId)
res.json({ projectHasRanges })
}
async function updateDoc(req, res) {
const { doc_id: docId, project_id: projectId } = req.params const { doc_id: docId, project_id: projectId } = req.params
const lines = req.body?.lines const lines = req.body?.lines
const version = req.body?.version const version = req.body?.version
@ -133,20 +172,25 @@ async function updateDoc(req, res) {
} }
logger.debug({ projectId, docId }, 'got http request to update doc') logger.debug({ projectId, docId }, 'got http request to update doc')
const { modified, rev } = await DocManager.updateDoc( DocManager.updateDoc(
projectId, projectId,
docId, docId,
lines, lines,
version, version,
ranges ranges,
function (error, modified, rev) {
if (error) {
return next(error)
}
res.json({
modified,
rev,
})
}
) )
res.json({
modified,
rev,
})
} }
async function patchDoc(req, res) { function patchDoc(req, res, next) {
const { doc_id: docId, project_id: projectId } = req.params const { doc_id: docId, project_id: projectId } = req.params
logger.debug({ projectId, docId }, 'patching doc') logger.debug({ projectId, docId }, 'patching doc')
@ -159,8 +203,12 @@ async function patchDoc(req, res) {
logger.fatal({ field }, 'joi validation for patchDoc is broken') logger.fatal({ field }, 'joi validation for patchDoc is broken')
} }
}) })
await DocManager.patchDoc(projectId, docId, meta) DocManager.patchDoc(projectId, docId, meta, function (error) {
res.sendStatus(204) if (error) {
return next(error)
}
res.sendStatus(204)
})
} }
function _buildDocView(doc) { function _buildDocView(doc) {
@ -173,6 +221,10 @@ function _buildDocView(doc) {
return docView return docView
} }
function _buildRawDocView(doc) {
return (doc?.lines ?? []).join('\n')
}
function _buildDocsArrayView(projectId, docs) { function _buildDocsArrayView(projectId, docs) {
const docViews = [] const docViews = []
for (const doc of docs) { for (const doc of docs) {
@ -189,69 +241,79 @@ function _buildDocsArrayView(projectId, docs) {
return docViews return docViews
} }
async function archiveAllDocs(req, res) { function archiveAllDocs(req, res, next) {
const { project_id: projectId } = req.params const { project_id: projectId } = req.params
logger.debug({ projectId }, 'archiving all docs') logger.debug({ projectId }, 'archiving all docs')
await DocArchive.archiveAllDocs(projectId) DocArchive.archiveAllDocs(projectId, function (error) {
res.sendStatus(204) if (error) {
return next(error)
}
res.sendStatus(204)
})
} }
async function archiveDoc(req, res) { function archiveDoc(req, res, next) {
const { doc_id: docId, project_id: projectId } = req.params const { doc_id: docId, project_id: projectId } = req.params
logger.debug({ projectId, docId }, 'archiving a doc') logger.debug({ projectId, docId }, 'archiving a doc')
await DocArchive.archiveDoc(projectId, docId) DocArchive.archiveDoc(projectId, docId, function (error) {
res.sendStatus(204) if (error) {
return next(error)
}
res.sendStatus(204)
})
} }
async function unArchiveAllDocs(req, res) { function unArchiveAllDocs(req, res, next) {
const { project_id: projectId } = req.params const { project_id: projectId } = req.params
logger.debug({ projectId }, 'unarchiving all docs') logger.debug({ projectId }, 'unarchiving all docs')
try { DocArchive.unArchiveAllDocs(projectId, function (err) {
await DocArchive.unArchiveAllDocs(projectId) if (err) {
} catch (err) { if (err instanceof Errors.DocRevValueError) {
if (err instanceof Errors.DocRevValueError) { logger.warn({ err }, 'Failed to unarchive doc')
logger.warn({ err }, 'Failed to unarchive doc') return res.sendStatus(409)
return res.sendStatus(409) }
return next(err)
} }
throw err res.sendStatus(200)
} })
res.sendStatus(200)
} }
async function destroyProject(req, res) { function destroyProject(req, res, next) {
const { project_id: projectId } = req.params const { project_id: projectId } = req.params
logger.debug({ projectId }, 'destroying all docs') logger.debug({ projectId }, 'destroying all docs')
await DocArchive.destroyProject(projectId) DocArchive.destroyProject(projectId, function (error) {
res.sendStatus(204) if (error) {
return next(error)
}
res.sendStatus(204)
})
} }
async function healthCheck(req, res) { function healthCheck(req, res) {
try { HealthChecker.check(function (err) {
await HealthChecker.check() if (err) {
} catch (err) { logger.err({ err }, 'error performing health check')
logger.err({ err }, 'error performing health check') res.sendStatus(500)
res.sendStatus(500) } else {
return res.sendStatus(200)
} }
res.sendStatus(200) })
} }
module.exports = { module.exports = {
getDoc: expressify(getDoc), getDoc,
peekDoc: expressify(peekDoc), peekDoc,
isDocDeleted: expressify(isDocDeleted), isDocDeleted,
getRawDoc: expressify(getRawDoc), getRawDoc,
getAllDocs: expressify(getAllDocs), getAllDocs,
getAllDeletedDocs: expressify(getAllDeletedDocs), getAllDeletedDocs,
getAllRanges: expressify(getAllRanges), getAllRanges,
getTrackedChangesUserIds: expressify(getTrackedChangesUserIds), projectHasRanges,
getCommentThreadIds: expressify(getCommentThreadIds), updateDoc,
projectHasRanges: expressify(projectHasRanges), patchDoc,
updateDoc: expressify(updateDoc), archiveAllDocs,
patchDoc: expressify(patchDoc), archiveDoc,
archiveAllDocs: expressify(archiveAllDocs), unArchiveAllDocs,
archiveDoc: expressify(archiveDoc), destroyProject,
unArchiveAllDocs: expressify(unArchiveAllDocs), healthCheck,
destroyProject: expressify(destroyProject),
healthCheck: expressify(healthCheck),
} }
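Every handler in the left-hand column is async and exported through expressify; without such a wrapper, a rejected handler promise would bypass Express's error handling entirely. A sketch of the usual shape of this wrapper (the real @overleaf/promise-utils implementation may do more):

// assumption: expressify simply funnels async rejections into next()
function expressify(handler) {
  return (req, res, next) => handler(req, res, next).catch(next)
}

// usage, as in the exports above:
// module.exports = { getDoc: expressify(getDoc) }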

View file

@ -1,6 +1,7 @@
const { db, ObjectId } = require('./mongodb') const { db, ObjectId } = require('./mongodb')
const Settings = require('@overleaf/settings') const Settings = require('@overleaf/settings')
const Errors = require('./Errors') const Errors = require('./Errors')
const { callbackify } = require('node:util')
const ARCHIVING_LOCK_DURATION_MS = Settings.archivingLockDurationMs const ARCHIVING_LOCK_DURATION_MS = Settings.archivingLockDurationMs
@ -240,17 +241,34 @@ async function destroyProject(projectId) {
} }
module.exports = { module.exports = {
findDoc, findDoc: callbackify(findDoc),
getProjectsDeletedDocs, getProjectsDeletedDocs: callbackify(getProjectsDeletedDocs),
getProjectsDocs, getProjectsDocs: callbackify(getProjectsDocs),
getArchivedProjectDocs, getArchivedProjectDocs: callbackify(getArchivedProjectDocs),
getNonArchivedProjectDocIds, getNonArchivedProjectDocIds: callbackify(getNonArchivedProjectDocIds),
getNonDeletedArchivedProjectDocs, getNonDeletedArchivedProjectDocs: callbackify(
upsertIntoDocCollection, getNonDeletedArchivedProjectDocs
restoreArchivedDoc, ),
patchDoc, upsertIntoDocCollection: callbackify(upsertIntoDocCollection),
getDocForArchiving, restoreArchivedDoc: callbackify(restoreArchivedDoc),
markDocAsArchived, patchDoc: callbackify(patchDoc),
checkRevUnchanged, getDocForArchiving: callbackify(getDocForArchiving),
destroyProject, markDocAsArchived: callbackify(markDocAsArchived),
checkRevUnchanged: callbackify(checkRevUnchanged),
destroyProject: callbackify(destroyProject),
promises: {
findDoc,
getProjectsDeletedDocs,
getProjectsDocs,
getArchivedProjectDocs,
getNonArchivedProjectDocIds,
getNonDeletedArchivedProjectDocs,
upsertIntoDocCollection,
restoreArchivedDoc,
patchDoc,
getDocForArchiving,
markDocAsArchived,
checkRevUnchanged,
destroyProject,
},
} }

View file

@ -49,25 +49,15 @@ module.exports = RangeManager = {
updateMetadata(change.metadata) updateMetadata(change.metadata)
} }
for (const comment of Array.from(ranges.comments || [])) { for (const comment of Array.from(ranges.comments || [])) {
// Two bugs resulted in mismatched ids, prefer the thread id from the op: https://github.com/overleaf/internal/issues/23272 comment.id = RangeManager._safeObjectId(comment.id)
comment.id = RangeManager._safeObjectId(comment.op?.t || comment.id) if ((comment.op != null ? comment.op.t : undefined) != null) {
if (comment.op) comment.op.t = comment.id comment.op.t = RangeManager._safeObjectId(comment.op.t)
}
// resolved property is added to comments when they are obtained from history, but this state doesn't belong in mongo docs collection
// more info: https://github.com/overleaf/internal/issues/24371#issuecomment-2913095174
delete comment.op?.resolved
updateMetadata(comment.metadata) updateMetadata(comment.metadata)
} }
return ranges return ranges
}, },
fixCommentIds(doc) {
for (const comment of doc?.ranges?.comments || []) {
// Two bugs resulted in mismatched ids, prefer the thread id from the op: https://github.com/overleaf/internal/issues/23272
if (comment.op?.t) comment.id = comment.op.t
}
},
_safeObjectId(data) { _safeObjectId(data) {
try { try {
return new ObjectId(data) return new ObjectId(data)
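The fixCommentIds helper, present only in the left-hand column, applies the rule stated in the comment above it: when a comment's id and its op.t thread id disagree, the thread id wins. A self-contained illustration:

function fixCommentIds(doc) {
  for (const comment of doc?.ranges?.comments || []) {
    // prefer the thread id from the op (see overleaf/internal#23272)
    if (comment.op?.t) comment.id = comment.op.t
  }
}

const doc = {
  ranges: { comments: [{ id: 'stale-id', op: { t: 'thread-1', c: 'hi' } }] },
}
fixCommentIds(doc)
console.log(doc.ranges.comments[0].id) // 'thread-1'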

View file

@ -2,9 +2,13 @@ const { LoggerStream, WritableBuffer } = require('@overleaf/stream-utils')
const Settings = require('@overleaf/settings') const Settings = require('@overleaf/settings')
const logger = require('@overleaf/logger/logging-manager') const logger = require('@overleaf/logger/logging-manager')
const { pipeline } = require('node:stream/promises') const { pipeline } = require('node:stream/promises')
const { callbackify } = require('node:util')
module.exports = { module.exports = {
streamToBuffer, streamToBuffer: callbackify(streamToBuffer),
promises: {
streamToBuffer,
},
} }
async function streamToBuffer(projectId, docId, stream) { async function streamToBuffer(projectId, docId, stream) {

View file

@ -27,15 +27,12 @@ services:
MOCHA_GREP: ${MOCHA_GREP} MOCHA_GREP: ${MOCHA_GREP}
NODE_ENV: test NODE_ENV: test
NODE_OPTIONS: "--unhandled-rejections=strict" NODE_OPTIONS: "--unhandled-rejections=strict"
volumes:
- ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
depends_on: depends_on:
mongo: mongo:
condition: service_started condition: service_started
gcs: gcs:
condition: service_healthy condition: service_healthy
user: node user: node
entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
command: npm run test:acceptance command: npm run test:acceptance

View file

@ -26,7 +26,6 @@ services:
- .:/overleaf/services/docstore - .:/overleaf/services/docstore
- ../../node_modules:/overleaf/node_modules - ../../node_modules:/overleaf/node_modules
- ../../libraries:/overleaf/libraries - ../../libraries:/overleaf/libraries
- ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
working_dir: /overleaf/services/docstore working_dir: /overleaf/services/docstore
environment: environment:
ELASTIC_SEARCH_DSN: es:9200 ELASTIC_SEARCH_DSN: es:9200
@ -45,7 +44,6 @@ services:
condition: service_started condition: service_started
gcs: gcs:
condition: service_healthy condition: service_healthy
entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
command: npm run --silent test:acceptance command: npm run --silent test:acceptance
mongo: mongo:

View file

@ -17,7 +17,6 @@
"types:check": "tsc --noEmit" "types:check": "tsc --noEmit"
}, },
"dependencies": { "dependencies": {
"@overleaf/fetch-utils": "*",
"@overleaf/logger": "*", "@overleaf/logger": "*",
"@overleaf/metrics": "*", "@overleaf/metrics": "*",
"@overleaf/o-error": "*", "@overleaf/o-error": "*",

View file

@ -1001,15 +1001,6 @@ describe('Archiving', function () {
}, },
version: 2, version: 2,
} }
this.fixedRanges = {
...this.doc.ranges,
comments: [
{
...this.doc.ranges.comments[0],
id: this.doc.ranges.comments[0].op.t,
},
],
}
return DocstoreClient.createDoc( return DocstoreClient.createDoc(
this.project_id, this.project_id,
this.doc._id, this.doc._id,
@ -1057,7 +1048,7 @@ describe('Archiving', function () {
throw error throw error
} }
s3Doc.lines.should.deep.equal(this.doc.lines) s3Doc.lines.should.deep.equal(this.doc.lines)
const ranges = JSON.parse(JSON.stringify(this.fixedRanges)) // ObjectId -> String const ranges = JSON.parse(JSON.stringify(this.doc.ranges)) // ObjectId -> String
s3Doc.ranges.should.deep.equal(ranges) s3Doc.ranges.should.deep.equal(ranges)
return done() return done()
} }
@ -1084,7 +1075,7 @@ describe('Archiving', function () {
throw error throw error
} }
doc.lines.should.deep.equal(this.doc.lines) doc.lines.should.deep.equal(this.doc.lines)
doc.ranges.should.deep.equal(this.fixedRanges) doc.ranges.should.deep.equal(this.doc.ranges)
expect(doc.inS3).not.to.exist expect(doc.inS3).not.to.exist
return done() return done()
}) })
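The JSON.parse(JSON.stringify(...)) step in this test relies on ObjectId serialising to its hex string, so the in-memory fixture can be deep-compared with what came back from S3. A small demonstration (mongodb-legacy is already a dependency of this suite):

const { ObjectId } = require('mongodb-legacy')

const ranges = { comments: [{ id: new ObjectId() }] }
const roundTripped = JSON.parse(JSON.stringify(ranges))
console.log(typeof roundTripped.comments[0].id) // 'string' (24-char hex)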

View file

@ -20,73 +20,30 @@ const DocstoreClient = require('./helpers/DocstoreClient')
describe('Getting all docs', function () { describe('Getting all docs', function () {
beforeEach(function (done) { beforeEach(function (done) {
this.project_id = new ObjectId() this.project_id = new ObjectId()
this.threadId1 = new ObjectId().toString()
this.threadId2 = new ObjectId().toString()
this.docs = [ this.docs = [
{ {
_id: new ObjectId(), _id: new ObjectId(),
lines: ['one', 'two', 'three'], lines: ['one', 'two', 'three'],
ranges: { ranges: { mock: 'one' },
comments: [
{ id: new ObjectId().toString(), op: { t: this.threadId1 } },
],
changes: [
{
id: new ObjectId().toString(),
metadata: { user_id: 'user-id-1' },
},
],
},
rev: 2, rev: 2,
}, },
{ {
_id: new ObjectId(), _id: new ObjectId(),
lines: ['aaa', 'bbb', 'ccc'], lines: ['aaa', 'bbb', 'ccc'],
ranges: { ranges: { mock: 'two' },
changes: [
{
id: new ObjectId().toString(),
metadata: { user_id: 'user-id-2' },
},
],
},
rev: 4, rev: 4,
}, },
{ {
_id: new ObjectId(), _id: new ObjectId(),
lines: ['111', '222', '333'], lines: ['111', '222', '333'],
ranges: { ranges: { mock: 'three' },
comments: [
{ id: new ObjectId().toString(), op: { t: this.threadId2 } },
],
changes: [
{
id: new ObjectId().toString(),
metadata: { user_id: 'anonymous-user' },
},
],
},
rev: 6, rev: 6,
}, },
] ]
this.fixedRanges = this.docs.map(doc => {
if (!doc.ranges?.comments?.length) return doc.ranges
return {
...doc.ranges,
comments: [
{ ...doc.ranges.comments[0], id: doc.ranges.comments[0].op.t },
],
}
})
this.deleted_doc = { this.deleted_doc = {
_id: new ObjectId(), _id: new ObjectId(),
lines: ['deleted'], lines: ['deleted'],
ranges: { ranges: { mock: 'four' },
comments: [{ id: new ObjectId().toString(), op: { t: 'thread-id-3' } }],
changes: [
{ id: new ObjectId().toString(), metadata: { user_id: 'user-id-3' } },
],
},
rev: 8, rev: 8,
} }
const version = 42 const version = 42
@ -139,7 +96,7 @@ describe('Getting all docs', function () {
}) })
}) })
it('getAllRanges should return all the (non-deleted) doc ranges', function (done) { return it('getAllRanges should return all the (non-deleted) doc ranges', function (done) {
return DocstoreClient.getAllRanges(this.project_id, (error, res, docs) => { return DocstoreClient.getAllRanges(this.project_id, (error, res, docs) => {
if (error != null) { if (error != null) {
throw error throw error
@ -147,38 +104,9 @@ describe('Getting all docs', function () {
docs.length.should.equal(this.docs.length) docs.length.should.equal(this.docs.length)
for (let i = 0; i < docs.length; i++) { for (let i = 0; i < docs.length; i++) {
const doc = docs[i] const doc = docs[i]
doc.ranges.should.deep.equal(this.fixedRanges[i]) doc.ranges.should.deep.equal(this.docs[i].ranges)
} }
return done() return done()
}) })
}) })
it('getTrackedChangesUserIds should return all the user ids from (non-deleted) ranges', function (done) {
DocstoreClient.getTrackedChangesUserIds(
this.project_id,
(error, res, userIds) => {
if (error != null) {
throw error
}
userIds.should.deep.equal(['user-id-1', 'user-id-2'])
done()
}
)
})
it('getCommentThreadIds should return all the thread ids from (non-deleted) ranges', function (done) {
DocstoreClient.getCommentThreadIds(
this.project_id,
(error, res, threadIds) => {
if (error != null) {
throw error
}
threadIds.should.deep.equal({
[this.docs[0]._id.toString()]: [this.threadId1],
[this.docs[2]._id.toString()]: [this.threadId2],
})
done()
}
)
})
}) })
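The deleted assertions exercised the aggregation shown in the DocManager hunk earlier in this diff: thread ids grouped per doc, and anonymous tracked-change users filtered out. The expected shapes can be reproduced in isolation:

const docs = [
  {
    _id: 'doc1',
    ranges: {
      comments: [{ op: { t: 'thread-1' } }],
      changes: [{ metadata: { user_id: 'user-id-1' } }],
    },
  },
  {
    _id: 'doc2',
    ranges: { changes: [{ metadata: { user_id: 'anonymous-user' } }] },
  },
]

const threadIdsByDoc = {}
const userIds = new Set()
for (const doc of docs) {
  const ids = (doc.ranges.comments || []).map(c => c.op.t)
  if (ids.length > 0) threadIdsByDoc[doc._id] = ids
  for (const change of doc.ranges.changes || []) {
    if (change.metadata.user_id !== 'anonymous-user') {
      userIds.add(change.metadata.user_id)
    }
  }
}

console.log(threadIdsByDoc)      // { doc1: ['thread-1'] }
console.log(Array.from(userIds)) // ['user-id-1']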

View file

@ -28,26 +28,10 @@ describe('Getting a doc', function () {
op: { i: 'foo', p: 3 }, op: { i: 'foo', p: 3 },
meta: { meta: {
user_id: new ObjectId().toString(), user_id: new ObjectId().toString(),
ts: new Date().toJSON(), ts: new Date().toString(),
}, },
}, },
], ],
comments: [
{
id: new ObjectId().toString(),
op: { c: 'comment', p: 1, t: new ObjectId().toString() },
metadata: {
user_id: new ObjectId().toString(),
ts: new Date().toJSON(),
},
},
],
}
this.fixedRanges = {
...this.ranges,
comments: [
{ ...this.ranges.comments[0], id: this.ranges.comments[0].op.t },
],
} }
return DocstoreApp.ensureRunning(() => { return DocstoreApp.ensureRunning(() => {
return DocstoreClient.createDoc( return DocstoreClient.createDoc(
@ -76,7 +60,7 @@ describe('Getting a doc', function () {
if (error) return done(error) if (error) return done(error)
doc.lines.should.deep.equal(this.lines) doc.lines.should.deep.equal(this.lines)
doc.version.should.equal(this.version) doc.version.should.equal(this.version)
doc.ranges.should.deep.equal(this.fixedRanges) doc.ranges.should.deep.equal(this.ranges)
return done() return done()
} }
) )
@ -130,7 +114,7 @@ describe('Getting a doc', function () {
if (error) return done(error) if (error) return done(error)
doc.lines.should.deep.equal(this.lines) doc.lines.should.deep.equal(this.lines)
doc.version.should.equal(this.version) doc.version.should.equal(this.version)
doc.ranges.should.deep.equal(this.fixedRanges) doc.ranges.should.deep.equal(this.ranges)
doc.deleted.should.equal(true) doc.deleted.should.equal(true)
return done() return done()
} }
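The fixture timestamp switches between new Date().toJSON() (left) and new Date().toString() (right). For the deep-equality assertions in this suite, toJSON is the safer choice: it produces stable ISO 8601, while toString is locale- and timezone-dependent. Compare:

const ts = new Date(0)
console.log(ts.toJSON())   // '1970-01-01T00:00:00.000Z', what JSON round-trips preserve
console.log(ts.toString()) // e.g. 'Thu Jan 01 1970 01:00:00 GMT+0100 ...', host dependent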

View file

@ -1,28 +0,0 @@
const { db } = require('../../../app/js/mongodb')
const DocstoreApp = require('./helpers/DocstoreApp')
const DocstoreClient = require('./helpers/DocstoreClient')
const { expect } = require('chai')
describe('HealthChecker', function () {
beforeEach('start', function (done) {
DocstoreApp.ensureRunning(done)
})
beforeEach('clear docs collection', async function () {
await db.docs.deleteMany({})
})
let res
beforeEach('run health check', function (done) {
DocstoreClient.healthCheck((err, _res) => {
res = _res
done(err)
})
})
it('should return 200', function () {
res.statusCode.should.equal(200)
})
it('should not leave any cruft behind', async function () {
expect(await db.docs.find({}).toArray()).to.deep.equal([])
})
})

View file

@ -100,26 +100,6 @@ module.exports = DocstoreClient = {
) )
}, },
getCommentThreadIds(projectId, callback) {
request.get(
{
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/comment-thread-ids`,
json: true,
},
callback
)
},
getTrackedChangesUserIds(projectId, callback) {
request.get(
{
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/tracked-changes-user-ids`,
json: true,
},
callback
)
},
updateDoc(projectId, docId, lines, version, ranges, callback) { updateDoc(projectId, docId, lines, version, ranges, callback) {
return request.post( return request.post(
{ {
@ -201,13 +181,6 @@ module.exports = DocstoreClient = {
) )
}, },
healthCheck(callback) {
request.get(
`http://127.0.0.1:${settings.internal.docstore.port}/health_check`,
callback
)
},
getS3Doc(projectId, docId, callback) { getS3Doc(projectId, docId, callback) {
getStringFromPersistor( getStringFromPersistor(
Persistor, Persistor,

View file

@ -4,7 +4,7 @@ const modulePath = '../../../app/js/DocArchiveManager.js'
const SandboxedModule = require('sandboxed-module') const SandboxedModule = require('sandboxed-module')
const { ObjectId } = require('mongodb-legacy') const { ObjectId } = require('mongodb-legacy')
const Errors = require('../../../app/js/Errors') const Errors = require('../../../app/js/Errors')
const StreamToBuffer = require('../../../app/js/StreamToBuffer') const StreamToBuffer = require('../../../app/js/StreamToBuffer').promises
describe('DocArchiveManager', function () { describe('DocArchiveManager', function () {
let DocArchiveManager, let DocArchiveManager,
@ -31,7 +31,6 @@ describe('DocArchiveManager', function () {
RangeManager = { RangeManager = {
jsonRangesToMongo: sinon.stub().returns({ mongo: 'ranges' }), jsonRangesToMongo: sinon.stub().returns({ mongo: 'ranges' }),
fixCommentIds: sinon.stub(),
} }
Settings = { Settings = {
docstore: { docstore: {
@ -143,33 +142,37 @@ describe('DocArchiveManager', function () {
} }
MongoManager = { MongoManager = {
markDocAsArchived: sinon.stub().resolves(), promises: {
restoreArchivedDoc: sinon.stub().resolves(), markDocAsArchived: sinon.stub().resolves(),
upsertIntoDocCollection: sinon.stub().resolves(), restoreArchivedDoc: sinon.stub().resolves(),
getProjectsDocs: sinon.stub().resolves(mongoDocs), upsertIntoDocCollection: sinon.stub().resolves(),
getNonDeletedArchivedProjectDocs: getArchivedProjectDocs, getProjectsDocs: sinon.stub().resolves(mongoDocs),
getNonArchivedProjectDocIds, getNonDeletedArchivedProjectDocs: getArchivedProjectDocs,
getArchivedProjectDocs, getNonArchivedProjectDocIds,
findDoc: sinon.stub().callsFake(fakeGetDoc), getArchivedProjectDocs,
getDocForArchiving: sinon.stub().callsFake(fakeGetDoc), findDoc: sinon.stub().callsFake(fakeGetDoc),
destroyProject: sinon.stub().resolves(), getDocForArchiving: sinon.stub().callsFake(fakeGetDoc),
destroyProject: sinon.stub().resolves(),
},
} }
// Wrap streamToBuffer so that we can pass in something that it expects (in // Wrap streamToBuffer so that we can pass in something that it expects (in
// this case, a Promise) rather than a stubbed stream object // this case, a Promise) rather than a stubbed stream object
streamToBuffer = { streamToBuffer = {
streamToBuffer: async () => { promises: {
const inputStream = new Promise(resolve => { streamToBuffer: async () => {
stream.on('data', data => resolve(data)) const inputStream = new Promise(resolve => {
}) stream.on('data', data => resolve(data))
})
const value = await StreamToBuffer.streamToBuffer( const value = await StreamToBuffer.streamToBuffer(
'testProjectId', 'testProjectId',
'testDocId', 'testDocId',
inputStream inputStream
) )
return value return value
},
}, },
} }
@ -189,13 +192,9 @@ describe('DocArchiveManager', function () {
describe('archiveDoc', function () { describe('archiveDoc', function () {
it('should resolve when passed a valid document', async function () { it('should resolve when passed a valid document', async function () {
await expect(DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)).to await expect(
.eventually.be.fulfilled DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
}) ).to.eventually.be.fulfilled
it('should fix comment ids', async function () {
await DocArchiveManager.archiveDoc(projectId, mongoDocs[1]._id)
expect(RangeManager.fixCommentIds).to.have.been.called
}) })
it('should throw an error if the doc has no lines', async function () { it('should throw an error if the doc has no lines', async function () {
@ -203,26 +202,26 @@ describe('DocArchiveManager', function () {
doc.lines = null doc.lines = null
await expect( await expect(
DocArchiveManager.archiveDoc(projectId, doc._id) DocArchiveManager.promises.archiveDoc(projectId, doc._id)
).to.eventually.be.rejectedWith('doc has no lines') ).to.eventually.be.rejectedWith('doc has no lines')
}) })
it('should add the schema version', async function () { it('should add the schema version', async function () {
await DocArchiveManager.archiveDoc(projectId, mongoDocs[1]._id) await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[1]._id)
expect(StreamUtils.ReadableString).to.have.been.calledWith( expect(StreamUtils.ReadableString).to.have.been.calledWith(
sinon.match(/"schema_v":1/) sinon.match(/"schema_v":1/)
) )
}) })
it('should calculate the hex md5 sum of the content', async function () { it('should calculate the hex md5 sum of the content', async function () {
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(Crypto.createHash).to.have.been.calledWith('md5') expect(Crypto.createHash).to.have.been.calledWith('md5')
expect(HashUpdate).to.have.been.calledWith(archivedDocJson) expect(HashUpdate).to.have.been.calledWith(archivedDocJson)
expect(HashDigest).to.have.been.calledWith('hex') expect(HashDigest).to.have.been.calledWith('hex')
}) })
it('should pass the md5 hash to the object persistor for verification', async function () { it('should pass the md5 hash to the object persistor for verification', async function () {
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(PersistorManager.sendStream).to.have.been.calledWith( expect(PersistorManager.sendStream).to.have.been.calledWith(
sinon.match.any, sinon.match.any,
@ -233,7 +232,7 @@ describe('DocArchiveManager', function () {
}) })
it('should pass the correct bucket and key to the persistor', async function () { it('should pass the correct bucket and key to the persistor', async function () {
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(PersistorManager.sendStream).to.have.been.calledWith( expect(PersistorManager.sendStream).to.have.been.calledWith(
Settings.docstore.bucket, Settings.docstore.bucket,
@ -242,7 +241,7 @@ describe('DocArchiveManager', function () {
}) })
it('should create a stream from the encoded json and send it', async function () { it('should create a stream from the encoded json and send it', async function () {
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(StreamUtils.ReadableString).to.have.been.calledWith( expect(StreamUtils.ReadableString).to.have.been.calledWith(
archivedDocJson archivedDocJson
) )
@ -254,8 +253,8 @@ describe('DocArchiveManager', function () {
}) })
it('should mark the doc as archived', async function () { it('should mark the doc as archived', async function () {
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(MongoManager.markDocAsArchived).to.have.been.calledWith( expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
projectId, projectId,
mongoDocs[0]._id, mongoDocs[0]._id,
mongoDocs[0].rev mongoDocs[0].rev
@ -268,8 +267,8 @@ describe('DocArchiveManager', function () {
}) })
it('should bail out early', async function () { it('should bail out early', async function () {
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(MongoManager.getDocForArchiving).to.not.have.been.called expect(MongoManager.promises.getDocForArchiving).to.not.have.been.called
}) })
}) })
@ -286,7 +285,7 @@ describe('DocArchiveManager', function () {
it('should return an error', async function () { it('should return an error', async function () {
await expect( await expect(
DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
).to.eventually.be.rejectedWith('null bytes detected') ).to.eventually.be.rejectedWith('null bytes detected')
}) })
}) })
@ -297,19 +296,21 @@ describe('DocArchiveManager', function () {
describe('when the doc is in S3', function () { describe('when the doc is in S3', function () {
beforeEach(function () { beforeEach(function () {
MongoManager.findDoc = sinon.stub().resolves({ inS3: true, rev }) MongoManager.promises.findDoc = sinon
.stub()
.resolves({ inS3: true, rev })
docId = mongoDocs[0]._id docId = mongoDocs[0]._id
lines = ['doc', 'lines'] lines = ['doc', 'lines']
rev = 123 rev = 123
}) })
it('should resolve when passed a valid document', async function () { it('should resolve when passed a valid document', async function () {
await expect(DocArchiveManager.unarchiveDoc(projectId, docId)).to await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId))
.eventually.be.fulfilled .to.eventually.be.fulfilled
}) })
it('should test md5 validity with the raw buffer', async function () { it('should test md5 validity with the raw buffer', async function () {
await DocArchiveManager.unarchiveDoc(projectId, docId) await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(HashUpdate).to.have.been.calledWith( expect(HashUpdate).to.have.been.calledWith(
sinon.match.instanceOf(Buffer) sinon.match.instanceOf(Buffer)
) )
@ -318,17 +319,15 @@ describe('DocArchiveManager', function () {
it('should throw an error if the md5 does not match', async function () { it('should throw an error if the md5 does not match', async function () {
PersistorManager.getObjectMd5Hash.resolves('badf00d') PersistorManager.getObjectMd5Hash.resolves('badf00d')
await expect( await expect(
DocArchiveManager.unarchiveDoc(projectId, docId) DocArchiveManager.promises.unarchiveDoc(projectId, docId)
).to.eventually.be.rejected.and.be.instanceof(Errors.Md5MismatchError) ).to.eventually.be.rejected.and.be.instanceof(Errors.Md5MismatchError)
}) })
it('should restore the doc in Mongo', async function () { it('should restore the doc in Mongo', async function () {
await DocArchiveManager.unarchiveDoc(projectId, docId) await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith( expect(
projectId, MongoManager.promises.restoreArchivedDoc
docId, ).to.have.been.calledWith(projectId, docId, archivedDoc)
archivedDoc
)
}) })
describe('when archiving is not configured', function () { describe('when archiving is not configured', function () {
@ -338,15 +337,15 @@ describe('DocArchiveManager', function () {
it('should error out on archived doc', async function () { it('should error out on archived doc', async function () {
await expect( await expect(
DocArchiveManager.unarchiveDoc(projectId, docId) DocArchiveManager.promises.unarchiveDoc(projectId, docId)
).to.eventually.be.rejected.and.match( ).to.eventually.be.rejected.and.match(
/found archived doc, but archiving backend is not configured/ /found archived doc, but archiving backend is not configured/
) )
}) })
it('should return early on non-archived doc', async function () { it('should return early on non-archived doc', async function () {
MongoManager.findDoc = sinon.stub().resolves({ rev }) MongoManager.promises.findDoc = sinon.stub().resolves({ rev })
await DocArchiveManager.unarchiveDoc(projectId, docId) await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(PersistorManager.getObjectMd5Hash).to.not.have.been.called expect(PersistorManager.getObjectMd5Hash).to.not.have.been.called
}) })
}) })
@ -364,12 +363,10 @@ describe('DocArchiveManager', function () {
}) })
it('should return the docs lines', async function () { it('should return the docs lines', async function () {
await DocArchiveManager.unarchiveDoc(projectId, docId) await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith( expect(
projectId, MongoManager.promises.restoreArchivedDoc
docId, ).to.have.been.calledWith(projectId, docId, { lines, rev })
{ lines, rev }
)
}) })
}) })
@ -388,16 +385,14 @@ describe('DocArchiveManager', function () {
}) })
it('should return the doc lines and ranges', async function () { it('should return the doc lines and ranges', async function () {
await DocArchiveManager.unarchiveDoc(projectId, docId) await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith( expect(
projectId, MongoManager.promises.restoreArchivedDoc
docId, ).to.have.been.calledWith(projectId, docId, {
{ lines,
lines, ranges: { mongo: 'ranges' },
ranges: { mongo: 'ranges' }, rev: 456,
rev: 456, })
}
)
}) })
}) })
@ -411,12 +406,10 @@ describe('DocArchiveManager', function () {
}) })
it('should return only the doc lines', async function () { it('should return only the doc lines', async function () {
await DocArchiveManager.unarchiveDoc(projectId, docId) await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith( expect(
projectId, MongoManager.promises.restoreArchivedDoc
docId, ).to.have.been.calledWith(projectId, docId, { lines, rev: 456 })
{ lines, rev: 456 }
)
}) })
}) })
@ -430,12 +423,10 @@ describe('DocArchiveManager', function () {
}) })
it('should use the rev obtained from Mongo', async function () { it('should use the rev obtained from Mongo', async function () {
await DocArchiveManager.unarchiveDoc(projectId, docId) await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith( expect(
projectId, MongoManager.promises.restoreArchivedDoc
docId, ).to.have.been.calledWith(projectId, docId, { lines, rev })
{ lines, rev }
)
}) })
}) })
@ -450,7 +441,7 @@ describe('DocArchiveManager', function () {
it('should throw an error', async function () { it('should throw an error', async function () {
await expect( await expect(
DocArchiveManager.unarchiveDoc(projectId, docId) DocArchiveManager.promises.unarchiveDoc(projectId, docId)
).to.eventually.be.rejectedWith( ).to.eventually.be.rejectedWith(
"I don't understand the doc format in s3" "I don't understand the doc format in s3"
) )
@ -460,8 +451,8 @@ describe('DocArchiveManager', function () {
}) })
it('should not do anything if the file is already unarchived', async function () { it('should not do anything if the file is already unarchived', async function () {
MongoManager.findDoc.resolves({ inS3: false }) MongoManager.promises.findDoc.resolves({ inS3: false })
await DocArchiveManager.unarchiveDoc(projectId, docId) await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(PersistorManager.getObjectStream).not.to.have.been.called expect(PersistorManager.getObjectStream).not.to.have.been.called
}) })
@ -470,7 +461,7 @@ describe('DocArchiveManager', function () {
.stub() .stub()
.rejects(new Errors.NotFoundError()) .rejects(new Errors.NotFoundError())
await expect( await expect(
DocArchiveManager.unarchiveDoc(projectId, docId) DocArchiveManager.promises.unarchiveDoc(projectId, docId)
).to.eventually.be.rejected.and.be.instanceof(Errors.NotFoundError) ).to.eventually.be.rejected.and.be.instanceof(Errors.NotFoundError)
}) })
}) })
@ -478,11 +469,13 @@ describe('DocArchiveManager', function () {
describe('destroyProject', function () { describe('destroyProject', function () {
describe('when archiving is enabled', function () { describe('when archiving is enabled', function () {
beforeEach(async function () { beforeEach(async function () {
await DocArchiveManager.destroyProject(projectId) await DocArchiveManager.promises.destroyProject(projectId)
}) })
it('should delete the project in Mongo', function () { it('should delete the project in Mongo', function () {
expect(MongoManager.destroyProject).to.have.been.calledWith(projectId) expect(MongoManager.promises.destroyProject).to.have.been.calledWith(
projectId
)
}) })
it('should delete the project in the persistor', function () { it('should delete the project in the persistor', function () {
@ -496,11 +489,13 @@ describe('DocArchiveManager', function () {
describe('when archiving is disabled', function () { describe('when archiving is disabled', function () {
beforeEach(async function () { beforeEach(async function () {
Settings.docstore.backend = '' Settings.docstore.backend = ''
await DocArchiveManager.destroyProject(projectId) await DocArchiveManager.promises.destroyProject(projectId)
}) })
it('should delete the project in Mongo', function () { it('should delete the project in Mongo', function () {
expect(MongoManager.destroyProject).to.have.been.calledWith(projectId) expect(MongoManager.promises.destroyProject).to.have.been.calledWith(
projectId
)
}) })
it('should not delete the project in the persistor', function () { it('should not delete the project in the persistor', function () {
@ -511,35 +506,33 @@ describe('DocArchiveManager', function () {
describe('archiveAllDocs', function () { describe('archiveAllDocs', function () {
it('should resolve with valid arguments', async function () { it('should resolve with valid arguments', async function () {
await expect(DocArchiveManager.archiveAllDocs(projectId)).to.eventually.be await expect(DocArchiveManager.promises.archiveAllDocs(projectId)).to
.fulfilled .eventually.be.fulfilled
}) })
it('should archive all project docs which are not in s3', async function () { it('should archive all project docs which are not in s3', async function () {
await DocArchiveManager.archiveAllDocs(projectId) await DocArchiveManager.promises.archiveAllDocs(projectId)
// not inS3 // not inS3
expect(MongoManager.markDocAsArchived).to.have.been.calledWith( expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
projectId, projectId,
mongoDocs[0]._id mongoDocs[0]._id
) )
expect(MongoManager.markDocAsArchived).to.have.been.calledWith( expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
projectId, projectId,
mongoDocs[1]._id mongoDocs[1]._id
) )
expect(MongoManager.markDocAsArchived).to.have.been.calledWith( expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
projectId, projectId,
mongoDocs[4]._id mongoDocs[4]._id
) )
// inS3 // inS3
expect(MongoManager.markDocAsArchived).not.to.have.been.calledWith( expect(
projectId, MongoManager.promises.markDocAsArchived
mongoDocs[2]._id ).not.to.have.been.calledWith(projectId, mongoDocs[2]._id)
) expect(
expect(MongoManager.markDocAsArchived).not.to.have.been.calledWith( MongoManager.promises.markDocAsArchived
projectId, ).not.to.have.been.calledWith(projectId, mongoDocs[3]._id)
mongoDocs[3]._id
)
}) })
describe('when archiving is not configured', function () { describe('when archiving is not configured', function () {
@@ -548,20 +541,21 @@ describe('DocArchiveManager', function () {
}) })
it('should bail out early', async function () { it('should bail out early', async function () {
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(MongoManager.getNonArchivedProjectDocIds).to.not.have.been.called expect(MongoManager.promises.getNonArchivedProjectDocIds).to.not.have
.been.called
}) })
}) })
}) })
describe('unArchiveAllDocs', function () { describe('unArchiveAllDocs', function () {
it('should resolve with valid arguments', async function () { it('should resolve with valid arguments', async function () {
await expect(DocArchiveManager.unArchiveAllDocs(projectId)).to.eventually await expect(DocArchiveManager.promises.unArchiveAllDocs(projectId)).to
.be.fulfilled .eventually.be.fulfilled
}) })
it('should unarchive all inS3 docs', async function () { it('should unarchive all inS3 docs', async function () {
await DocArchiveManager.unArchiveAllDocs(projectId) await DocArchiveManager.promises.unArchiveAllDocs(projectId)
for (const doc of archivedDocs) { for (const doc of archivedDocs) {
expect(PersistorManager.getObjectStream).to.have.been.calledWith( expect(PersistorManager.getObjectStream).to.have.been.calledWith(
@@ -577,9 +571,9 @@ describe('DocArchiveManager', function () {
}) })
it('should bail out early', async function () { it('should bail out early', async function () {
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id) await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(MongoManager.getNonDeletedArchivedProjectDocs).to.not.have.been expect(MongoManager.promises.getNonDeletedArchivedProjectDocs).to.not
.called .have.been.called
}) })
}) })
}) })
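
The change running through this file replaces `DocArchiveManager.promises.archiveDoc(...)` (right column) with a direct `await DocArchiveManager.archiveDoc(...)` (left column): the promise-based functions move to the module's top level. A minimal sketch of the two export shapes, with the function body and names invented for illustration:

const { promisify } = require('node:util')

// Callback-era shape: promise variants are tucked under a `promises` key.
function archiveDocCb(projectId, docId, callback) {
  // ...write the doc to object storage, then signal completion
  process.nextTick(callback)
}
const callbackEraExports = {
  archiveDoc: archiveDocCb,
  promises: { archiveDoc: promisify(archiveDocCb) },
}

// Async-era shape: the top-level export is promise-based already, so
// callers write `await DocArchiveManager.archiveDoc(...)` with no `.promises.` hop.
const asyncEraExports = {
  archiveDoc: promisify(archiveDocCb),
}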


@@ -17,22 +17,25 @@ describe('DocManager', function () {
this.version = 42 this.version = 42
this.MongoManager = { this.MongoManager = {
findDoc: sinon.stub(), promises: {
getProjectsDocs: sinon.stub(), findDoc: sinon.stub(),
patchDoc: sinon.stub().resolves(), getProjectsDocs: sinon.stub(),
upsertIntoDocCollection: sinon.stub().resolves(), patchDoc: sinon.stub().resolves(),
upsertIntoDocCollection: sinon.stub().resolves(),
},
} }
this.DocArchiveManager = { this.DocArchiveManager = {
unarchiveDoc: sinon.stub(), promises: {
unArchiveAllDocs: sinon.stub(), unarchiveDoc: sinon.stub(),
archiveDoc: sinon.stub().resolves(), unArchiveAllDocs: sinon.stub(),
archiveDoc: sinon.stub().resolves(),
},
} }
this.RangeManager = { this.RangeManager = {
jsonRangesToMongo(r) { jsonRangesToMongo(r) {
return r return r
}, },
shouldUpdateRanges: sinon.stub().returns(false), shouldUpdateRanges: sinon.stub().returns(false),
fixCommentIds: sinon.stub(),
} }
this.settings = { docstore: {} } this.settings = { docstore: {} }
@@ -49,7 +52,7 @@ describe('DocManager', function () {
describe('getFullDoc', function () { describe('getFullDoc', function () {
beforeEach(function () { beforeEach(function () {
this.DocManager._getDoc = sinon.stub() this.DocManager.promises._getDoc = sinon.stub()
this.doc = { this.doc = {
_id: this.doc_id, _id: this.doc_id,
lines: ['2134'], lines: ['2134'],
@@ -57,10 +60,13 @@ describe('DocManager', function () {
}) })
it('should call get doc with a quick filter', async function () { it('should call get doc with a quick filter', async function () {
this.DocManager._getDoc.resolves(this.doc) this.DocManager.promises._getDoc.resolves(this.doc)
const doc = await this.DocManager.getFullDoc(this.project_id, this.doc_id) const doc = await this.DocManager.promises.getFullDoc(
this.project_id,
this.doc_id
)
doc.should.equal(this.doc) doc.should.equal(this.doc)
this.DocManager._getDoc this.DocManager.promises._getDoc
.calledWith(this.project_id, this.doc_id, { .calledWith(this.project_id, this.doc_id, {
lines: true, lines: true,
rev: true, rev: true,
@@ -73,27 +79,27 @@ describe('DocManager', function () {
}) })
it('should return error when get doc errors', async function () { it('should return error when get doc errors', async function () {
this.DocManager._getDoc.rejects(this.stubbedError) this.DocManager.promises._getDoc.rejects(this.stubbedError)
await expect( await expect(
this.DocManager.getFullDoc(this.project_id, this.doc_id) this.DocManager.promises.getFullDoc(this.project_id, this.doc_id)
).to.be.rejectedWith(this.stubbedError) ).to.be.rejectedWith(this.stubbedError)
}) })
}) })
describe('getRawDoc', function () { describe('getRawDoc', function () {
beforeEach(function () { beforeEach(function () {
this.DocManager._getDoc = sinon.stub() this.DocManager.promises._getDoc = sinon.stub()
this.doc = { lines: ['2134'] } this.doc = { lines: ['2134'] }
}) })
it('should call get doc with a quick filter', async function () { it('should call get doc with a quick filter', async function () {
this.DocManager._getDoc.resolves(this.doc) this.DocManager.promises._getDoc.resolves(this.doc)
const content = await this.DocManager.getDocLines( const doc = await this.DocManager.promises.getDocLines(
this.project_id, this.project_id,
this.doc_id this.doc_id
) )
content.should.equal(this.doc.lines.join('\n')) doc.should.equal(this.doc)
this.DocManager._getDoc this.DocManager.promises._getDoc
.calledWith(this.project_id, this.doc_id, { .calledWith(this.project_id, this.doc_id, {
lines: true, lines: true,
inS3: true, inS3: true,
@@ -102,46 +108,11 @@ describe('DocManager', function () {
}) })
it('should return error when get doc errors', async function () { it('should return error when get doc errors', async function () {
this.DocManager._getDoc.rejects(this.stubbedError) this.DocManager.promises._getDoc.rejects(this.stubbedError)
await expect( await expect(
this.DocManager.getDocLines(this.project_id, this.doc_id) this.DocManager.promises.getDocLines(this.project_id, this.doc_id)
).to.be.rejectedWith(this.stubbedError) ).to.be.rejectedWith(this.stubbedError)
}) })
it('should return error when get doc does not exist', async function () {
this.DocManager._getDoc.resolves(null)
await expect(
this.DocManager.getDocLines(this.project_id, this.doc_id)
).to.be.rejectedWith(Errors.NotFoundError)
})
it('should return error when get doc has no lines', async function () {
this.DocManager._getDoc.resolves({})
await expect(
this.DocManager.getDocLines(this.project_id, this.doc_id)
).to.be.rejectedWith(Errors.DocWithoutLinesError)
})
})
describe('_getDoc', function () {
it('should return error when get doc does not exist', async function () {
this.MongoManager.findDoc.resolves(null)
await expect(
this.DocManager._getDoc(this.project_id, this.doc_id, { inS3: true })
).to.be.rejectedWith(Errors.NotFoundError)
})
it('should fix comment ids', async function () {
this.MongoManager.findDoc.resolves({
_id: this.doc_id,
ranges: {},
})
await this.DocManager._getDoc(this.project_id, this.doc_id, {
inS3: true,
ranges: true,
})
expect(this.RangeManager.fixCommentIds).to.have.been.called
})
}) })
describe('getDoc', function () { describe('getDoc', function () {
@@ -157,25 +128,26 @@ describe('DocManager', function () {
describe('when using a filter', function () { describe('when using a filter', function () {
beforeEach(function () { beforeEach(function () {
this.MongoManager.findDoc.resolves(this.doc) this.MongoManager.promises.findDoc.resolves(this.doc)
}) })
it('should error if inS3 is not set to true', async function () { it('should error if inS3 is not set to true', async function () {
await expect( await expect(
this.DocManager._getDoc(this.project_id, this.doc_id, { this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
inS3: false, inS3: false,
}) })
).to.be.rejected ).to.be.rejected
}) })
it('should always get inS3 even when no filter is passed', async function () { it('should always get inS3 even when no filter is passed', async function () {
await expect(this.DocManager._getDoc(this.project_id, this.doc_id)).to await expect(
.be.rejected this.DocManager.promises._getDoc(this.project_id, this.doc_id)
this.MongoManager.findDoc.called.should.equal(false) ).to.be.rejected
this.MongoManager.promises.findDoc.called.should.equal(false)
}) })
it('should not error if inS3 is set to true', async function () { it('should not error if inS3 is set to true', async function () {
await this.DocManager._getDoc(this.project_id, this.doc_id, { await this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
inS3: true, inS3: true,
}) })
}) })
@@ -183,8 +155,8 @@ describe('DocManager', function () {
describe('when the doc is in the doc collection', function () { describe('when the doc is in the doc collection', function () {
beforeEach(async function () { beforeEach(async function () {
this.MongoManager.findDoc.resolves(this.doc) this.MongoManager.promises.findDoc.resolves(this.doc)
this.result = await this.DocManager._getDoc( this.result = await this.DocManager.promises._getDoc(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
{ version: true, inS3: true } { version: true, inS3: true }
@@ -192,7 +164,7 @@ describe('DocManager', function () {
}) })
it('should get the doc from the doc collection', function () { it('should get the doc from the doc collection', function () {
this.MongoManager.findDoc this.MongoManager.promises.findDoc
.calledWith(this.project_id, this.doc_id) .calledWith(this.project_id, this.doc_id)
.should.equal(true) .should.equal(true)
}) })
@@ -205,9 +177,9 @@ describe('DocManager', function () {
describe('when MongoManager.findDoc errors', function () { describe('when MongoManager.findDoc errors', function () {
it('should return the error', async function () { it('should return the error', async function () {
this.MongoManager.findDoc.rejects(this.stubbedError) this.MongoManager.promises.findDoc.rejects(this.stubbedError)
await expect( await expect(
this.DocManager._getDoc(this.project_id, this.doc_id, { this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
version: true, version: true,
inS3: true, inS3: true,
}) })
@@ -230,15 +202,15 @@ describe('DocManager', function () {
version: 2, version: 2,
inS3: false, inS3: false,
} }
this.MongoManager.findDoc.resolves(this.doc) this.MongoManager.promises.findDoc.resolves(this.doc)
this.DocArchiveManager.unarchiveDoc.callsFake( this.DocArchiveManager.promises.unarchiveDoc.callsFake(
async (projectId, docId) => { async (projectId, docId) => {
this.MongoManager.findDoc.resolves({ this.MongoManager.promises.findDoc.resolves({
...this.unarchivedDoc, ...this.unarchivedDoc,
}) })
} }
) )
this.result = await this.DocManager._getDoc( this.result = await this.DocManager.promises._getDoc(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
{ {
@@ -249,13 +221,13 @@ describe('DocManager', function () {
}) })
it('should call the DocArchive to unarchive the doc', function () { it('should call the DocArchive to unarchive the doc', function () {
this.DocArchiveManager.unarchiveDoc this.DocArchiveManager.promises.unarchiveDoc
.calledWith(this.project_id, this.doc_id) .calledWith(this.project_id, this.doc_id)
.should.equal(true) .should.equal(true)
}) })
it('should look up the doc twice', function () { it('should look up the doc twice', function () {
this.MongoManager.findDoc.calledTwice.should.equal(true) this.MongoManager.promises.findDoc.calledTwice.should.equal(true)
}) })
it('should return the doc', function () { it('should return the doc', function () {
@@ -267,9 +239,9 @@ describe('DocManager', function () {
describe('when the doc does not exist in the docs collection', function () { describe('when the doc does not exist in the docs collection', function () {
it('should return a NotFoundError', async function () { it('should return a NotFoundError', async function () {
this.MongoManager.findDoc.resolves(null) this.MongoManager.promises.findDoc.resolves(null)
await expect( await expect(
this.DocManager._getDoc(this.project_id, this.doc_id, { this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
version: true, version: true,
inS3: true, inS3: true,
}) })
@@ -290,27 +262,23 @@ describe('DocManager', function () {
lines: ['mock-lines'], lines: ['mock-lines'],
}, },
] ]
this.MongoManager.getProjectsDocs.resolves(this.docs) this.MongoManager.promises.getProjectsDocs.resolves(this.docs)
this.DocArchiveManager.unArchiveAllDocs.resolves(this.docs) this.DocArchiveManager.promises.unArchiveAllDocs.resolves(this.docs)
this.filter = { lines: true, ranges: true } this.filter = { lines: true }
this.result = await this.DocManager.getAllNonDeletedDocs( this.result = await this.DocManager.promises.getAllNonDeletedDocs(
this.project_id, this.project_id,
this.filter this.filter
) )
}) })
it('should get the project from the database', function () { it('should get the project from the database', function () {
this.MongoManager.getProjectsDocs.should.have.been.calledWith( this.MongoManager.promises.getProjectsDocs.should.have.been.calledWith(
this.project_id, this.project_id,
{ include_deleted: false }, { include_deleted: false },
this.filter this.filter
) )
}) })
it('should fix comment ids', async function () {
expect(this.RangeManager.fixCommentIds).to.have.been.called
})
it('should return the docs', function () { it('should return the docs', function () {
expect(this.result).to.deep.equal(this.docs) expect(this.result).to.deep.equal(this.docs)
}) })
@@ -318,10 +286,13 @@ describe('DocManager', function () {
describe('when there are no docs for the project', function () { describe('when there are no docs for the project', function () {
it('should return a NotFoundError', async function () { it('should return a NotFoundError', async function () {
this.MongoManager.getProjectsDocs.resolves(null) this.MongoManager.promises.getProjectsDocs.resolves(null)
this.DocArchiveManager.unArchiveAllDocs.resolves(null) this.DocArchiveManager.promises.unArchiveAllDocs.resolves(null)
await expect( await expect(
this.DocManager.getAllNonDeletedDocs(this.project_id, this.filter) this.DocManager.promises.getAllNonDeletedDocs(
this.project_id,
this.filter
)
).to.be.rejectedWith(`No docs for project ${this.project_id}`) ).to.be.rejectedWith(`No docs for project ${this.project_id}`)
}) })
}) })
@@ -332,7 +303,7 @@ describe('DocManager', function () {
beforeEach(function () { beforeEach(function () {
this.lines = ['mock', 'doc', 'lines'] this.lines = ['mock', 'doc', 'lines']
this.rev = 77 this.rev = 77
this.MongoManager.findDoc.resolves({ this.MongoManager.promises.findDoc.resolves({
_id: new ObjectId(this.doc_id), _id: new ObjectId(this.doc_id),
}) })
this.meta = {} this.meta = {}
@@ -340,7 +311,7 @@ describe('DocManager', function () {
describe('standard path', function () { describe('standard path', function () {
beforeEach(async function () { beforeEach(async function () {
await this.DocManager.patchDoc( await this.DocManager.promises.patchDoc(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
this.meta this.meta
@@ -348,14 +319,14 @@ describe('DocManager', function () {
}) })
it('should get the doc', function () { it('should get the doc', function () {
expect(this.MongoManager.findDoc).to.have.been.calledWith( expect(this.MongoManager.promises.findDoc).to.have.been.calledWith(
this.project_id, this.project_id,
this.doc_id this.doc_id
) )
}) })
it('should persist the meta', function () { it('should persist the meta', function () {
expect(this.MongoManager.patchDoc).to.have.been.calledWith( expect(this.MongoManager.promises.patchDoc).to.have.been.calledWith(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
this.meta this.meta
@@ -368,7 +339,7 @@ describe('DocManager', function () {
this.settings.docstore.archiveOnSoftDelete = false this.settings.docstore.archiveOnSoftDelete = false
this.meta.deleted = true this.meta.deleted = true
await this.DocManager.patchDoc( await this.DocManager.promises.patchDoc(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
this.meta this.meta
@@ -376,7 +347,8 @@ describe('DocManager', function () {
}) })
it('should not flush the doc out of mongo', function () { it('should not flush the doc out of mongo', function () {
expect(this.DocArchiveManager.archiveDoc).to.not.have.been.called expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been
.called
}) })
}) })
@@ -384,7 +356,7 @@ describe('DocManager', function () {
beforeEach(async function () { beforeEach(async function () {
this.settings.docstore.archiveOnSoftDelete = false this.settings.docstore.archiveOnSoftDelete = false
this.meta.deleted = false this.meta.deleted = false
await this.DocManager.patchDoc( await this.DocManager.promises.patchDoc(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
this.meta this.meta
@@ -392,7 +364,8 @@ describe('DocManager', function () {
}) })
it('should not flush the doc out of mongo', function () { it('should not flush the doc out of mongo', function () {
expect(this.DocArchiveManager.archiveDoc).to.not.have.been.called expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been
.called
}) })
}) })
@@ -404,7 +377,7 @@ describe('DocManager', function () {
describe('when the background flush succeeds', function () { describe('when the background flush succeeds', function () {
beforeEach(async function () { beforeEach(async function () {
await this.DocManager.patchDoc( await this.DocManager.promises.patchDoc(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
this.meta this.meta
@@ -416,18 +389,17 @@ describe('DocManager', function () {
}) })
it('should flush the doc out of mongo', function () { it('should flush the doc out of mongo', function () {
expect(this.DocArchiveManager.archiveDoc).to.have.been.calledWith( expect(
this.project_id, this.DocArchiveManager.promises.archiveDoc
this.doc_id ).to.have.been.calledWith(this.project_id, this.doc_id)
)
}) })
}) })
describe('when the background flush fails', function () { describe('when the background flush fails', function () {
beforeEach(async function () { beforeEach(async function () {
this.err = new Error('foo') this.err = new Error('foo')
this.DocArchiveManager.archiveDoc.rejects(this.err) this.DocArchiveManager.promises.archiveDoc.rejects(this.err)
await this.DocManager.patchDoc( await this.DocManager.promises.patchDoc(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
this.meta this.meta
@@ -450,9 +422,9 @@ describe('DocManager', function () {
describe('when the doc does not exist', function () { describe('when the doc does not exist', function () {
it('should return a NotFoundError', async function () { it('should return a NotFoundError', async function () {
this.MongoManager.findDoc.resolves(null) this.MongoManager.promises.findDoc.resolves(null)
await expect( await expect(
this.DocManager.patchDoc(this.project_id, this.doc_id, {}) this.DocManager.promises.patchDoc(this.project_id, this.doc_id, {})
).to.be.rejectedWith( ).to.be.rejectedWith(
`No such project/doc to delete: ${this.project_id}/${this.doc_id}` `No such project/doc to delete: ${this.project_id}/${this.doc_id}`
) )
@@ -498,13 +470,13 @@ describe('DocManager', function () {
ranges: this.originalRanges, ranges: this.originalRanges,
} }
this.DocManager._getDoc = sinon.stub() this.DocManager.promises._getDoc = sinon.stub()
}) })
describe('when only the doc lines have changed', function () { describe('when only the doc lines have changed', function () {
beforeEach(async function () { beforeEach(async function () {
this.DocManager._getDoc = sinon.stub().resolves(this.doc) this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.result = await this.DocManager.updateDoc( this.result = await this.DocManager.promises.updateDoc(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
this.newDocLines, this.newDocLines,
@@ -514,7 +486,7 @@ describe('DocManager', function () {
}) })
it('should get the existing doc', function () { it('should get the existing doc', function () {
this.DocManager._getDoc this.DocManager.promises._getDoc
.calledWith(this.project_id, this.doc_id, { .calledWith(this.project_id, this.doc_id, {
version: true, version: true,
rev: true, rev: true,
@@ -526,7 +498,7 @@ describe('DocManager', function () {
}) })
it('should upsert the document to the doc collection', function () { it('should upsert the document to the doc collection', function () {
this.MongoManager.upsertIntoDocCollection this.MongoManager.promises.upsertIntoDocCollection
.calledWith(this.project_id, this.doc_id, this.rev, { .calledWith(this.project_id, this.doc_id, this.rev, {
lines: this.newDocLines, lines: this.newDocLines,
}) })
@@ -540,9 +512,9 @@ describe('DocManager', function () {
describe('when the doc ranges have changed', function () { describe('when the doc ranges have changed', function () {
beforeEach(async function () { beforeEach(async function () {
this.DocManager._getDoc = sinon.stub().resolves(this.doc) this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.RangeManager.shouldUpdateRanges.returns(true) this.RangeManager.shouldUpdateRanges.returns(true)
this.result = await this.DocManager.updateDoc( this.result = await this.DocManager.promises.updateDoc(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
this.oldDocLines, this.oldDocLines,
@ -552,7 +524,7 @@ describe('DocManager', function () {
}) })
it('should upsert the ranges', function () { it('should upsert the ranges', function () {
this.MongoManager.upsertIntoDocCollection this.MongoManager.promises.upsertIntoDocCollection
.calledWith(this.project_id, this.doc_id, this.rev, { .calledWith(this.project_id, this.doc_id, this.rev, {
ranges: this.newRanges, ranges: this.newRanges,
}) })
@@ -566,8 +538,8 @@ describe('DocManager', function () {
describe('when only the version has changed', function () { describe('when only the version has changed', function () {
beforeEach(async function () { beforeEach(async function () {
this.DocManager._getDoc = sinon.stub().resolves(this.doc) this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.result = await this.DocManager.updateDoc( this.result = await this.DocManager.promises.updateDoc(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
this.oldDocLines, this.oldDocLines,
@@ -577,7 +549,7 @@ describe('DocManager', function () {
}) })
it('should update the version', function () { it('should update the version', function () {
this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith( this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
this.rev, this.rev,
@@ -592,8 +564,8 @@ describe('DocManager', function () {
describe('when the doc has not changed at all', function () { describe('when the doc has not changed at all', function () {
beforeEach(async function () { beforeEach(async function () {
this.DocManager._getDoc = sinon.stub().resolves(this.doc) this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.result = await this.DocManager.updateDoc( this.result = await this.DocManager.promises.updateDoc(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
this.oldDocLines, this.oldDocLines,
@@ -603,7 +575,9 @@ describe('DocManager', function () {
}) })
it('should not update the ranges or lines or version', function () { it('should not update the ranges or lines or version', function () {
this.MongoManager.upsertIntoDocCollection.called.should.equal(false) this.MongoManager.promises.upsertIntoDocCollection.called.should.equal(
false
)
}) })
it('should return the old rev and modified == false', function () { it('should return the old rev and modified == false', function () {
@@ -614,7 +588,7 @@ describe('DocManager', function () {
describe('when the version is null', function () { describe('when the version is null', function () {
it('should return an error', async function () { it('should return an error', async function () {
await expect( await expect(
this.DocManager.updateDoc( this.DocManager.promises.updateDoc(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
this.newDocLines, this.newDocLines,
@@ -628,7 +602,7 @@ describe('DocManager', function () {
describe('when the lines are null', function () { describe('when the lines are null', function () {
it('should return an error', async function () { it('should return an error', async function () {
await expect( await expect(
this.DocManager.updateDoc( this.DocManager.promises.updateDoc(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
null, null,
@@ -642,7 +616,7 @@ describe('DocManager', function () {
describe('when the ranges are null', function () { describe('when the ranges are null', function () {
it('should return an error', async function () { it('should return an error', async function () {
await expect( await expect(
this.DocManager.updateDoc( this.DocManager.promises.updateDoc(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
this.newDocLines, this.newDocLines,
@@ -656,9 +630,9 @@ describe('DocManager', function () {
describe('when there is a generic error getting the doc', function () { describe('when there is a generic error getting the doc', function () {
beforeEach(async function () { beforeEach(async function () {
this.error = new Error('doc could not be found') this.error = new Error('doc could not be found')
this.DocManager._getDoc = sinon.stub().rejects(this.error) this.DocManager.promises._getDoc = sinon.stub().rejects(this.error)
await expect( await expect(
this.DocManager.updateDoc( this.DocManager.promises.updateDoc(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
this.newDocLines, this.newDocLines,
@@ -669,15 +643,16 @@ describe('DocManager', function () {
}) })
it('should not upsert the document to the doc collection', function () { it('should not upsert the document to the doc collection', function () {
this.MongoManager.upsertIntoDocCollection.should.not.have.been.called this.MongoManager.promises.upsertIntoDocCollection.should.not.have.been
.called
}) })
}) })
describe('when the version was decremented', function () { describe('when the version was decremented', function () {
it('should return an error', async function () { it('should return an error', async function () {
this.DocManager._getDoc = sinon.stub().resolves(this.doc) this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
await expect( await expect(
this.DocManager.updateDoc( this.DocManager.promises.updateDoc(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
this.newDocLines, this.newDocLines,
@@ -690,8 +665,8 @@ describe('DocManager', function () {
describe('when the doc lines have not changed', function () { describe('when the doc lines have not changed', function () {
beforeEach(async function () { beforeEach(async function () {
this.DocManager._getDoc = sinon.stub().resolves(this.doc) this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.result = await this.DocManager.updateDoc( this.result = await this.DocManager.promises.updateDoc(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
this.oldDocLines.slice(), this.oldDocLines.slice(),
@@ -701,7 +676,9 @@ describe('DocManager', function () {
}) })
it('should not update the doc', function () { it('should not update the doc', function () {
this.MongoManager.upsertIntoDocCollection.called.should.equal(false) this.MongoManager.promises.upsertIntoDocCollection.called.should.equal(
false
)
}) })
it('should return the existing rev', function () { it('should return the existing rev', function () {
@@ -711,8 +688,8 @@ describe('DocManager', function () {
describe('when the doc does not exist', function () { describe('when the doc does not exist', function () {
beforeEach(async function () { beforeEach(async function () {
this.DocManager._getDoc = sinon.stub().resolves(null) this.DocManager.promises._getDoc = sinon.stub().resolves(null)
this.result = await this.DocManager.updateDoc( this.result = await this.DocManager.promises.updateDoc(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
this.newDocLines, this.newDocLines,
@@ -722,7 +699,7 @@ describe('DocManager', function () {
}) })
it('should upsert the document to the doc collection', function () { it('should upsert the document to the doc collection', function () {
this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith( this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
undefined, undefined,
@@ -741,12 +718,12 @@ describe('DocManager', function () {
describe('when another update is racing', function () { describe('when another update is racing', function () {
beforeEach(async function () { beforeEach(async function () {
this.DocManager._getDoc = sinon.stub().resolves(this.doc) this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.MongoManager.upsertIntoDocCollection this.MongoManager.promises.upsertIntoDocCollection
.onFirstCall() .onFirstCall()
.rejects(new Errors.DocRevValueError()) .rejects(new Errors.DocRevValueError())
this.RangeManager.shouldUpdateRanges.returns(true) this.RangeManager.shouldUpdateRanges.returns(true)
this.result = await this.DocManager.updateDoc( this.result = await this.DocManager.promises.updateDoc(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
this.newDocLines, this.newDocLines,
@@ -756,7 +733,7 @@ describe('DocManager', function () {
}) })
it('should upsert the doc twice', function () { it('should upsert the doc twice', function () {
this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith( this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
this.project_id, this.project_id,
this.doc_id, this.doc_id,
this.rev, this.rev,
@@ -766,7 +743,8 @@ describe('DocManager', function () {
version: this.version + 1, version: this.version + 1,
} }
) )
this.MongoManager.upsertIntoDocCollection.should.have.been.calledTwice this.MongoManager.promises.upsertIntoDocCollection.should.have.been
.calledTwice
}) })
it('should return the new rev', function () { it('should return the new rev', function () {
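
The assertions in the hunks above combine sinon promise stubs with chai-as-promised (`await expect(...).to.be.rejectedWith(...)`). A self-contained sketch of that pattern, with the stub name and values chosen only for illustration:

const chai = require('chai')
const chaiAsPromised = require('chai-as-promised')
const sinon = require('sinon')

chai.use(chaiAsPromised)
const { expect } = chai

describe('promise-stub pattern', function () {
  it('asserts on resolution and rejection', async function () {
    const findDoc = sinon.stub()
    findDoc.onFirstCall().resolves({ _id: 'doc-1' })
    findDoc.onSecondCall().rejects(new Error('boom'))

    // chai-as-promised assertions return promises, so they must be awaited
    await expect(findDoc()).to.eventually.deep.equal({ _id: 'doc-1' })
    await expect(findDoc()).to.be.rejectedWith('boom')
    expect(findDoc.calledTwice).to.equal(true)
  })
})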


@@ -14,7 +14,7 @@ describe('HttpController', function () {
max_doc_length: 2 * 1024 * 1024, max_doc_length: 2 * 1024 * 1024,
} }
this.DocArchiveManager = { this.DocArchiveManager = {
unArchiveAllDocs: sinon.stub().returns(), unArchiveAllDocs: sinon.stub().yields(),
} }
this.DocManager = {} this.DocManager = {}
this.HttpController = SandboxedModule.require(modulePath, { this.HttpController = SandboxedModule.require(modulePath, {
@@ -54,13 +54,15 @@ describe('HttpController', function () {
describe('getDoc', function () { describe('getDoc', function () {
describe('without deleted docs', function () { describe('without deleted docs', function () {
beforeEach(async function () { beforeEach(function () {
this.req.params = { this.req.params = {
project_id: this.projectId, project_id: this.projectId,
doc_id: this.docId, doc_id: this.docId,
} }
this.DocManager.getFullDoc = sinon.stub().resolves(this.doc) this.DocManager.getFullDoc = sinon
await this.HttpController.getDoc(this.req, this.res, this.next) .stub()
.callsArgWith(2, null, this.doc)
this.HttpController.getDoc(this.req, this.res, this.next)
}) })
it('should get the document with the version (including deleted)', function () { it('should get the document with the version (including deleted)', function () {
@@ -87,24 +89,26 @@ describe('HttpController', function () {
project_id: this.projectId, project_id: this.projectId,
doc_id: this.docId, doc_id: this.docId,
} }
this.DocManager.getFullDoc = sinon.stub().resolves(this.deletedDoc) this.DocManager.getFullDoc = sinon
.stub()
.callsArgWith(2, null, this.deletedDoc)
}) })
it('should get the doc from the doc manager', async function () { it('should get the doc from the doc manager', function () {
await this.HttpController.getDoc(this.req, this.res, this.next) this.HttpController.getDoc(this.req, this.res, this.next)
this.DocManager.getFullDoc this.DocManager.getFullDoc
.calledWith(this.projectId, this.docId) .calledWith(this.projectId, this.docId)
.should.equal(true) .should.equal(true)
}) })
it('should return 404 if the query string delete is not set ', async function () { it('should return 404 if the query string delete is not set ', function () {
await this.HttpController.getDoc(this.req, this.res, this.next) this.HttpController.getDoc(this.req, this.res, this.next)
this.res.sendStatus.calledWith(404).should.equal(true) this.res.sendStatus.calledWith(404).should.equal(true)
}) })
it('should return the doc as JSON if include_deleted is set to true', async function () { it('should return the doc as JSON if include_deleted is set to true', function () {
this.req.query.include_deleted = 'true' this.req.query.include_deleted = 'true'
await this.HttpController.getDoc(this.req, this.res, this.next) this.HttpController.getDoc(this.req, this.res, this.next)
this.res.json this.res.json
.calledWith({ .calledWith({
_id: this.docId, _id: this.docId,
@@ -119,15 +123,13 @@ describe('HttpController', function () {
}) })
describe('getRawDoc', function () { describe('getRawDoc', function () {
beforeEach(async function () { beforeEach(function () {
this.req.params = { this.req.params = {
project_id: this.projectId, project_id: this.projectId,
doc_id: this.docId, doc_id: this.docId,
} }
this.DocManager.getDocLines = sinon this.DocManager.getDocLines = sinon.stub().callsArgWith(2, null, this.doc)
.stub() this.HttpController.getRawDoc(this.req, this.res, this.next)
.resolves(this.doc.lines.join('\n'))
await this.HttpController.getRawDoc(this.req, this.res, this.next)
}) })
it('should get the document without the version', function () { it('should get the document without the version', function () {
@@ -152,7 +154,7 @@ describe('HttpController', function () {
describe('getAllDocs', function () { describe('getAllDocs', function () {
describe('normally', function () { describe('normally', function () {
beforeEach(async function () { beforeEach(function () {
this.req.params = { project_id: this.projectId } this.req.params = { project_id: this.projectId }
this.docs = [ this.docs = [
{ {
@@ -166,8 +168,10 @@ describe('HttpController', function () {
rev: 4, rev: 4,
}, },
] ]
this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs) this.DocManager.getAllNonDeletedDocs = sinon
await this.HttpController.getAllDocs(this.req, this.res, this.next) .stub()
.callsArgWith(2, null, this.docs)
this.HttpController.getAllDocs(this.req, this.res, this.next)
}) })
it('should get all the (non-deleted) docs', function () { it('should get all the (non-deleted) docs', function () {
@@ -195,7 +199,7 @@ describe('HttpController', function () {
}) })
describe('with null lines', function () { describe('with null lines', function () {
beforeEach(async function () { beforeEach(function () {
this.req.params = { project_id: this.projectId } this.req.params = { project_id: this.projectId }
this.docs = [ this.docs = [
{ {
@@ -209,8 +213,10 @@ describe('HttpController', function () {
rev: 4, rev: 4,
}, },
] ]
this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs) this.DocManager.getAllNonDeletedDocs = sinon
await this.HttpController.getAllDocs(this.req, this.res, this.next) .stub()
.callsArgWith(2, null, this.docs)
this.HttpController.getAllDocs(this.req, this.res, this.next)
}) })
it('should return the doc with fallback lines', function () { it('should return the doc with fallback lines', function () {
@@ -232,7 +238,7 @@ describe('HttpController', function () {
}) })
describe('with a null doc', function () { describe('with a null doc', function () {
beforeEach(async function () { beforeEach(function () {
this.req.params = { project_id: this.projectId } this.req.params = { project_id: this.projectId }
this.docs = [ this.docs = [
{ {
@@ -247,8 +253,10 @@ describe('HttpController', function () {
rev: 4, rev: 4,
}, },
] ]
this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs) this.DocManager.getAllNonDeletedDocs = sinon
await this.HttpController.getAllDocs(this.req, this.res, this.next) .stub()
.callsArgWith(2, null, this.docs)
this.HttpController.getAllDocs(this.req, this.res, this.next)
}) })
it('should return the non null docs as JSON', function () { it('should return the non null docs as JSON', function () {
@@ -284,7 +292,7 @@ describe('HttpController', function () {
describe('getAllRanges', function () { describe('getAllRanges', function () {
describe('normally', function () { describe('normally', function () {
beforeEach(async function () { beforeEach(function () {
this.req.params = { project_id: this.projectId } this.req.params = { project_id: this.projectId }
this.docs = [ this.docs = [
{ {
@@ -296,8 +304,10 @@ describe('HttpController', function () {
ranges: { mock_ranges: 'two' }, ranges: { mock_ranges: 'two' },
}, },
] ]
this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs) this.DocManager.getAllNonDeletedDocs = sinon
await this.HttpController.getAllRanges(this.req, this.res, this.next) .stub()
.callsArgWith(2, null, this.docs)
this.HttpController.getAllRanges(this.req, this.res, this.next)
}) })
it('should get all the (non-deleted) doc ranges', function () { it('should get all the (non-deleted) doc ranges', function () {
@@ -332,17 +342,16 @@ describe('HttpController', function () {
}) })
describe('when the doc lines exist and were updated', function () { describe('when the doc lines exist and were updated', function () {
beforeEach(async function () { beforeEach(function () {
this.req.body = { this.req.body = {
lines: (this.lines = ['hello', 'world']), lines: (this.lines = ['hello', 'world']),
version: (this.version = 42), version: (this.version = 42),
ranges: (this.ranges = { changes: 'mock' }), ranges: (this.ranges = { changes: 'mock' }),
} }
this.rev = 5
this.DocManager.updateDoc = sinon this.DocManager.updateDoc = sinon
.stub() .stub()
.resolves({ modified: true, rev: this.rev }) .yields(null, true, (this.rev = 5))
await this.HttpController.updateDoc(this.req, this.res, this.next) this.HttpController.updateDoc(this.req, this.res, this.next)
}) })
it('should update the document', function () { it('should update the document', function () {
@@ -365,17 +374,16 @@ describe('HttpController', function () {
}) })
describe('when the doc lines exist and were not updated', function () { describe('when the doc lines exist and were not updated', function () {
beforeEach(async function () { beforeEach(function () {
this.req.body = { this.req.body = {
lines: (this.lines = ['hello', 'world']), lines: (this.lines = ['hello', 'world']),
version: (this.version = 42), version: (this.version = 42),
ranges: {}, ranges: {},
} }
this.rev = 5
this.DocManager.updateDoc = sinon this.DocManager.updateDoc = sinon
.stub() .stub()
.resolves({ modified: false, rev: this.rev }) .yields(null, false, (this.rev = 5))
await this.HttpController.updateDoc(this.req, this.res, this.next) this.HttpController.updateDoc(this.req, this.res, this.next)
}) })
it('should return a modified status', function () { it('should return a modified status', function () {
@@ -386,12 +394,10 @@ describe('HttpController', function () {
}) })
describe('when the doc lines are not provided', function () { describe('when the doc lines are not provided', function () {
beforeEach(async function () { beforeEach(function () {
this.req.body = { version: 42, ranges: {} } this.req.body = { version: 42, ranges: {} }
this.DocManager.updateDoc = sinon this.DocManager.updateDoc = sinon.stub().yields(null, false)
.stub() this.HttpController.updateDoc(this.req, this.res, this.next)
.resolves({ modified: false, rev: 0 })
await this.HttpController.updateDoc(this.req, this.res, this.next)
}) })
it('should not update the document', function () { it('should not update the document', function () {
@@ -404,12 +410,10 @@ describe('HttpController', function () {
}) })
describe('when the doc version are not provided', function () { describe('when the doc version are not provided', function () {
beforeEach(async function () { beforeEach(function () {
this.req.body = { version: 42, lines: ['hello world'] } this.req.body = { version: 42, lines: ['hello world'] }
this.DocManager.updateDoc = sinon this.DocManager.updateDoc = sinon.stub().yields(null, false)
.stub() this.HttpController.updateDoc(this.req, this.res, this.next)
.resolves({ modified: false, rev: 0 })
await this.HttpController.updateDoc(this.req, this.res, this.next)
}) })
it('should not update the document', function () { it('should not update the document', function () {
@@ -422,12 +426,10 @@ describe('HttpController', function () {
}) })
describe('when the doc ranges is not provided', function () { describe('when the doc ranges is not provided', function () {
beforeEach(async function () { beforeEach(function () {
this.req.body = { lines: ['foo'], version: 42 } this.req.body = { lines: ['foo'], version: 42 }
this.DocManager.updateDoc = sinon this.DocManager.updateDoc = sinon.stub().yields(null, false)
.stub() this.HttpController.updateDoc(this.req, this.res, this.next)
.resolves({ modified: false, rev: 0 })
await this.HttpController.updateDoc(this.req, this.res, this.next)
}) })
it('should not update the document', function () { it('should not update the document', function () {
@@ -440,20 +442,13 @@ describe('HttpController', function () {
}) })
describe('when the doc body is too large', function () { describe('when the doc body is too large', function () {
beforeEach(async function () { beforeEach(function () {
this.req.body = { this.req.body = {
lines: (this.lines = Array(2049).fill('a'.repeat(1024))), lines: (this.lines = Array(2049).fill('a'.repeat(1024))),
version: (this.version = 42), version: (this.version = 42),
ranges: (this.ranges = { changes: 'mock' }), ranges: (this.ranges = { changes: 'mock' }),
} }
this.DocManager.updateDoc = sinon this.HttpController.updateDoc(this.req, this.res, this.next)
.stub()
.resolves({ modified: false, rev: 0 })
await this.HttpController.updateDoc(this.req, this.res, this.next)
})
it('should not update the document', function () {
this.DocManager.updateDoc.called.should.equal(false)
}) })
it('should return a 413 (too large) response', function () { it('should return a 413 (too large) response', function () {
@@ -467,14 +462,14 @@ describe('HttpController', function () {
}) })
describe('patchDoc', function () { describe('patchDoc', function () {
beforeEach(async function () { beforeEach(function () {
this.req.params = { this.req.params = {
project_id: this.projectId, project_id: this.projectId,
doc_id: this.docId, doc_id: this.docId,
} }
this.req.body = { name: 'foo.tex' } this.req.body = { name: 'foo.tex' }
this.DocManager.patchDoc = sinon.stub().resolves() this.DocManager.patchDoc = sinon.stub().yields(null)
await this.HttpController.patchDoc(this.req, this.res, this.next) this.HttpController.patchDoc(this.req, this.res, this.next)
}) })
it('should delete the document', function () { it('should delete the document', function () {
@@ -489,11 +484,11 @@ describe('HttpController', function () {
}) })
describe('with an invalid payload', function () { describe('with an invalid payload', function () {
beforeEach(async function () { beforeEach(function () {
this.req.body = { cannot: 'happen' } this.req.body = { cannot: 'happen' }
this.DocManager.patchDoc = sinon.stub().resolves() this.DocManager.patchDoc = sinon.stub().yields(null)
await this.HttpController.patchDoc(this.req, this.res, this.next) this.HttpController.patchDoc(this.req, this.res, this.next)
}) })
it('should log a message', function () { it('should log a message', function () {
@@ -514,10 +509,10 @@ describe('HttpController', function () {
}) })
describe('archiveAllDocs', function () { describe('archiveAllDocs', function () {
beforeEach(async function () { beforeEach(function () {
this.req.params = { project_id: this.projectId } this.req.params = { project_id: this.projectId }
this.DocArchiveManager.archiveAllDocs = sinon.stub().resolves() this.DocArchiveManager.archiveAllDocs = sinon.stub().callsArg(1)
await this.HttpController.archiveAllDocs(this.req, this.res, this.next) this.HttpController.archiveAllDocs(this.req, this.res, this.next)
}) })
it('should archive the project', function () { it('should archive the project', function () {
@@ -537,12 +532,9 @@ describe('HttpController', function () {
}) })
describe('on success', function () { describe('on success', function () {
beforeEach(async function () { beforeEach(function (done) {
await this.HttpController.unArchiveAllDocs( this.res.sendStatus.callsFake(() => done())
this.req, this.HttpController.unArchiveAllDocs(this.req, this.res, this.next)
this.res,
this.next
)
}) })
it('returns a 200', function () { it('returns a 200', function () {
@@ -551,15 +543,12 @@ describe('HttpController', function () {
}) })
describe("when the archived rev doesn't match", function () { describe("when the archived rev doesn't match", function () {
beforeEach(async function () { beforeEach(function (done) {
this.DocArchiveManager.unArchiveAllDocs.rejects( this.res.sendStatus.callsFake(() => done())
this.DocArchiveManager.unArchiveAllDocs.yields(
new Errors.DocRevValueError('bad rev') new Errors.DocRevValueError('bad rev')
) )
await this.HttpController.unArchiveAllDocs( this.HttpController.unArchiveAllDocs(this.req, this.res, this.next)
this.req,
this.res,
this.next
)
}) })
it('returns a 409', function () { it('returns a 409', function () {
@@ -569,10 +558,10 @@ describe('HttpController', function () {
}) })
describe('destroyProject', function () { describe('destroyProject', function () {
beforeEach(async function () { beforeEach(function () {
this.req.params = { project_id: this.projectId } this.req.params = { project_id: this.projectId }
this.DocArchiveManager.destroyProject = sinon.stub().resolves() this.DocArchiveManager.destroyProject = sinon.stub().callsArg(1)
await this.HttpController.destroyProject(this.req, this.res, this.next) this.HttpController.destroyProject(this.req, this.res, this.next)
}) })
it('should destroy the docs', function () { it('should destroy the docs', function () {
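
The recurring edit in this file swaps callback-style stubs such as `sinon.stub().callsArgWith(2, null, doc)` for `sinon.stub().resolves(doc)`, with the controller awaited rather than driven by `done`. A hedged sketch of the equivalence; the handler below is invented for illustration, not the real HttpController:

const sinon = require('sinon')

// Callback style: the argument at index 2 is invoked with (err, doc).
const getFullDocCb = sinon.stub().callsArgWith(2, null, { _id: 'doc-1' })
getFullDocCb('project-1', 'doc-1', (err, doc) => {
  if (err) throw err
  console.log('callback style:', doc)
})

// Promise style: the same data, consumed with await in an async handler.
const getFullDoc = sinon.stub().resolves({ _id: 'doc-1' })
async function getDoc(req, res) {
  const doc = await getFullDoc(req.params.project_id, req.params.doc_id)
  if (!doc) return res.sendStatus(404)
  res.json(doc)
}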


@@ -41,7 +41,7 @@ describe('MongoManager', function () {
this.doc = { name: 'mock-doc' } this.doc = { name: 'mock-doc' }
this.db.docs.findOne = sinon.stub().resolves(this.doc) this.db.docs.findOne = sinon.stub().resolves(this.doc)
this.filter = { lines: true } this.filter = { lines: true }
this.result = await this.MongoManager.findDoc( this.result = await this.MongoManager.promises.findDoc(
this.projectId, this.projectId,
this.docId, this.docId,
this.filter this.filter
@@ -70,7 +70,11 @@ describe('MongoManager', function () {
describe('patchDoc', function () { describe('patchDoc', function () {
beforeEach(async function () { beforeEach(async function () {
this.meta = { name: 'foo.tex' } this.meta = { name: 'foo.tex' }
await this.MongoManager.patchDoc(this.projectId, this.docId, this.meta) await this.MongoManager.promises.patchDoc(
this.projectId,
this.docId,
this.meta
)
}) })
it('should pass the parameter along', function () { it('should pass the parameter along', function () {
@@ -100,7 +104,7 @@ describe('MongoManager', function () {
describe('with included_deleted = false', function () { describe('with included_deleted = false', function () {
beforeEach(async function () { beforeEach(async function () {
this.result = await this.MongoManager.getProjectsDocs( this.result = await this.MongoManager.promises.getProjectsDocs(
this.projectId, this.projectId,
{ include_deleted: false }, { include_deleted: false },
this.filter this.filter
@@ -128,7 +132,7 @@ describe('MongoManager', function () {
describe('with included_deleted = true', function () { describe('with included_deleted = true', function () {
beforeEach(async function () { beforeEach(async function () {
this.result = await this.MongoManager.getProjectsDocs( this.result = await this.MongoManager.promises.getProjectsDocs(
this.projectId, this.projectId,
{ include_deleted: true }, { include_deleted: true },
this.filter this.filter
@@ -163,7 +167,7 @@ describe('MongoManager', function () {
this.db.docs.find = sinon.stub().returns({ this.db.docs.find = sinon.stub().returns({
toArray: sinon.stub().resolves([this.doc1, this.doc2, this.doc3]), toArray: sinon.stub().resolves([this.doc1, this.doc2, this.doc3]),
}) })
this.result = await this.MongoManager.getProjectsDeletedDocs( this.result = await this.MongoManager.promises.getProjectsDeletedDocs(
this.projectId, this.projectId,
this.filter this.filter
) )
@@ -199,7 +203,7 @@ describe('MongoManager', function () {
}) })
it('should upsert the document', async function () { it('should upsert the document', async function () {
await this.MongoManager.upsertIntoDocCollection( await this.MongoManager.promises.upsertIntoDocCollection(
this.projectId, this.projectId,
this.docId, this.docId,
this.oldRev, this.oldRev,
@@ -219,7 +223,7 @@ describe('MongoManager', function () {
it('should handle update error', async function () { it('should handle update error', async function () {
this.db.docs.updateOne.rejects(this.stubbedErr) this.db.docs.updateOne.rejects(this.stubbedErr)
await expect( await expect(
this.MongoManager.upsertIntoDocCollection( this.MongoManager.promises.upsertIntoDocCollection(
this.projectId, this.projectId,
this.docId, this.docId,
this.rev, this.rev,
@@ -231,7 +235,7 @@ describe('MongoManager', function () {
}) })
it('should insert without a previous rev', async function () { it('should insert without a previous rev', async function () {
await this.MongoManager.upsertIntoDocCollection( await this.MongoManager.promises.upsertIntoDocCollection(
this.projectId, this.projectId,
this.docId, this.docId,
null, null,
@@ -250,7 +254,7 @@ describe('MongoManager', function () {
it('should handle generic insert error', async function () { it('should handle generic insert error', async function () {
this.db.docs.insertOne.rejects(this.stubbedErr) this.db.docs.insertOne.rejects(this.stubbedErr)
await expect( await expect(
this.MongoManager.upsertIntoDocCollection( this.MongoManager.promises.upsertIntoDocCollection(
this.projectId, this.projectId,
this.docId, this.docId,
null, null,
@@ -262,7 +266,7 @@ describe('MongoManager', function () {
it('should handle duplicate insert error', async function () { it('should handle duplicate insert error', async function () {
this.db.docs.insertOne.rejects({ code: 11000 }) this.db.docs.insertOne.rejects({ code: 11000 })
await expect( await expect(
this.MongoManager.upsertIntoDocCollection( this.MongoManager.promises.upsertIntoDocCollection(
this.projectId, this.projectId,
this.docId, this.docId,
null, null,
@@ -276,7 +280,7 @@ describe('MongoManager', function () {
beforeEach(async function () { beforeEach(async function () {
this.projectId = new ObjectId() this.projectId = new ObjectId()
this.db.docs.deleteMany = sinon.stub().resolves() this.db.docs.deleteMany = sinon.stub().resolves()
await this.MongoManager.destroyProject(this.projectId) await this.MongoManager.promises.destroyProject(this.projectId)
}) })
it('should destroy all docs', function () { it('should destroy all docs', function () {
@@ -293,13 +297,13 @@ describe('MongoManager', function () {
it('should not error when the rev has not changed', async function () { it('should not error when the rev has not changed', async function () {
this.db.docs.findOne = sinon.stub().resolves({ rev: 1 }) this.db.docs.findOne = sinon.stub().resolves({ rev: 1 })
await this.MongoManager.checkRevUnchanged(this.doc) await this.MongoManager.promises.checkRevUnchanged(this.doc)
}) })
it('should return an error when the rev has changed', async function () { it('should return an error when the rev has changed', async function () {
this.db.docs.findOne = sinon.stub().resolves({ rev: 2 }) this.db.docs.findOne = sinon.stub().resolves({ rev: 2 })
await expect( await expect(
this.MongoManager.checkRevUnchanged(this.doc) this.MongoManager.promises.checkRevUnchanged(this.doc)
).to.be.rejectedWith(Errors.DocModifiedError) ).to.be.rejectedWith(Errors.DocModifiedError)
}) })
@@ -307,14 +311,14 @@ describe('MongoManager', function () {
this.db.docs.findOne = sinon.stub().resolves({ rev: 2 }) this.db.docs.findOne = sinon.stub().resolves({ rev: 2 })
this.doc = { _id: new ObjectId(), name: 'mock-doc', rev: NaN } this.doc = { _id: new ObjectId(), name: 'mock-doc', rev: NaN }
await expect( await expect(
this.MongoManager.checkRevUnchanged(this.doc) this.MongoManager.promises.checkRevUnchanged(this.doc)
).to.be.rejectedWith(Errors.DocRevValueError) ).to.be.rejectedWith(Errors.DocRevValueError)
}) })
it('should return a value error if checked doc rev is NaN', async function () { it('should return a value error if checked doc rev is NaN', async function () {
this.db.docs.findOne = sinon.stub().resolves({ rev: NaN }) this.db.docs.findOne = sinon.stub().resolves({ rev: NaN })
await expect( await expect(
this.MongoManager.checkRevUnchanged(this.doc) this.MongoManager.promises.checkRevUnchanged(this.doc)
).to.be.rejectedWith(Errors.DocRevValueError) ).to.be.rejectedWith(Errors.DocRevValueError)
}) })
}) })
@@ -330,7 +334,7 @@ describe('MongoManager', function () {
describe('complete doc', function () { describe('complete doc', function () {
beforeEach(async function () { beforeEach(async function () {
await this.MongoManager.restoreArchivedDoc( await this.MongoManager.promises.restoreArchivedDoc(
this.projectId, this.projectId,
this.docId, this.docId,
this.archivedDoc this.archivedDoc
@@ -360,7 +364,7 @@ describe('MongoManager', function () {
describe('without ranges', function () { describe('without ranges', function () {
beforeEach(async function () { beforeEach(async function () {
delete this.archivedDoc.ranges delete this.archivedDoc.ranges
await this.MongoManager.restoreArchivedDoc( await this.MongoManager.promises.restoreArchivedDoc(
this.projectId, this.projectId,
this.docId, this.docId,
this.archivedDoc this.archivedDoc
@@ -391,7 +395,7 @@ describe('MongoManager', function () {
it('throws a DocRevValueError', async function () { it('throws a DocRevValueError', async function () {
this.db.docs.updateOne.resolves({ matchedCount: 0 }) this.db.docs.updateOne.resolves({ matchedCount: 0 })
await expect( await expect(
this.MongoManager.restoreArchivedDoc( this.MongoManager.promises.restoreArchivedDoc(
this.projectId, this.projectId,
this.docId, this.docId,
this.archivedDoc this.archivedDoc
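
The upsert tests above exercise an optimistic-concurrency scheme: insert when there is no previous rev, otherwise update only while the stored rev still matches, and treat a MongoDB duplicate-key insert (code 11000) as a conflict. A simplified sketch against the Node MongoDB driver, with a plain Error standing in for the service's DocRevValueError:

async function upsertDoc(docs, projectId, docId, prevRev, update) {
  if (prevRev == null) {
    try {
      await docs.insertOne({ _id: docId, project_id: projectId, rev: 1, ...update })
    } catch (err) {
      if (err.code === 11000) throw new Error('rev conflict') // raced by another insert
      throw err
    }
    return
  }
  // Match on the rev we read; if someone bumped it since, matchedCount is 0.
  const result = await docs.updateOne(
    { _id: docId, project_id: projectId, rev: prevRev },
    { $set: update, $inc: { rev: 1 } }
  )
  if (result.matchedCount === 0) throw new Error('rev conflict')
}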


@@ -30,7 +30,7 @@ describe('RangeManager', function () {
}) })
describe('jsonRangesToMongo', function () { describe('jsonRangesToMongo', function () {
it('should convert ObjectIds and dates to proper objects and fix comment id', function () { it('should convert ObjectIds and dates to proper objects', function () {
const changeId = new ObjectId().toString() const changeId = new ObjectId().toString()
const commentId = new ObjectId().toString() const commentId = new ObjectId().toString()
const userId = new ObjectId().toString() const userId = new ObjectId().toString()
@@ -66,7 +66,7 @@ describe('RangeManager', function () {
], ],
comments: [ comments: [
{ {
id: new ObjectId(threadId), id: new ObjectId(commentId),
op: { c: 'foo', p: 3, t: new ObjectId(threadId) }, op: { c: 'foo', p: 3, t: new ObjectId(threadId) },
}, },
], ],
@@ -110,6 +110,7 @@ describe('RangeManager', function () {
return it('should be consistent when transformed through json -> mongo -> json', function () { return it('should be consistent when transformed through json -> mongo -> json', function () {
const changeId = new ObjectId().toString() const changeId = new ObjectId().toString()
const commentId = new ObjectId().toString()
const userId = new ObjectId().toString() const userId = new ObjectId().toString()
const threadId = new ObjectId().toString() const threadId = new ObjectId().toString()
const ts = new Date().toJSON() const ts = new Date().toJSON()
@ -126,7 +127,7 @@ describe('RangeManager', function () {
], ],
comments: [ comments: [
{ {
id: threadId, id: commentId,
op: { c: 'foo', p: 3, t: threadId }, op: { c: 'foo', p: 3, t: threadId },
}, },
], ],
@ -141,7 +142,6 @@ describe('RangeManager', function () {
return describe('shouldUpdateRanges', function () { return describe('shouldUpdateRanges', function () {
beforeEach(function () { beforeEach(function () {
const threadId = new ObjectId()
this.ranges = { this.ranges = {
changes: [ changes: [
{ {
@ -155,8 +155,8 @@ describe('RangeManager', function () {
], ],
comments: [ comments: [
{ {
id: threadId, id: new ObjectId(),
op: { c: 'foo', p: 3, t: threadId }, op: { c: 'foo', p: 3, t: new ObjectId() },
}, },
], ],
} }
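
These tests pin down a conversion that normalises JSON range payloads (string ids, ISO timestamps) into ObjectId and Date instances before they reach Mongo, and that now derives each comment's id from its thread id. A simplified sketch under assumed field names, not the real RangeManager:

const { ObjectId } = require('mongodb')

function jsonRangesToMongo(ranges) {
  if (!ranges) return ranges
  for (const change of ranges.changes ?? []) {
    change.id = new ObjectId(change.id)
    if (change.metadata?.user_id) {
      change.metadata.user_id = new ObjectId(change.metadata.user_id)
    }
    if (change.metadata?.ts) {
      change.metadata.ts = new Date(change.metadata.ts)
    }
  }
  for (const comment of ranges.comments ?? []) {
    if (comment.op?.t) {
      comment.op.t = new ObjectId(comment.op.t)
      comment.id = comment.op.t // "fix comment id": follow the thread id
    } else {
      comment.id = new ObjectId(comment.id)
    }
  }
  return ranges
}
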


@ -1,4 +1,3 @@
const OError = require('@overleaf/o-error')
const DMP = require('diff-match-patch') const DMP = require('diff-match-patch')
const { TextOperation } = require('overleaf-editor-core') const { TextOperation } = require('overleaf-editor-core')
const dmp = new DMP() const dmp = new DMP()
@ -39,62 +38,23 @@ module.exports = {
return ops return ops
}, },
/** diffAsHistoryV1EditOperation(before, after) {
* @param {import("overleaf-editor-core").StringFileData} file const diffs = dmp.diff_main(before, after)
* @param {string} after
* @return {TextOperation}
*/
diffAsHistoryOTEditOperation(file, after) {
const beforeWithoutTrackedDeletes = file.getContent({
filterTrackedDeletes: true,
})
const diffs = dmp.diff_main(beforeWithoutTrackedDeletes, after)
dmp.diff_cleanupSemantic(diffs) dmp.diff_cleanupSemantic(diffs)
const trackedChanges = file.trackedChanges.asSorted()
let nextTc = trackedChanges.shift()
const op = new TextOperation() const op = new TextOperation()
for (const diff of diffs) { for (const diff of diffs) {
let [type, content] = diff const [type, content] = diff
if (type === this.ADDED) { if (type === this.ADDED) {
op.insert(content) op.insert(content)
} else if (type === this.REMOVED || type === this.UNCHANGED) { } else if (type === this.REMOVED) {
while (op.baseLength + content.length > nextTc?.range.start) { op.remove(content.length)
if (nextTc.tracking.type === 'delete') { } else if (type === this.UNCHANGED) {
const untilRange = nextTc.range.start - op.baseLength op.retain(content.length)
if (type === this.REMOVED) {
op.remove(untilRange)
} else if (type === this.UNCHANGED) {
op.retain(untilRange)
}
op.retain(nextTc.range.end - nextTc.range.start)
content = content.slice(untilRange)
}
nextTc = trackedChanges.shift()
}
if (type === this.REMOVED) {
op.remove(content.length)
} else if (type === this.UNCHANGED) {
op.retain(content.length)
}
} else { } else {
throw new Error('Unknown type') throw new Error('Unknown type')
} }
} }
while (nextTc) {
if (
nextTc.tracking.type !== 'delete' ||
nextTc.range.start !== op.baseLength
) {
throw new OError(
'StringFileData.trackedChanges out of sync: unexpected range after end of diff',
{ nextTc, baseLength: op.baseLength }
)
}
op.retain(nextTc.range.end - nextTc.range.start)
nextTc = trackedChanges.shift()
}
return op return op
}, },
} }
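
Stripped of the tracked-changes bookkeeping, the core of both versions of this function is the same mapping from diff-match-patch tuples to an edit operation. A runnable sketch of just that mapping, with plain objects standing in for TextOperation:

const DiffMatchPatch = require('diff-match-patch')
const dmp = new DiffMatchPatch()

// diff_main returns [type, text] pairs: -1 = removed, 0 = unchanged, 1 = added.
function diffAsOps(before, after) {
  const diffs = dmp.diff_main(before, after)
  dmp.diff_cleanupSemantic(diffs) // merge char-level noise into readable edits
  const ops = []
  for (const [type, text] of diffs) {
    if (type === 1) ops.push({ insert: text })
    else if (type === -1) ops.push({ remove: text.length })
    else ops.push({ retain: text.length })
  }
  return ops
}

// diffAsOps('abc', 'abXc') -> roughly [{retain: 2}, {insert: 'X'}, {retain: 1}]

The tracked-changes-aware variant (diffAsHistoryOTEditOperation) additionally retains every tracked-delete range it passes, because those characters exist in the history document but not in the editor-facing content the diff was computed against.
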


@ -194,8 +194,9 @@ const DocumentManager = {
let op let op
if (type === 'history-ot') { if (type === 'history-ot') {
const file = StringFileData.fromRaw(oldLines) const file = StringFileData.fromRaw(oldLines)
const operation = DiffCodec.diffAsHistoryOTEditOperation( const operation = DiffCodec.diffAsHistoryV1EditOperation(
file, // TODO(24596): tc support for history-ot
file.getContent({ filterTrackedDeletes: true }),
newLines.join('\n') newLines.join('\n')
) )
if (operation.isNoop()) { if (operation.isNoop()) {
@ -535,6 +536,11 @@ const DocumentManager = {
if (opts.historyRangesMigration) { if (opts.historyRangesMigration) {
historyRangesSupport = opts.historyRangesMigration === 'forwards' historyRangesSupport = opts.historyRangesMigration === 'forwards'
} }
if (!Array.isArray(lines)) {
const file = StringFileData.fromRaw(lines)
// TODO(24596): tc support for history-ot
lines = file.getLines()
}
await ProjectHistoryRedisManager.promises.queueResyncDocContent( await ProjectHistoryRedisManager.promises.queueResyncDocContent(
projectId, projectId,


@ -62,7 +62,6 @@ const HistoryManager = {
// record updates for project history // record updates for project history
if ( if (
HistoryManager.shouldFlushHistoryOps( HistoryManager.shouldFlushHistoryOps(
projectId,
projectOpsLength, projectOpsLength,
ops.length, ops.length,
HistoryManager.FLUSH_PROJECT_EVERY_N_OPS HistoryManager.FLUSH_PROJECT_EVERY_N_OPS
@ -78,8 +77,7 @@ const HistoryManager = {
} }
}, },
shouldFlushHistoryOps(projectId, length, opsLength, threshold) { shouldFlushHistoryOps(length, opsLength, threshold) {
if (Settings.shortHistoryQueues.includes(projectId)) return true
if (!length) { if (!length) {
return false return false
} // don't flush unless we know the length } // don't flush unless we know the length
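
The threshold logic these call sites rely on flushes whenever the op count crosses a multiple of the threshold. A sketch consistent with the unit tests further down (with 3 new ops and a threshold of 5: 14 does not flush, 15 and 17 do):

function shouldFlushHistoryOps(length, opsLength, threshold) {
  if (!length) return false // don't flush unless the queue length is known
  const previousLength = length - opsLength
  // Flush iff this batch moved the total past a multiple of the threshold.
  return (
    Math.floor(length / threshold) !== Math.floor(previousLength / threshold)
  )
}

shouldFlushHistoryOps(14, 3, 5) // false: 11 -> 14 stays inside the 10..14 block
shouldFlushHistoryOps(15, 3, 5) // true: 12 -> 15 crosses a multiple of 5
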


@ -28,19 +28,4 @@ module.exports = {
// since we didn't hit the limit in the loop, the document is within the allowed length // since we didn't hit the limit in the loop, the document is within the allowed length
return false return false
}, },
/**
* @param {StringFileRawData} raw
* @param {number} maxDocLength
*/
stringFileDataContentIsTooLarge(raw, maxDocLength) {
let n = raw.content.length
if (n <= maxDocLength) return false // definitely under the limit, no need to calculate the total size
for (const tc of raw.trackedChanges ?? []) {
if (tc.tracking.type !== 'delete') continue
n -= tc.range.length
if (n <= maxDocLength) return false // under the limit now, no need to calculate the exact size
}
return true
},
} }
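
A quick worked example of the stringFileDataContentIsTooLarge helper above: tracked deletes still occupy characters in `content`, so their lengths are subtracted before comparing against the limit, and only when the raw length already exceeds it.

const { stringFileDataContentIsTooLarge } = require('./Limits') // path assumed

const raw = {
  content: 'x'.repeat(120) + 'DELETED', // 127 raw characters
  trackedChanges: [
    {
      range: { pos: 120, length: 7 }, // the 'DELETED' span
      tracking: { type: 'delete', userId: 'user-id', ts: new Date().toJSON() },
    },
  ],
}

// 127 raw chars minus a 7-char tracked delete = 120 effective chars <= 123
stringFileDataContentIsTooLarge(raw, 123) // => false
stringFileDataContentIsTooLarge({ content: 'x'.repeat(124) }, 123) // => true
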


@ -8,14 +8,13 @@ const rclient = require('@overleaf/redis-wrapper').createClient(
) )
const logger = require('@overleaf/logger') const logger = require('@overleaf/logger')
const metrics = require('./Metrics') const metrics = require('./Metrics')
const { docIsTooLarge, stringFileDataContentIsTooLarge } = require('./Limits') const { docIsTooLarge } = require('./Limits')
const { addTrackedDeletesToContent, extractOriginOrSource } = require('./Utils') const { addTrackedDeletesToContent, extractOriginOrSource } = require('./Utils')
const HistoryConversions = require('./HistoryConversions') const HistoryConversions = require('./HistoryConversions')
const OError = require('@overleaf/o-error') const OError = require('@overleaf/o-error')
/** /**
* @import { Ranges } from './types' * @import { Ranges } from './types'
* @import { StringFileRawData } from 'overleaf-editor-core/lib/types'
*/ */
const ProjectHistoryRedisManager = { const ProjectHistoryRedisManager = {
@ -181,7 +180,7 @@ const ProjectHistoryRedisManager = {
* @param {string} projectId * @param {string} projectId
* @param {string} projectHistoryId * @param {string} projectHistoryId
* @param {string} docId * @param {string} docId
* @param {string[] | StringFileRawData} lines * @param {string[]} lines
* @param {Ranges} ranges * @param {Ranges} ranges
* @param {string[]} resolvedCommentIds * @param {string[]} resolvedCommentIds
* @param {number} version * @param {number} version
@ -205,8 +204,13 @@ const ProjectHistoryRedisManager = {
'queue doc content resync' 'queue doc content resync'
) )
let content = lines.join('\n')
if (historyRangesSupport) {
content = addTrackedDeletesToContent(content, ranges.changes ?? [])
}
const projectUpdate = { const projectUpdate = {
resyncDocContent: { version }, resyncDocContent: { content, version },
projectHistoryId, projectHistoryId,
path: pathname, path: pathname,
doc: docId, doc: docId,
@ -215,38 +219,17 @@ const ProjectHistoryRedisManager = {
}, },
} }
let content = '' if (historyRangesSupport) {
if (Array.isArray(lines)) { projectUpdate.resyncDocContent.ranges =
content = lines.join('\n') HistoryConversions.toHistoryRanges(ranges)
if (historyRangesSupport) { projectUpdate.resyncDocContent.resolvedCommentIds = resolvedCommentIds
content = addTrackedDeletesToContent(content, ranges.changes ?? [])
projectUpdate.resyncDocContent.ranges =
HistoryConversions.toHistoryRanges(ranges)
projectUpdate.resyncDocContent.resolvedCommentIds = resolvedCommentIds
}
} else {
content = lines.content
projectUpdate.resyncDocContent.historyOTRanges = {
comments: lines.comments,
trackedChanges: lines.trackedChanges,
}
} }
projectUpdate.resyncDocContent.content = content
const jsonUpdate = JSON.stringify(projectUpdate) const jsonUpdate = JSON.stringify(projectUpdate)
// Do an optimised size check on the docLines using the serialised // Do an optimised size check on the docLines using the serialised
// project update length as an upper bound // project update length as an upper bound
const sizeBound = jsonUpdate.length const sizeBound = jsonUpdate.length
if (Array.isArray(lines)) { if (docIsTooLarge(sizeBound, lines, Settings.max_doc_length)) {
if (docIsTooLarge(sizeBound, lines, Settings.max_doc_length)) {
throw new OError(
'blocking resync doc content insert into project history queue: doc is too large',
{ projectId, docId, docSize: sizeBound }
)
}
} else if (
stringFileDataContentIsTooLarge(lines, Settings.max_doc_length)
) {
throw new OError( throw new OError(
'blocking resync doc content insert into project history queue: doc is too large', 'blocking resync doc content insert into project history queue: doc is too large',
{ projectId, docId, docSize: sizeBound } { projectId, docId, docSize: sizeBound }


@ -317,7 +317,6 @@ function updateProjectWithLocks(
} }
if ( if (
HistoryManager.shouldFlushHistoryOps( HistoryManager.shouldFlushHistoryOps(
projectId,
projectOpsLength, projectOpsLength,
updates.length, updates.length,
HistoryManager.FLUSH_PROJECT_EVERY_N_OPS HistoryManager.FLUSH_PROJECT_EVERY_N_OPS


@ -184,8 +184,4 @@ module.exports = {
smoothingOffset: process.env.SMOOTHING_OFFSET || 1000, // milliseconds smoothingOffset: process.env.SMOOTHING_OFFSET || 1000, // milliseconds
gracefulShutdownDelayInMs: gracefulShutdownDelayInMs:
parseInt(process.env.GRACEFUL_SHUTDOWN_DELAY_SECONDS ?? '10', 10) * 1000, parseInt(process.env.GRACEFUL_SHUTDOWN_DELAY_SECONDS ?? '10', 10) * 1000,
shortHistoryQueues: (process.env.SHORT_HISTORY_QUEUES || '')
.split(',')
.filter(s => !!s),
} }


@ -28,15 +28,12 @@ services:
MOCHA_GREP: ${MOCHA_GREP} MOCHA_GREP: ${MOCHA_GREP}
NODE_ENV: test NODE_ENV: test
NODE_OPTIONS: "--unhandled-rejections=strict" NODE_OPTIONS: "--unhandled-rejections=strict"
volumes:
- ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
depends_on: depends_on:
mongo: mongo:
condition: service_started condition: service_started
redis: redis:
condition: service_healthy condition: service_healthy
user: node user: node
entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
command: npm run test:acceptance command: npm run test:acceptance
@ -48,7 +45,7 @@ services:
command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
user: root user: root
redis: redis:
image: redis:7.4.3 image: redis
healthcheck: healthcheck:
test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ] test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ]
interval: 1s interval: 1s


@ -26,7 +26,6 @@ services:
- .:/overleaf/services/document-updater - .:/overleaf/services/document-updater
- ../../node_modules:/overleaf/node_modules - ../../node_modules:/overleaf/node_modules
- ../../libraries:/overleaf/libraries - ../../libraries:/overleaf/libraries
- ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
working_dir: /overleaf/services/document-updater working_dir: /overleaf/services/document-updater
environment: environment:
ELASTIC_SEARCH_DSN: es:9200 ELASTIC_SEARCH_DSN: es:9200
@ -46,11 +45,10 @@ services:
condition: service_started condition: service_started
redis: redis:
condition: service_healthy condition: service_healthy
entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
command: npm run --silent test:acceptance command: npm run --silent test:acceptance
redis: redis:
image: redis:7.4.3 image: redis
healthcheck: healthcheck:
test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ] test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ]
interval: 1s interval: 1s


@ -15,7 +15,6 @@ const request = require('requestretry').defaults({
retryDelay: 10, retryDelay: 10,
}) })
const ONLY_PROJECT_ID = process.env.ONLY_PROJECT_ID
const AUTO_FIX_VERSION_MISMATCH = const AUTO_FIX_VERSION_MISMATCH =
process.env.AUTO_FIX_VERSION_MISMATCH === 'true' process.env.AUTO_FIX_VERSION_MISMATCH === 'true'
const AUTO_FIX_PARTIALLY_DELETED_DOC_METADATA = const AUTO_FIX_PARTIALLY_DELETED_DOC_METADATA =
@ -320,12 +319,10 @@ async function processProject(projectId) {
* @return {Promise<{perIterationOutOfSync: number, done: boolean}>} * @return {Promise<{perIterationOutOfSync: number, done: boolean}>}
*/ */
async function scanOnce(processed, outOfSync) { async function scanOnce(processed, outOfSync) {
const projectIds = ONLY_PROJECT_ID const projectIds = await ProjectFlusher.promises.flushAllProjects({
? [ONLY_PROJECT_ID] limit: LIMIT,
: await ProjectFlusher.promises.flushAllProjects({ dryRun: true,
limit: LIMIT, })
dryRun: true,
})
let perIterationOutOfSync = 0 let perIterationOutOfSync = 0
for (const projectId of projectIds) { for (const projectId of projectIds) {


@ -686,285 +686,4 @@ describe('Setting a document', function () {
}) })
}) })
}) })
describe('with track changes (history-ot)', function () {
const lines = ['one', 'one and a half', 'two', 'three']
const userId = DocUpdaterClient.randomId()
const ts = new Date().toISOString()
beforeEach(function (done) {
numberOfReceivedUpdates = 0
this.newLines = ['one', 'two', 'three']
this.project_id = DocUpdaterClient.randomId()
this.doc_id = DocUpdaterClient.randomId()
this.historyOTUpdate = {
doc: this.doc_id,
op: [
{
textOperation: [
4,
{
r: 'one and a half\n'.length,
tracking: {
type: 'delete',
userId,
ts,
},
},
9,
],
},
],
v: this.version,
meta: { source: 'random-publicId' },
}
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines,
version: this.version,
otMigrationStage: 1,
})
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
if (error) {
throw error
}
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
this.historyOTUpdate,
error => {
if (error) {
throw error
}
DocUpdaterClient.waitForPendingUpdates(
this.project_id,
this.doc_id,
done
)
}
)
})
})
afterEach(function () {
MockProjectHistoryApi.flushProject.resetHistory()
MockWebApi.setDocument.resetHistory()
})
it('should record tracked changes', function (done) {
docUpdaterRedis.get(
Keys.docLines({ doc_id: this.doc_id }),
(error, data) => {
if (error) {
throw error
}
expect(JSON.parse(data)).to.deep.equal({
content: lines.join('\n'),
trackedChanges: [
{
range: {
pos: 4,
length: 15,
},
tracking: {
ts,
type: 'delete',
userId,
},
},
],
})
done()
}
)
})
it('should apply the change', function (done) {
DocUpdaterClient.getDoc(
this.project_id,
this.doc_id,
(error, res, data) => {
if (error) {
throw error
}
expect(data.lines).to.deep.equal(this.newLines)
done()
}
)
})
const cases = [
{
name: 'when resetting the content',
lines,
want: {
content: 'one\none and a half\none and a half\ntwo\nthree',
trackedChanges: [
{
range: {
pos: 'one and a half\n'.length + 4,
length: 15,
},
tracking: {
ts,
type: 'delete',
userId,
},
},
],
},
},
{
name: 'when adding content before a tracked delete',
lines: ['one', 'INSERT', 'two', 'three'],
want: {
content: 'one\nINSERT\none and a half\ntwo\nthree',
trackedChanges: [
{
range: {
pos: 'INSERT\n'.length + 4,
length: 15,
},
tracking: {
ts,
type: 'delete',
userId,
},
},
],
},
},
{
name: 'when adding content after a tracked delete',
lines: ['one', 'two', 'INSERT', 'three'],
want: {
content: 'one\none and a half\ntwo\nINSERT\nthree',
trackedChanges: [
{
range: {
pos: 4,
length: 15,
},
tracking: {
ts,
type: 'delete',
userId,
},
},
],
},
},
{
name: 'when deleting content before a tracked delete',
lines: ['two', 'three'],
want: {
content: 'one and a half\ntwo\nthree',
trackedChanges: [
{
range: {
pos: 0,
length: 15,
},
tracking: {
ts,
type: 'delete',
userId,
},
},
],
},
},
{
name: 'when deleting content after a tracked delete',
lines: ['one', 'two'],
want: {
content: 'one\none and a half\ntwo',
trackedChanges: [
{
range: {
pos: 4,
length: 15,
},
tracking: {
ts,
type: 'delete',
userId,
},
},
],
},
},
{
name: 'when deleting content immediately after a tracked delete',
lines: ['one', 'three'],
want: {
content: 'one\none and a half\nthree',
trackedChanges: [
{
range: {
pos: 4,
length: 15,
},
tracking: {
ts,
type: 'delete',
userId,
},
},
],
},
},
{
name: 'when deleting content across a tracked delete',
lines: ['onethree'],
want: {
content: 'oneone and a half\nthree',
trackedChanges: [
{
range: {
pos: 3,
length: 15,
},
tracking: {
ts,
type: 'delete',
userId,
},
},
],
},
},
]
for (const { name, lines, want } of cases) {
describe(name, function () {
beforeEach(function (done) {
DocUpdaterClient.setDocLines(
this.project_id,
this.doc_id,
lines,
this.source,
userId,
false,
(error, res, body) => {
if (error) {
return done(error)
}
this.statusCode = res.statusCode
this.body = body
done()
}
)
})
it('should update accordingly', function (done) {
docUpdaterRedis.get(
Keys.docLines({ doc_id: this.doc_id }),
(error, data) => {
if (error) {
throw error
}
expect(JSON.parse(data)).to.deep.equal(want)
done()
}
)
})
})
}
})
}) })
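
What these fixtures encode: the raw history-OT doc keeps tracked deletes inside `content`, and the editor-facing lines are recovered by cutting those ranges back out. A small sketch of that projection, assuming `trackedChanges` sorted by position:

function visibleContent({ content, trackedChanges = [] }) {
  let result = ''
  let cursor = 0
  for (const { range, tracking } of trackedChanges) {
    if (tracking.type !== 'delete') continue // tracked inserts stay visible
    result += content.slice(cursor, range.pos)
    cursor = range.pos + range.length
  }
  return result + content.slice(cursor)
}

visibleContent({
  content: 'one\none and a half\ntwo\nthree',
  trackedChanges: [
    { range: { pos: 4, length: 15 }, tracking: { type: 'delete' } },
  ],
}) // => 'one\ntwo\nthree', i.e. the `newLines` asserted above
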


@ -14,7 +14,6 @@ describe('HistoryManager', function () {
requires: { requires: {
request: (this.request = {}), request: (this.request = {}),
'@overleaf/settings': (this.Settings = { '@overleaf/settings': (this.Settings = {
shortHistoryQueues: [],
apis: { apis: {
project_history: { project_history: {
url: 'http://project_history.example.com', url: 'http://project_history.example.com',
@ -119,7 +118,7 @@ describe('HistoryManager', function () {
beforeEach(function () { beforeEach(function () {
this.HistoryManager.shouldFlushHistoryOps = sinon.stub() this.HistoryManager.shouldFlushHistoryOps = sinon.stub()
this.HistoryManager.shouldFlushHistoryOps this.HistoryManager.shouldFlushHistoryOps
.withArgs(this.project_id, this.project_ops_length) .withArgs(this.project_ops_length)
.returns(true) .returns(true)
this.HistoryManager.recordAndFlushHistoryOps( this.HistoryManager.recordAndFlushHistoryOps(
@ -140,7 +139,7 @@ describe('HistoryManager', function () {
beforeEach(function () { beforeEach(function () {
this.HistoryManager.shouldFlushHistoryOps = sinon.stub() this.HistoryManager.shouldFlushHistoryOps = sinon.stub()
this.HistoryManager.shouldFlushHistoryOps this.HistoryManager.shouldFlushHistoryOps
.withArgs(this.project_id, this.project_ops_length) .withArgs(this.project_ops_length)
.returns(false) .returns(false)
this.HistoryManager.recordAndFlushHistoryOps( this.HistoryManager.recordAndFlushHistoryOps(
@ -158,7 +157,6 @@ describe('HistoryManager', function () {
describe('shouldFlushHistoryOps', function () { describe('shouldFlushHistoryOps', function () {
it('should return false if the number of ops is not known', function () { it('should return false if the number of ops is not known', function () {
this.HistoryManager.shouldFlushHistoryOps( this.HistoryManager.shouldFlushHistoryOps(
this.project_id,
null, null,
['a', 'b', 'c'].length, ['a', 'b', 'c'].length,
1 1
@ -170,7 +168,6 @@ describe('HistoryManager', function () {
// Previously we were on 11 ops // Previously we were on 11 ops
// We didn't pass over a multiple of 5 // We didn't pass over a multiple of 5
this.HistoryManager.shouldFlushHistoryOps( this.HistoryManager.shouldFlushHistoryOps(
this.project_id,
14, 14,
['a', 'b', 'c'].length, ['a', 'b', 'c'].length,
5 5
@ -181,7 +178,6 @@ describe('HistoryManager', function () {
// Previously we were on 12 ops // Previously we were on 12 ops
// We've reached a new multiple of 5 // We've reached a new multiple of 5
this.HistoryManager.shouldFlushHistoryOps( this.HistoryManager.shouldFlushHistoryOps(
this.project_id,
15, 15,
['a', 'b', 'c'].length, ['a', 'b', 'c'].length,
5 5
@ -193,22 +189,11 @@ describe('HistoryManager', function () {
// Previously we were on 16 ops // Previously we were on 16 ops
// We didn't pass over a multiple of 5 // We didn't pass over a multiple of 5
this.HistoryManager.shouldFlushHistoryOps( this.HistoryManager.shouldFlushHistoryOps(
this.project_id,
17, 17,
['a', 'b', 'c'].length, ['a', 'b', 'c'].length,
5 5
).should.equal(true) ).should.equal(true)
}) })
it('should return true if the project has a short queue', function () {
this.Settings.shortHistoryQueues = [this.project_id]
this.HistoryManager.shouldFlushHistoryOps(
this.project_id,
14,
['a', 'b', 'c'].length,
5
).should.equal(true)
})
}) })
}) })


@ -81,88 +81,4 @@ describe('Limits', function () {
}) })
}) })
}) })
describe('stringFileDataContentIsTooLarge', function () {
it('should handle small docs', function () {
expect(
this.Limits.stringFileDataContentIsTooLarge({ content: '' }, 123)
).to.equal(false)
})
it('should handle docs at the limit', function () {
expect(
this.Limits.stringFileDataContentIsTooLarge(
{ content: 'x'.repeat(123) },
123
)
).to.equal(false)
})
it('should handle docs above the limit', function () {
expect(
this.Limits.stringFileDataContentIsTooLarge(
{ content: 'x'.repeat(123 + 1) },
123
)
).to.equal(true)
})
it('should handle docs above the limit and below with tracked-deletes removed', function () {
expect(
this.Limits.stringFileDataContentIsTooLarge(
{
content: 'x'.repeat(123 + 1),
trackedChanges: [
{
range: { pos: 1, length: 1 },
tracking: {
type: 'delete',
ts: '2025-06-16T14:31:44.910Z',
userId: 'user-id',
},
},
],
},
123
)
).to.equal(false)
})
it('should handle docs above the limit and above with tracked-deletes removed', function () {
expect(
this.Limits.stringFileDataContentIsTooLarge(
{
content: 'x'.repeat(123 + 2),
trackedChanges: [
{
range: { pos: 1, length: 1 },
tracking: {
type: 'delete',
ts: '2025-06-16T14:31:44.910Z',
userId: 'user-id',
},
},
],
},
123
)
).to.equal(true)
})
it('should handle docs above the limit and with tracked-inserts', function () {
expect(
this.Limits.stringFileDataContentIsTooLarge(
{
content: 'x'.repeat(123 + 1),
trackedChanges: [
{
range: { pos: 1, length: 1 },
tracking: {
type: 'insert',
ts: '2025-06-16T14:31:44.910Z',
userId: 'user-id',
},
},
],
},
123
)
).to.equal(true)
})
})
}) })


@ -15,7 +15,6 @@ describe('ProjectHistoryRedisManager', function () {
this.Limits = { this.Limits = {
docIsTooLarge: sinon.stub().returns(false), docIsTooLarge: sinon.stub().returns(false),
stringFileDataContentIsTooLarge: sinon.stub().returns(false),
} }
this.ProjectHistoryRedisManager = SandboxedModule.require(modulePath, { this.ProjectHistoryRedisManager = SandboxedModule.require(modulePath, {
@ -62,18 +61,22 @@ describe('ProjectHistoryRedisManager', function () {
}) })
it('should queue an update', function () { it('should queue an update', function () {
this.multi.rpush.should.have.been.calledWithExactly( this.multi.rpush
`ProjectHistory:Ops:${this.project_id}`, .calledWithExactly(
this.ops[0], `ProjectHistory:Ops:${this.project_id}`,
this.ops[1] this.ops[0],
) this.ops[1]
)
.should.equal(true)
}) })
it('should set the queue timestamp if not present', function () { it('should set the queue timestamp if not present', function () {
this.multi.setnx.should.have.been.calledWithExactly( this.multi.setnx
`ProjectHistory:FirstOpTimestamp:${this.project_id}`, .calledWithExactly(
Date.now() `ProjectHistory:FirstOpTimestamp:${this.project_id}`,
) Date.now()
)
.should.equal(true)
}) })
}) })
@ -115,10 +118,9 @@ describe('ProjectHistoryRedisManager', function () {
file: this.file_id, file: this.file_id,
} }
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( this.ProjectHistoryRedisManager.promises.queueOps
this.project_id, .calledWithExactly(this.project_id, JSON.stringify(update))
JSON.stringify(update) .should.equal(true)
)
}) })
}) })
@ -164,10 +166,9 @@ describe('ProjectHistoryRedisManager', function () {
doc: this.doc_id, doc: this.doc_id,
} }
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( this.ProjectHistoryRedisManager.promises.queueOps
this.project_id, .calledWithExactly(this.project_id, JSON.stringify(update))
JSON.stringify(update) .should.equal(true)
)
}) })
it('should queue an update with file metadata', async function () { it('should queue an update with file metadata', async function () {
@ -349,10 +350,9 @@ describe('ProjectHistoryRedisManager', function () {
doc: this.doc_id, doc: this.doc_id,
} }
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( this.ProjectHistoryRedisManager.promises.queueOps
this.project_id, .calledWithExactly(this.project_id, JSON.stringify(update))
JSON.stringify(update) .should.equal(true)
)
}) })
it('should not forward ranges if history ranges support is undefined', async function () { it('should not forward ranges if history ranges support is undefined', async function () {
@ -402,10 +402,9 @@ describe('ProjectHistoryRedisManager', function () {
doc: this.doc_id, doc: this.doc_id,
} }
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( this.ProjectHistoryRedisManager.promises.queueOps
this.project_id, .calledWithExactly(this.project_id, JSON.stringify(update))
JSON.stringify(update) .should.equal(true)
)
}) })
it('should pass "false" as the createdBlob field if not provided', async function () { it('should pass "false" as the createdBlob field if not provided', async function () {
@ -433,10 +432,9 @@ describe('ProjectHistoryRedisManager', function () {
doc: this.doc_id, doc: this.doc_id,
} }
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( this.ProjectHistoryRedisManager.promises.queueOps
this.project_id, .calledWithExactly(this.project_id, JSON.stringify(update))
JSON.stringify(update) .should.equal(true)
)
}) })
it('should pass through the value of the createdBlob field', async function () { it('should pass through the value of the createdBlob field', async function () {
@ -465,10 +463,9 @@ describe('ProjectHistoryRedisManager', function () {
doc: this.doc_id, doc: this.doc_id,
} }
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( this.ProjectHistoryRedisManager.promises.queueOps
this.project_id, .calledWithExactly(this.project_id, JSON.stringify(update))
JSON.stringify(update) .should.equal(true)
)
}) })
}) })
@ -496,8 +493,8 @@ describe('ProjectHistoryRedisManager', function () {
beforeEach(async function () { beforeEach(async function () {
this.update = { this.update = {
resyncDocContent: { resyncDocContent: {
version: this.version,
content: 'one\ntwo', content: 'one\ntwo',
version: this.version,
}, },
projectHistoryId: this.projectHistoryId, projectHistoryId: this.projectHistoryId,
path: this.pathname, path: this.pathname,
@ -519,18 +516,19 @@ describe('ProjectHistoryRedisManager', function () {
}) })
it('should check if the doc is too large', function () { it('should check if the doc is too large', function () {
this.Limits.docIsTooLarge.should.have.been.calledWith( this.Limits.docIsTooLarge
JSON.stringify(this.update).length, .calledWith(
this.lines, JSON.stringify(this.update).length,
this.settings.max_doc_length this.lines,
) this.settings.max_doc_length
)
.should.equal(true)
}) })
it('should queue an update', function () { it('should queue an update', function () {
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( this.ProjectHistoryRedisManager.promises.queueOps
this.project_id, .calledWithExactly(this.project_id, JSON.stringify(this.update))
JSON.stringify(this.update) .should.equal(true)
)
}) })
}) })
@ -553,8 +551,9 @@ describe('ProjectHistoryRedisManager', function () {
}) })
it('should not queue an update if the doc is too large', function () { it('should not queue an update if the doc is too large', function () {
this.ProjectHistoryRedisManager.promises.queueOps.should.not.have.been this.ProjectHistoryRedisManager.promises.queueOps.called.should.equal(
.called false
)
}) })
}) })
@ -562,10 +561,10 @@ describe('ProjectHistoryRedisManager', function () {
beforeEach(async function () { beforeEach(async function () {
this.update = { this.update = {
resyncDocContent: { resyncDocContent: {
content: 'onedeleted\ntwo',
version: this.version, version: this.version,
ranges: this.ranges, ranges: this.ranges,
resolvedCommentIds: this.resolvedCommentIds, resolvedCommentIds: this.resolvedCommentIds,
content: 'onedeleted\ntwo',
}, },
projectHistoryId: this.projectHistoryId, projectHistoryId: this.projectHistoryId,
path: this.pathname, path: this.pathname,
@ -602,76 +601,9 @@ describe('ProjectHistoryRedisManager', function () {
}) })
it('should queue an update', function () { it('should queue an update', function () {
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( this.ProjectHistoryRedisManager.promises.queueOps
this.project_id, .calledWithExactly(this.project_id, JSON.stringify(this.update))
JSON.stringify(this.update) .should.equal(true)
)
})
})
describe('history-ot', function () {
beforeEach(async function () {
this.lines = {
content: 'onedeleted\ntwo',
comments: [{ id: 'id1', ranges: [{ pos: 0, length: 3 }] }],
trackedChanges: [
{
range: { pos: 3, length: 7 },
tracking: {
type: 'delete',
userId: 'user-id',
ts: '2025-06-16T14:31:44.910Z',
},
},
],
}
this.update = {
resyncDocContent: {
version: this.version,
historyOTRanges: {
comments: this.lines.comments,
trackedChanges: this.lines.trackedChanges,
},
content: this.lines.content,
},
projectHistoryId: this.projectHistoryId,
path: this.pathname,
doc: this.doc_id,
meta: { ts: new Date() },
}
await this.ProjectHistoryRedisManager.promises.queueResyncDocContent(
this.project_id,
this.projectHistoryId,
this.doc_id,
this.lines,
this.ranges,
this.resolvedCommentIds,
this.version,
this.pathname,
true
)
})
it('should include tracked deletes in the update', function () {
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
this.project_id,
JSON.stringify(this.update)
)
})
it('should check the doc length without tracked deletes', function () {
this.Limits.stringFileDataContentIsTooLarge.should.have.been.calledWith(
this.lines,
this.settings.max_doc_length
)
})
it('should queue an update', function () {
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
this.project_id,
JSON.stringify(this.update)
)
}) })
}) })
}) })
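
The assertion rewrites throughout this file lean on sinon-chai: with the plugin loaded, spy expectations read as chai chains, and failures report the actual call arguments instead of "expected false to equal true". A minimal standalone example:

const chai = require('chai')
const sinon = require('sinon')
chai.use(require('sinon-chai'))
chai.should()

const queueOps = sinon.stub()
queueOps('project-1', '{"doc":"d1"}')

// Old style: boolean check with an opaque failure message
queueOps.calledWithExactly('project-1', '{"doc":"d1"}').should.equal(true)
// New style: sinon-chai diffs expected vs received arguments on failure
queueOps.should.have.been.calledWithExactly('project-1', '{"doc":"d1"}')
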


@ -1,17 +1,11 @@
# Build the a8m/envsubst binary, as it supports default values, # Dockerfile for git-bridge
# which the gnu envsubst (from gettext-base) does not.
FROM golang:1.24.3-alpine AS envsubst_builder
WORKDIR /build
RUN go install github.com/a8m/envsubst/cmd/envsubst@latest
FROM maven:3-amazoncorretto-21-debian AS base FROM maven:3-amazoncorretto-21-debian AS base
RUN apt-get update && apt-get install -y make git sqlite3 \ RUN apt-get update && apt-get install -y make git sqlite3 \
&& rm -rf /var/lib/apt/lists && rm -rf /var/lib/apt/lists
COPY --from=envsubst_builder /go/bin/envsubst /opt/envsubst COPY vendor/envsubst /opt/envsubst
RUN chmod +x /opt/envsubst RUN chmod +x /opt/envsubst
RUN useradd --create-home node RUN useradd --create-home node
@ -39,7 +33,7 @@ RUN adduser -D node
COPY --from=builder /git-bridge.jar / COPY --from=builder /git-bridge.jar /
COPY --from=envsubst_builder /go/bin/envsubst /opt/envsubst COPY vendor/envsubst /opt/envsubst
RUN chmod +x /opt/envsubst RUN chmod +x /opt/envsubst
COPY conf/envsubst_template.json envsubst_template.json COPY conf/envsubst_template.json envsubst_template.json


@ -18,8 +18,8 @@
<jmock.junit4.version>2.8.4</jmock.junit4.version> <jmock.junit4.version>2.8.4</jmock.junit4.version>
<jetty.servlet.version>9.4.57.v20241219</jetty.servlet.version> <jetty.servlet.version>9.4.57.v20241219</jetty.servlet.version>
<gson.version>2.9.0</gson.version> <gson.version>2.9.0</gson.version>
<async.http.client.version>3.0.2</async.http.client.version> <async.http.client.version>3.0.1</async.http.client.version>
<jgit.version>6.10.1.202505221210-r</jgit.version> <jgit.version>6.6.1.202309021850-r</jgit.version>
<sqlite.jdbc.version>3.41.2.2</sqlite.jdbc.version> <sqlite.jdbc.version>3.41.2.2</sqlite.jdbc.version>
<joda.time.version>2.9.9</joda.time.version> <joda.time.version>2.9.9</joda.time.version>
<google.oauth.client.version>1.37.0</google.oauth.client.version> <google.oauth.client.version>1.37.0</google.oauth.client.version>

services/git-bridge/vendor/envsubst (vendored executable; binary file not shown)


@ -1,76 +0,0 @@
const crypto = require('node:crypto')
class Rollout {
constructor(config) {
// The history buffer level is used to determine whether to queue changes
// in Redis or persist them directly to the chunk store.
// It defaults to 0 (no queuing) if not set.
this.historyBufferLevel = config.has('historyBufferLevel')
? parseInt(config.get('historyBufferLevel'), 10)
: 0
// The forcePersistBuffer flag will ensure the buffer is fully persisted before
// any persist operation. Set this to true if you want to make the persisted-version
// in Redis match the endVersion of the latest chunk. This should be set to true
// when downgrading from a history buffer level that queues changes in Redis
// without persisting them immediately.
this.forcePersistBuffer = config.has('forcePersistBuffer')
? config.get('forcePersistBuffer') === 'true'
: false
// Support gradual rollout of the next history buffer level
// with a percentage of projects using it.
this.nextHistoryBufferLevel = config.has('nextHistoryBufferLevel')
? parseInt(config.get('nextHistoryBufferLevel'), 10)
: null
this.nextHistoryBufferLevelRolloutPercentage = config.has(
'nextHistoryBufferLevelRolloutPercentage'
)
? parseInt(config.get('nextHistoryBufferLevelRolloutPercentage'), 10)
: 0
}
report(logger) {
logger.info(
{
historyBufferLevel: this.historyBufferLevel,
forcePersistBuffer: this.forcePersistBuffer,
nextHistoryBufferLevel: this.nextHistoryBufferLevel,
nextHistoryBufferLevelRolloutPercentage:
this.nextHistoryBufferLevelRolloutPercentage,
},
this.historyBufferLevel > 0 || this.forcePersistBuffer
? 'using history buffer'
: 'history buffer disabled'
)
}
/**
* Get the history buffer level for a project.
* @param {string} projectId
* @returns {Object} - An object containing the history buffer level and force persist buffer flag.
* @property {number} historyBufferLevel - The history buffer level to use for processing changes.
* @property {boolean} forcePersistBuffer - If true, forces the buffer to be persisted before any operation.
*/
getHistoryBufferLevelOptions(projectId) {
if (
this.nextHistoryBufferLevel > this.historyBufferLevel &&
this.nextHistoryBufferLevelRolloutPercentage > 0
) {
const hash = crypto.createHash('sha1').update(projectId).digest('hex')
const percentage = parseInt(hash.slice(0, 8), 16) % 100
// If the project is in the rollout percentage, we use the next history buffer level.
if (percentage < this.nextHistoryBufferLevelRolloutPercentage) {
return {
historyBufferLevel: this.nextHistoryBufferLevel,
forcePersistBuffer: this.forcePersistBuffer,
}
}
}
return {
historyBufferLevel: this.historyBufferLevel,
forcePersistBuffer: this.forcePersistBuffer,
}
}
}
module.exports = Rollout
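
The gradual-rollout trick in getHistoryBufferLevelOptions above is worth calling out: hashing the project id gives a stable bucket, so a given percentage enrolls the same projects on every request rather than flapping between levels. The core of it as a standalone sketch:

const crypto = require('node:crypto')

// Stable 0..99 bucket per project id.
function rolloutBucket(projectId) {
  const hash = crypto.createHash('sha1').update(projectId).digest('hex')
  return parseInt(hash.slice(0, 8), 16) % 100
}

// With a 25% rollout, exactly the projects whose bucket is < 25 get the
// next history buffer level, consistently across processes and restarts.
const useNextLevel = rolloutBucket('some-project-id') < 25
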


@ -2,7 +2,6 @@
'use strict' 'use strict'
const config = require('config')
const { expressify } = require('@overleaf/promise-utils') const { expressify } = require('@overleaf/promise-utils')
const HTTPStatus = require('http-status') const HTTPStatus = require('http-status')
@ -22,15 +21,10 @@ const BatchBlobStore = storage.BatchBlobStore
const BlobStore = storage.BlobStore const BlobStore = storage.BlobStore
const chunkStore = storage.chunkStore const chunkStore = storage.chunkStore
const HashCheckBlobStore = storage.HashCheckBlobStore const HashCheckBlobStore = storage.HashCheckBlobStore
const commitChanges = storage.commitChanges const persistChanges = storage.persistChanges
const persistBuffer = storage.persistBuffer
const InvalidChangeError = storage.InvalidChangeError const InvalidChangeError = storage.InvalidChangeError
const render = require('./render') const render = require('./render')
const Rollout = require('../app/rollout')
const rollout = new Rollout(config)
rollout.report(logger) // display the rollout configuration in the logs
async function importSnapshot(req, res) { async function importSnapshot(req, res) {
const projectId = req.swagger.params.project_id.value const projectId = req.swagger.params.project_id.value
@ -41,7 +35,6 @@ async function importSnapshot(req, res) {
try { try {
snapshot = Snapshot.fromRaw(rawSnapshot) snapshot = Snapshot.fromRaw(rawSnapshot)
} catch (err) { } catch (err) {
logger.warn({ err, projectId }, 'failed to import snapshot')
return render.unprocessableEntity(res) return render.unprocessableEntity(res)
} }
@ -50,7 +43,6 @@ async function importSnapshot(req, res) {
historyId = await chunkStore.initializeProject(projectId, snapshot) historyId = await chunkStore.initializeProject(projectId, snapshot)
} catch (err) { } catch (err) {
if (err instanceof chunkStore.AlreadyInitialized) { if (err instanceof chunkStore.AlreadyInitialized) {
logger.warn({ err, projectId }, 'already initialized')
return render.conflict(res) return render.conflict(res)
} else { } else {
throw err throw err
@ -116,12 +108,7 @@ async function importChanges(req, res, next) {
let result let result
try { try {
const { historyBufferLevel, forcePersistBuffer } = result = await persistChanges(projectId, changes, limits, endVersion)
rollout.getHistoryBufferLevelOptions(projectId)
result = await commitChanges(projectId, changes, limits, endVersion, {
historyBufferLevel,
forcePersistBuffer,
})
} catch (err) { } catch (err) {
if ( if (
err instanceof Chunk.ConflictingEndVersion || err instanceof Chunk.ConflictingEndVersion ||
@ -154,29 +141,5 @@ async function importChanges(req, res, next) {
} }
} }
async function flushChanges(req, res, next) {
const projectId = req.swagger.params.project_id.value
// Use the same limits as importChanges, since these are passed to persistChanges
const farFuture = new Date()
farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000)
const limits = {
maxChanges: 0,
minChangeTimestamp: farFuture,
maxChangeTimestamp: farFuture,
autoResync: true,
}
try {
await persistBuffer(projectId, limits)
res.status(HTTPStatus.OK).end()
} catch (err) {
if (err instanceof Chunk.NotFoundError) {
render.notFound(res)
} else {
throw err
}
}
}
exports.importSnapshot = expressify(importSnapshot) exports.importSnapshot = expressify(importSnapshot)
exports.importChanges = expressify(importChanges) exports.importChanges = expressify(importChanges)
exports.flushChanges = expressify(flushChanges)
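
A hedged usage sketch for the flushChanges handler shown above (host, base path, and credentials are placeholders; the route takes basic auth per the swagger definition below): a POST with no body persists the project's buffered changes, answering 200 on success and 404 for an unknown project.

async function flushProject(projectId) {
  const res = await fetch(
    `http://history-v1.example.com/api/projects/${projectId}/flush`,
    {
      method: 'POST',
      headers: {
        Authorization:
          'Basic ' + Buffer.from('staging:secret').toString('base64'),
      },
    }
  )
  if (res.status === 404) throw new Error(`project ${projectId} not found`)
  return res.status // 200 once the buffer is persisted to the chunk store
}
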


@ -34,7 +34,6 @@ async function initializeProject(req, res, next) {
res.status(HTTPStatus.OK).json({ projectId }) res.status(HTTPStatus.OK).json({ projectId })
} catch (err) { } catch (err) {
if (err instanceof chunkStore.AlreadyInitialized) { if (err instanceof chunkStore.AlreadyInitialized) {
logger.warn({ err, projectId }, 'failed to initialize')
render.conflict(res) render.conflict(res)
} else { } else {
throw err throw err
@ -243,15 +242,11 @@ async function createProjectBlob(req, res, next) {
const sizeLimit = new StreamSizeLimit(maxUploadSize) const sizeLimit = new StreamSizeLimit(maxUploadSize)
await pipeline(req, sizeLimit, fs.createWriteStream(tmpPath)) await pipeline(req, sizeLimit, fs.createWriteStream(tmpPath))
if (sizeLimit.sizeLimitExceeded) { if (sizeLimit.sizeLimitExceeded) {
logger.warn(
{ projectId, expectedHash, maxUploadSize },
'blob exceeds size threshold'
)
return render.requestEntityTooLarge(res) return render.requestEntityTooLarge(res)
} }
const hash = await blobHash.fromFile(tmpPath) const hash = await blobHash.fromFile(tmpPath)
if (hash !== expectedHash) { if (hash !== expectedHash) {
logger.warn({ projectId, hash, expectedHash }, 'Hash mismatch') logger.debug({ hash, expectedHash }, 'Hash mismatch')
return render.conflict(res, 'File hash mismatch') return render.conflict(res, 'File hash mismatch')
} }
@ -348,10 +343,6 @@ async function copyProjectBlob(req, res, next) {
targetBlobStore.getBlob(blobHash), targetBlobStore.getBlob(blobHash),
]) ])
if (!sourceBlob) { if (!sourceBlob) {
logger.warn(
{ sourceProjectId, targetProjectId, blobHash },
'missing source blob when copying across projects'
)
return render.notFound(res) return render.notFound(res)
} }
// Exit early if the blob exists in the target project. // Exit early if the blob exists in the target project.


@ -139,45 +139,9 @@ const getChanges = {
], ],
} }
const flushChanges = {
'x-swagger-router-controller': 'project_import',
operationId: 'flushChanges',
tags: ['ProjectImport'],
description: 'Flush project changes from buffer to the chunk store.',
parameters: [
{
name: 'project_id',
in: 'path',
description: 'project id',
required: true,
type: 'string',
},
],
responses: {
200: {
description: 'Success',
schema: {
$ref: '#/definitions/Project',
},
},
404: {
description: 'Not Found',
schema: {
$ref: '#/definitions/Error',
},
},
},
security: [
{
basic: [],
},
],
}
exports.paths = { exports.paths = {
'/projects/{project_id}/import': { post: importSnapshot }, '/projects/{project_id}/import': { post: importSnapshot },
'/projects/{project_id}/legacy_import': { post: importSnapshot }, '/projects/{project_id}/legacy_import': { post: importSnapshot },
'/projects/{project_id}/changes': { get: getChanges, post: importChanges }, '/projects/{project_id}/changes': { get: getChanges, post: importChanges },
'/projects/{project_id}/legacy_changes': { post: importChanges }, '/projects/{project_id}/legacy_changes': { post: importChanges },
'/projects/{project_id}/flush': { post: flushChanges },
} }


@ -100,13 +100,11 @@ function setupErrorHandling() {
}) })
} }
if (err.code === 'ENUM_MISMATCH') { if (err.code === 'ENUM_MISMATCH') {
logger.warn({ err, projectId }, err.message)
return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({ return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({
message: 'invalid enum value: ' + err.paramName, message: 'invalid enum value: ' + err.paramName,
}) })
} }
if (err.code === 'REQUIRED') { if (err.code === 'REQUIRED') {
logger.warn({ err, projectId }, err.message)
return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({ return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({
message: err.message, message: err.message,
}) })


@ -84,10 +84,6 @@
"maxFileUploadSize": "MAX_FILE_UPLOAD_SIZE", "maxFileUploadSize": "MAX_FILE_UPLOAD_SIZE",
"httpsOnly": "HTTPS_ONLY", "httpsOnly": "HTTPS_ONLY",
"httpRequestTimeout": "HTTP_REQUEST_TIMEOUT", "httpRequestTimeout": "HTTP_REQUEST_TIMEOUT",
"historyBufferLevel": "HISTORY_BUFFER_LEVEL",
"forcePersistBuffer": "FORCE_PERSIST_BUFFER",
"nextHistoryBufferLevel": "NEXT_HISTORY_BUFFER_LEVEL",
"nextHistoryBufferLevelRolloutPercentage": "NEXT_HISTORY_BUFFER_LEVEL_ROLLOUT_PERCENTAGE",
"redis": { "redis": {
"queue": { "queue": {
"host": "QUEUES_REDIS_HOST", "host": "QUEUES_REDIS_HOST",
@ -104,9 +100,5 @@
"password": "REDIS_PASSWORD", "password": "REDIS_PASSWORD",
"port": "REDIS_PORT" "port": "REDIS_PORT"
} }
},
"projectHistory": {
"host": "PROJECT_HISTORY_HOST",
"port": "PROJECT_HISTORY_PORT"
} }
} }


@ -39,8 +39,5 @@
"databasePoolMin": "2", "databasePoolMin": "2",
"databasePoolMax": "10", "databasePoolMax": "10",
"httpsOnly": "false", "httpsOnly": "false",
"httpRequestTimeout": "300000", "httpRequestTimeout": "300000"
"projectHistory": {
"port": "3054"
}
} }

Some files were not shown because too many files have changed in this diff.