Compare commits

..

5 commits

590 changed files with 6217 additions and 21217 deletions

View file

@ -1,12 +1,3 @@
---
name: Bug report
about: Report a bug
title: ''
labels: type:bug
assignees: ''
---
<!--
Note: If you are using www.overleaf.com and have a problem,

View file

@ -25,10 +25,10 @@ services:
env_file:
- dev.env
environment:
- DOCKER_RUNNER=true
- TEXLIVE_IMAGE=texlive-full # docker build texlive -t texlive-full
- SANDBOXED_COMPILES=true
- SANDBOXED_COMPILES_HOST_DIR_COMPILES=${PWD}/compiles
- SANDBOXED_COMPILES_HOST_DIR_OUTPUT=${PWD}/output
- COMPILES_HOST_DIR=${PWD}/compiles
- OUTPUT_HOST_DIR=${PWD}/output
user: root
volumes:
- ${PWD}/compiles:/overleaf/services/clsi/compiles

View file

@ -1,6 +1,6 @@
access-token-encryptor
--dependencies=None
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=
--esmock-loader=False

View file

@ -1,6 +1,6 @@
fetch-utils
--dependencies=None
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=
--esmock-loader=False

View file

@ -1,6 +1,6 @@
logger
--dependencies=None
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=
--esmock-loader=False

View file

@ -1,6 +1,6 @@
metrics
--dependencies=None
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=
--esmock-loader=False

View file

@ -1,6 +1,6 @@
mongo-utils
--dependencies=None
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=
--esmock-loader=False

View file

@ -1,6 +1,6 @@
o-error
--dependencies=None
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=
--esmock-loader=False

View file

@ -1,6 +1,6 @@
object-persistor
--dependencies=None
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=
--esmock-loader=False

View file

@ -1,6 +1,6 @@
overleaf-editor-core
--dependencies=None
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=
--esmock-loader=False

View file

@ -1,7 +1,7 @@
// @ts-check
/**
* @import { ClearTrackingPropsRawData, TrackingDirective } from '../types'
* @import { ClearTrackingPropsRawData } from '../types'
*/
class ClearTrackingProps {
@ -11,27 +11,12 @@ class ClearTrackingProps {
/**
* @param {any} other
* @returns {other is ClearTrackingProps}
* @returns {boolean}
*/
equals(other) {
return other instanceof ClearTrackingProps
}
/**
* @param {TrackingDirective} other
* @returns {other is ClearTrackingProps}
*/
canMergeWith(other) {
return other instanceof ClearTrackingProps
}
/**
* @param {TrackingDirective} other
*/
mergeWith(other) {
return this
}
/**
* @returns {ClearTrackingPropsRawData}
*/

View file

@ -11,7 +11,7 @@ const EditOperation = require('../operation/edit_operation')
const EditOperationBuilder = require('../operation/edit_operation_builder')
/**
* @import { BlobStore, ReadonlyBlobStore, RangesBlob, RawHashFileData, RawLazyStringFileData } from '../types'
* @import { BlobStore, ReadonlyBlobStore, RangesBlob, RawFileData, RawLazyStringFileData } from '../types'
*/
class LazyStringFileData extends FileData {
@ -159,11 +159,11 @@ class LazyStringFileData extends FileData {
/** @inheritdoc
* @param {BlobStore} blobStore
* @return {Promise<RawHashFileData>}
* @return {Promise<RawFileData>}
*/
async store(blobStore) {
if (this.operations.length === 0) {
/** @type RawHashFileData */
/** @type RawFileData */
const raw = { hash: this.hash }
if (this.rangesHash) {
raw.rangesHash = this.rangesHash
@ -171,11 +171,9 @@ class LazyStringFileData extends FileData {
return raw
}
const eager = await this.toEager(blobStore)
const raw = await eager.store(blobStore)
this.hash = raw.hash
this.rangesHash = raw.rangesHash
this.operations.length = 0
return raw
/** @type RawFileData */
return await eager.store(blobStore)
}
}

View file

@ -8,7 +8,7 @@ const CommentList = require('./comment_list')
const TrackedChangeList = require('./tracked_change_list')
/**
* @import { StringFileRawData, RawHashFileData, BlobStore, CommentRawData } from "../types"
* @import { StringFileRawData, RawFileData, BlobStore, CommentRawData } from "../types"
* @import { TrackedChangeRawData, RangesBlob } from "../types"
* @import EditOperation from "../operation/edit_operation"
*/
@ -139,7 +139,7 @@ class StringFileData extends FileData {
/**
* @inheritdoc
* @param {BlobStore} blobStore
* @return {Promise<RawHashFileData>}
* @return {Promise<RawFileData>}
*/
async store(blobStore) {
const blob = await blobStore.putString(this.content)

View file

@ -84,21 +84,6 @@ class TrackedChange {
)
)
}
/**
* Return an equivalent tracked change whose extent is limited to the given
* range
*
* @param {Range} range
* @returns {TrackedChange | null} - the result or null if the intersection is empty
*/
intersectRange(range) {
const intersection = this.range.intersect(range)
if (intersection == null) {
return null
}
return new TrackedChange(intersection, this.tracking)
}
}
module.exports = TrackedChange

View file

@ -2,11 +2,9 @@
const Range = require('../range')
const TrackedChange = require('./tracked_change')
const TrackingProps = require('../file_data/tracking_props')
const { InsertOp, RemoveOp, RetainOp } = require('../operation/scan_op')
/**
* @import { TrackingDirective, TrackedChangeRawData } from "../types"
* @import TextOperation from "../operation/text_operation"
*/
class TrackedChangeList {
@ -60,22 +58,6 @@ class TrackedChangeList {
return this._trackedChanges.filter(change => range.contains(change.range))
}
/**
* Returns tracked changes that overlap with the given range
* @param {Range} range
* @returns {TrackedChange[]}
*/
intersectRange(range) {
const changes = []
for (const change of this._trackedChanges) {
const intersection = change.intersectRange(range)
if (intersection != null) {
changes.push(intersection)
}
}
return changes
}
/**
* Returns the tracking props for a given range.
* @param {Range} range
@ -107,8 +89,6 @@ class TrackedChangeList {
/**
* Collapses consecutive (and compatible) ranges
*
* @private
* @returns {void}
*/
_mergeRanges() {
@ -137,28 +117,12 @@ class TrackedChangeList {
}
/**
* Apply an insert operation
*
* @param {number} cursor
* @param {string} insertedText
* @param {{tracking?: TrackingProps}} opts
*/
applyInsert(cursor, insertedText, opts = {}) {
this._applyInsert(cursor, insertedText, opts)
this._mergeRanges()
}
/**
* Apply an insert operation
*
* This method will not merge ranges at the end
*
* @private
* @param {number} cursor
* @param {string} insertedText
* @param {{tracking?: TrackingProps}} [opts]
*/
_applyInsert(cursor, insertedText, opts = {}) {
const newTrackedChanges = []
for (const trackedChange of this._trackedChanges) {
if (
@ -207,29 +171,15 @@ class TrackedChangeList {
newTrackedChanges.push(newTrackedChange)
}
this._trackedChanges = newTrackedChanges
this._mergeRanges()
}
/**
* Apply a delete operation to the list of tracked changes
*
* @param {number} cursor
* @param {number} length
*/
applyDelete(cursor, length) {
this._applyDelete(cursor, length)
this._mergeRanges()
}
/**
* Apply a delete operation to the list of tracked changes
*
* This method will not merge ranges at the end
*
* @private
* @param {number} cursor
* @param {number} length
*/
_applyDelete(cursor, length) {
const newTrackedChanges = []
for (const trackedChange of this._trackedChanges) {
const deletedRange = new Range(cursor, length)
@ -255,31 +205,15 @@ class TrackedChangeList {
}
}
this._trackedChanges = newTrackedChanges
}
/**
* Apply a retain operation to the list of tracked changes
*
* @param {number} cursor
* @param {number} length
* @param {{tracking?: TrackingDirective}} [opts]
*/
applyRetain(cursor, length, opts = {}) {
this._applyRetain(cursor, length, opts)
this._mergeRanges()
}
/**
* Apply a retain operation to the list of tracked changes
*
* This method will not merge ranges at the end
*
* @private
* @param {number} cursor
* @param {number} length
* @param {{tracking?: TrackingDirective}} opts
*/
_applyRetain(cursor, length, opts = {}) {
applyRetain(cursor, length, opts = {}) {
// If there's no tracking info, leave everything as-is
if (!opts.tracking) {
return
@ -335,31 +269,6 @@ class TrackedChangeList {
newTrackedChanges.push(newTrackedChange)
}
this._trackedChanges = newTrackedChanges
}
/**
* Apply a text operation to the list of tracked changes
*
* Ranges are merged only once at the end, for performance and to avoid
* problematic edge cases where intermediate ranges get incorrectly merged.
*
* @param {TextOperation} operation
*/
applyTextOperation(operation) {
// this cursor tracks the destination document that gets modified as
// operations are applied to it.
let cursor = 0
for (const op of operation.ops) {
if (op instanceof InsertOp) {
this._applyInsert(cursor, op.insertion, { tracking: op.tracking })
cursor += op.insertion.length
} else if (op instanceof RemoveOp) {
this._applyDelete(cursor, op.length)
} else if (op instanceof RetainOp) {
this._applyRetain(cursor, op.length, { tracking: op.tracking })
cursor += op.length
}
}
this._mergeRanges()
}
}
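
The applyInsert/applyDelete/applyRetain methods above adjust the positions and lengths of tracked-change ranges as text is inserted, deleted, or retained at a cursor. For intuition, a minimal standalone sketch of the shift arithmetic for an insert; this helper is hypothetical and only illustrates the idea, it is not part of the Overleaf API:

```js
// Hypothetical illustration: how an insert of `len` characters at `cursor`
// affects a tracked-change range { pos, length }.
function shiftRangeForInsert(range, cursor, len) {
  const end = range.pos + range.length
  if (cursor <= range.pos) {
    // Insert before the range: the whole range moves right.
    return { pos: range.pos + len, length: range.length }
  }
  if (cursor >= end) {
    // Insert after the range: nothing changes.
    return { ...range }
  }
  // Insert inside the range: the range grows. The real implementation also
  // considers the tracking info of the inserted text and may split ranges.
  return { pos: range.pos, length: range.length + len }
}

console.log(shiftRangeForInsert({ pos: 5, length: 5 }, 2, 3)) // { pos: 8, length: 5 }
console.log(shiftRangeForInsert({ pos: 5, length: 5 }, 7, 3)) // { pos: 5, length: 8 }
```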

View file

@ -62,35 +62,6 @@ class TrackingProps {
this.ts.getTime() === other.ts.getTime()
)
}
/**
* Are these tracking props compatible with the other tracking props for merging
* ranges?
*
* @param {TrackingDirective} other
* @returns {other is TrackingProps}
*/
canMergeWith(other) {
if (!(other instanceof TrackingProps)) {
return false
}
return this.type === other.type && this.userId === other.userId
}
/**
* Merge two tracking props
*
* Assumes that `canMerge(other)` returns true
*
* @param {TrackingDirective} other
*/
mergeWith(other) {
if (!this.canMergeWith(other)) {
throw new Error('Cannot merge with incompatible tracking props')
}
const ts = this.ts <= other.ts ? this.ts : other.ts
return new TrackingProps(this.type, this.userId, ts)
}
}
module.exports = TrackingProps
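
For reference, a rough sketch of how the removed canMergeWith/mergeWith behaved, assuming the TrackingProps(type, userId, ts) constructor used in the tests later in this diff: two props are mergeable when type and userId match, and merging keeps the earlier timestamp.

```js
// Assumes TrackingProps is the class defined above.
const a = new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
const b = new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:01.000Z'))
const c = new TrackingProps('delete', 'user1', new Date('2024-01-01T00:00:00.000Z'))

a.canMergeWith(b) // true: same type and userId
a.mergeWith(b)    // new TrackingProps keeping the earlier timestamp (00:00:00.000Z)
a.canMergeWith(c) // false: different type
// a.mergeWith(c) would throw 'Cannot merge with incompatible tracking props'
```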

View file

@ -175,7 +175,7 @@ class InsertOp extends ScanOp {
return false
}
if (this.tracking) {
if (!other.tracking || !this.tracking.canMergeWith(other.tracking)) {
if (!this.tracking.equals(other.tracking)) {
return false
}
} else if (other.tracking) {
@ -198,10 +198,7 @@ class InsertOp extends ScanOp {
throw new Error('Cannot merge with incompatible operation')
}
this.insertion += other.insertion
if (this.tracking != null && other.tracking != null) {
this.tracking = this.tracking.mergeWith(other.tracking)
}
// We already have the same commentIds
// We already have the same tracking info and commentIds
}
/**
@ -309,13 +306,9 @@ class RetainOp extends ScanOp {
return false
}
if (this.tracking) {
if (!other.tracking || !this.tracking.canMergeWith(other.tracking)) {
return false
return this.tracking.equals(other.tracking)
}
} else if (other.tracking) {
return false
}
return true
return !other.tracking
}
/**
@ -326,9 +319,6 @@ class RetainOp extends ScanOp {
throw new Error('Cannot merge with incompatible operation')
}
this.length += other.length
if (this.tracking != null && other.tracking != null) {
this.tracking = this.tracking.mergeWith(other.tracking)
}
}
/**

View file

@ -314,18 +314,25 @@ class TextOperation extends EditOperation {
str
)
}
file.trackedChanges.applyRetain(result.length, op.length, {
tracking: op.tracking,
})
result += str.slice(inputCursor, inputCursor + op.length)
inputCursor += op.length
} else if (op instanceof InsertOp) {
if (containsNonBmpChars(op.insertion)) {
throw new InvalidInsertionError(str, op.toJSON())
}
file.trackedChanges.applyInsert(result.length, op.insertion, {
tracking: op.tracking,
})
file.comments.applyInsert(
new Range(result.length, op.insertion.length),
{ commentIds: op.commentIds }
)
result += op.insertion
} else if (op instanceof RemoveOp) {
file.trackedChanges.applyDelete(result.length, op.length)
file.comments.applyDelete(new Range(result.length, op.length))
inputCursor += op.length
} else {
@ -345,8 +352,6 @@ class TextOperation extends EditOperation {
throw new TextOperation.TooLongError(operation, result.length)
}
file.trackedChanges.applyTextOperation(this)
file.content = result
}
@ -395,37 +400,45 @@ class TextOperation extends EditOperation {
for (let i = 0, l = ops.length; i < l; i++) {
const op = ops[i]
if (op instanceof RetainOp) {
if (op.tracking) {
// Where we need to end up after the retains
const target = strIndex + op.length
// A previous retain could have overriden some tracking info. Now we
// need to restore it.
const previousChanges = previousState.trackedChanges.intersectRange(
const previousRanges = previousState.trackedChanges.inRange(
new Range(strIndex, op.length)
)
for (const change of previousChanges) {
if (strIndex < change.range.start) {
inverse.retain(change.range.start - strIndex, {
tracking: new ClearTrackingProps(),
})
strIndex = change.range.start
let removeTrackingInfoIfNeeded
if (op.tracking) {
removeTrackingInfoIfNeeded = new ClearTrackingProps()
}
inverse.retain(change.range.length, {
tracking: change.tracking,
for (const trackedChange of previousRanges) {
if (strIndex < trackedChange.range.start) {
inverse.retain(trackedChange.range.start - strIndex, {
tracking: removeTrackingInfoIfNeeded,
})
strIndex += change.range.length
strIndex = trackedChange.range.start
}
if (trackedChange.range.end < strIndex + op.length) {
inverse.retain(trackedChange.range.length, {
tracking: trackedChange.tracking,
})
strIndex = trackedChange.range.end
}
if (trackedChange.range.end !== strIndex) {
// No need to split the range at the end
const [left] = trackedChange.range.splitAt(strIndex)
inverse.retain(left.length, { tracking: trackedChange.tracking })
strIndex = left.end
}
}
if (strIndex < target) {
inverse.retain(target - strIndex, {
tracking: new ClearTrackingProps(),
tracking: removeTrackingInfoIfNeeded,
})
strIndex = target
}
} else {
inverse.retain(op.length)
strIndex += op.length
}
} else if (op instanceof InsertOp) {
inverse.remove(op.insertion.length)
} else if (op instanceof RemoveOp) {

View file

@ -86,32 +86,10 @@ class Range {
}
/**
* Does this range overlap another range?
*
* Overlapping means that the two ranges have at least one character in common
*
* @param {Range} other - the other range
* @param {Range} range
*/
overlaps(other) {
return this.start < other.end && this.end > other.start
}
/**
* Does this range overlap the start of another range?
*
* @param {Range} other - the other range
*/
overlapsStart(other) {
return this.start <= other.start && this.end > other.start
}
/**
* Does this range overlap the end of another range?
*
* @param {Range} other - the other range
*/
overlapsEnd(other) {
return this.start < other.end && this.end >= other.end
overlaps(range) {
return this.start < range.end && this.end > range.start
}
/**
@ -249,26 +227,6 @@ class Range {
)
return [rangeUpToCursor, rangeAfterCursor]
}
/**
* Returns the intersection of this range with another range
*
* @param {Range} other - the other range
* @return {Range | null} the intersection or null if the intersection is empty
*/
intersect(other) {
if (this.contains(other)) {
return other
} else if (other.contains(this)) {
return this
} else if (other.overlapsStart(this)) {
return new Range(this.pos, other.end - this.start)
} else if (other.overlapsEnd(this)) {
return new Range(other.pos, this.end - other.start)
} else {
return null
}
}
}
module.exports = Range
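
For intuition on the removed helpers, a short sketch using the values from the Range tests deleted later in this diff, assuming Range(pos, length) with the start/end accessors shown above:

```js
// Assumes Range is the class defined above.
const a = new Range(5, 10) // covers positions 5..14
const b = new Range(3, 6)  // covers positions 3..8

a.overlaps(b)      // true: they share positions 5..8
b.overlapsStart(a) // true: b covers a.start (5) but not a.end
a.intersect(b)     // Range(5, 4), the shared span
new Range(20, 30).intersect(a) // null: disjoint ranges have no intersection
```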

View file

@ -193,13 +193,4 @@ describe('LazyStringFileData', function () {
expect(fileData.getStringLength()).to.equal(longString.length)
expect(fileData.getOperations()).to.have.length(1)
})
it('truncates its operations after being stored', async function () {
const testHash = File.EMPTY_FILE_HASH
const fileData = new LazyStringFileData(testHash, undefined, 0)
fileData.edit(new TextOperation().insert('abc'))
const stored = await fileData.store(this.blobStore)
expect(fileData.hash).to.equal(stored.hash)
expect(fileData.operations).to.deep.equal([])
})
})

View file

@ -1,3 +1,4 @@
// @ts-check
'use strict'
const { expect } = require('chai')
@ -448,44 +449,4 @@ describe('Range', function () {
expect(() => range.insertAt(16, 3)).to.throw()
})
})
describe('intersect', function () {
it('should handle partially overlapping ranges', function () {
const range1 = new Range(5, 10)
const range2 = new Range(3, 6)
const intersection1 = range1.intersect(range2)
expect(intersection1.pos).to.equal(5)
expect(intersection1.length).to.equal(4)
const intersection2 = range2.intersect(range1)
expect(intersection2.pos).to.equal(5)
expect(intersection2.length).to.equal(4)
})
it('should intersect with itself', function () {
const range = new Range(5, 10)
const intersection = range.intersect(range)
expect(intersection.pos).to.equal(5)
expect(intersection.length).to.equal(10)
})
it('should handle nested ranges', function () {
const range1 = new Range(5, 10)
const range2 = new Range(7, 2)
const intersection1 = range1.intersect(range2)
expect(intersection1.pos).to.equal(7)
expect(intersection1.length).to.equal(2)
const intersection2 = range2.intersect(range1)
expect(intersection2.pos).to.equal(7)
expect(intersection2.length).to.equal(2)
})
it('should handle disconnected ranges', function () {
const range1 = new Range(5, 10)
const range2 = new Range(20, 30)
const intersection1 = range1.intersect(range2)
expect(intersection1).to.be.null
const intersection2 = range2.intersect(range1)
expect(intersection2).to.be.null
})
})
})

View file

@ -107,7 +107,7 @@ describe('RetainOp', function () {
expect(op1.equals(new RetainOp(3))).to.be.true
})
it('cannot merge with another RetainOp if the tracking user is different', function () {
it('cannot merge with another RetainOp if tracking info is different', function () {
const op1 = new RetainOp(
4,
new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
@ -120,14 +120,14 @@ describe('RetainOp', function () {
expect(() => op1.mergeWith(op2)).to.throw(Error)
})
it('can merge with another RetainOp if the tracking user is the same', function () {
it('can merge with another RetainOp if tracking info is the same', function () {
const op1 = new RetainOp(
4,
new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
)
const op2 = new RetainOp(
4,
new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:01.000Z'))
new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
)
op1.mergeWith(op2)
expect(
@ -310,7 +310,7 @@ describe('InsertOp', function () {
expect(() => op1.mergeWith(op2)).to.throw(Error)
})
it('cannot merge with another InsertOp if tracking user is different', function () {
it('cannot merge with another InsertOp if tracking info is different', function () {
const op1 = new InsertOp(
'a',
new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
@ -323,7 +323,7 @@ describe('InsertOp', function () {
expect(() => op1.mergeWith(op2)).to.throw(Error)
})
it('can merge with another InsertOp if tracking user and comment info is the same', function () {
it('can merge with another InsertOp if tracking and comment info is the same', function () {
const op1 = new InsertOp(
'a',
new TrackingProps(
@ -338,7 +338,7 @@ describe('InsertOp', function () {
new TrackingProps(
'insert',
'user1',
new Date('2024-01-01T00:00:01.000Z')
new Date('2024-01-01T00:00:00.000Z')
),
['1', '2']
)

View file

@ -322,47 +322,6 @@ describe('TextOperation', function () {
new TextOperation().retain(4).remove(4).retain(3)
)
})
it('undoing a tracked delete restores the tracked changes', function () {
expectInverseToLeadToInitialState(
new StringFileData(
'the quick brown fox jumps over the lazy dog',
undefined,
[
{
range: { pos: 5, length: 5 },
tracking: {
ts: '2023-01-01T00:00:00.000Z',
type: 'insert',
userId: 'user1',
},
},
{
range: { pos: 12, length: 3 },
tracking: {
ts: '2023-01-01T00:00:00.000Z',
type: 'delete',
userId: 'user1',
},
},
{
range: { pos: 18, length: 5 },
tracking: {
ts: '2023-01-01T00:00:00.000Z',
type: 'insert',
userId: 'user1',
},
},
]
),
new TextOperation()
.retain(7)
.retain(13, {
tracking: new TrackingProps('delete', 'user1', new Date()),
})
.retain(23)
)
})
})
describe('compose', function () {

View file

@ -1,6 +1,6 @@
promise-utils
--dependencies=None
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=
--esmock-loader=False

View file

@ -1,6 +1,6 @@
ranges-tracker
--dependencies=None
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=
--esmock-loader=False

View file

@ -1,6 +1,6 @@
redis-wrapper
--dependencies=None
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=
--esmock-loader=False

View file

@ -1,6 +1,6 @@
settings
--dependencies=None
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=
--esmock-loader=False

View file

@ -1,6 +1,6 @@
stream-utils
--dependencies=None
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=
--esmock-loader=False

package-lock.json (generated, 1715 changes)

File diff suppressed because it is too large

View file

@ -37,7 +37,7 @@
},
"swagger-tools": {
"body-parser": "1.20.3",
"multer": "2.0.1",
"multer": "2.0.0",
"path-to-regexp": "3.3.0",
"qs": "6.13.0"
}

View file

@ -1,28 +0,0 @@
FROM sharelatex/sharelatex:5.5.0
# fix tls configuration in redis for history-v1
COPY pr_25168.patch .
RUN patch -p1 < pr_25168.patch && rm pr_25168.patch
# improve logging in history system
COPY pr_26086.patch .
RUN patch -p1 < pr_26086.patch && rm pr_26086.patch
# fix create-user.mjs script
COPY pr_26152.patch .
RUN patch -p1 < pr_26152.patch && rm pr_26152.patch
# check mongo featureCompatibilityVersion
COPY pr_26091.patch .
RUN patch -p1 < pr_26091.patch && rm pr_26091.patch
# update multer and tar-fs
RUN sed -i 's/"multer": "2.0.0"/"multer": "2.0.1"/g' package.json
RUN sed -i 's/"dockerode": "^4.0.5"/"dockerode": "^4.0.7"/g' services/clsi/package.json
RUN sed -i 's/"tar-fs": "^3.0.4"/"tar-fs": "^3.0.9"/g' services/clsi/package.json
RUN sed -i 's/199c5ff05bd375c508f4074498237baead7f5148/4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23/g' services/web/package.json
COPY package-lock.json.diff .
RUN patch package-lock.json < package-lock.json.diff
RUN npm install --omit=dev
RUN npm install @paralleldrive/cuid2@2.2.2 -w services/history-v1

File diff suppressed because it is too large

View file

@ -1,19 +0,0 @@
--- a/services/history-v1/config/custom-environment-variables.json
+++ b/services/history-v1/config/custom-environment-variables.json
@@ -50,12 +50,14 @@
"history": {
"host": "OVERLEAF_REDIS_HOST",
"password": "OVERLEAF_REDIS_PASS",
- "port": "OVERLEAF_REDIS_PORT"
+ "port": "OVERLEAF_REDIS_PORT",
+ "tls": "OVERLEAF_REDIS_TLS"
},
"lock": {
"host": "OVERLEAF_REDIS_HOST",
"password": "OVERLEAF_REDIS_PASS",
- "port": "OVERLEAF_REDIS_PORT"
+ "port": "OVERLEAF_REDIS_PORT",
+ "tls": "OVERLEAF_REDIS_TLS"
}
}
}

View file

@ -1,200 +0,0 @@
--- a/services/history-v1/api/controllers/project_import.js
+++ b/services/history-v1/api/controllers/project_import.js
@@ -35,6 +35,7 @@ async function importSnapshot(req, res) {
try {
snapshot = Snapshot.fromRaw(rawSnapshot)
} catch (err) {
+ logger.warn({ err, projectId }, 'failed to import snapshot')
return render.unprocessableEntity(res)
}
@@ -43,6 +44,7 @@ async function importSnapshot(req, res) {
historyId = await chunkStore.initializeProject(projectId, snapshot)
} catch (err) {
if (err instanceof chunkStore.AlreadyInitialized) {
+ logger.warn({ err, projectId }, 'already initialized')
return render.conflict(res)
} else {
throw err
--- a/services/history-v1/api/controllers/projects.js
+++ b/services/history-v1/api/controllers/projects.js
@@ -34,6 +34,7 @@ async function initializeProject(req, res, next) {
res.status(HTTPStatus.OK).json({ projectId })
} catch (err) {
if (err instanceof chunkStore.AlreadyInitialized) {
+ logger.warn({ err, projectId }, 'failed to initialize')
render.conflict(res)
} else {
throw err
@@ -242,11 +243,15 @@ async function createProjectBlob(req, res, next) {
const sizeLimit = new StreamSizeLimit(maxUploadSize)
await pipeline(req, sizeLimit, fs.createWriteStream(tmpPath))
if (sizeLimit.sizeLimitExceeded) {
+ logger.warn(
+ { projectId, expectedHash, maxUploadSize },
+ 'blob exceeds size threshold'
+ )
return render.requestEntityTooLarge(res)
}
const hash = await blobHash.fromFile(tmpPath)
if (hash !== expectedHash) {
- logger.debug({ hash, expectedHash }, 'Hash mismatch')
+ logger.warn({ projectId, hash, expectedHash }, 'Hash mismatch')
return render.conflict(res, 'File hash mismatch')
}
@@ -343,6 +348,10 @@ async function copyProjectBlob(req, res, next) {
targetBlobStore.getBlob(blobHash),
])
if (!sourceBlob) {
+ logger.warn(
+ { sourceProjectId, targetProjectId, blobHash },
+ 'missing source blob when copying across projects'
+ )
return render.notFound(res)
}
// Exit early if the blob exists in the target project.
--- a/services/history-v1/app.js
+++ b/services/history-v1/app.js
@@ -100,11 +100,13 @@ function setupErrorHandling() {
})
}
if (err.code === 'ENUM_MISMATCH') {
+ logger.warn({ err, projectId }, err.message)
return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({
message: 'invalid enum value: ' + err.paramName,
})
}
if (err.code === 'REQUIRED') {
+ logger.warn({ err, projectId }, err.message)
return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({
message: err.message,
})
--- a/services/project-history/app/js/HistoryStoreManager.js
+++ b/services/project-history/app/js/HistoryStoreManager.js
@@ -35,7 +35,10 @@ class StringStream extends stream.Readable {
_mocks.getMostRecentChunk = (projectId, historyId, callback) => {
const path = `projects/${historyId}/latest/history`
logger.debug({ projectId, historyId }, 'getting chunk from history service')
- _requestChunk({ path, json: true }, callback)
+ _requestChunk({ path, json: true }, (err, chunk) => {
+ if (err) return callback(OError.tag(err))
+ callback(null, chunk)
+ })
}
/**
@@ -54,7 +57,10 @@ export function getChunkAtVersion(projectId, historyId, version, callback) {
{ projectId, historyId, version },
'getting chunk from history service for version'
)
- _requestChunk({ path, json: true }, callback)
+ _requestChunk({ path, json: true }, (err, chunk) => {
+ if (err) return callback(OError.tag(err))
+ callback(null, chunk)
+ })
}
export function getMostRecentVersion(projectId, historyId, callback) {
@@ -68,8 +74,10 @@ export function getMostRecentVersion(projectId, historyId, callback) {
_.sortBy(chunk.chunk.history.changes || [], x => x.timestamp)
)
// find the latest project and doc versions in the chunk
- _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) =>
+ _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) => {
+ if (err1) err1 = OError.tag(err1)
_getLatestV2DocVersions(projectId, chunk, (err2, v2DocVersions) => {
+ if (err2) err2 = OError.tag(err2)
// return the project and doc versions
const projectStructureAndDocVersions = {
project: projectVersion,
@@ -83,7 +91,7 @@ export function getMostRecentVersion(projectId, historyId, callback) {
chunk
)
})
- )
+ })
})
}
@@ -211,7 +219,10 @@ export function getProjectBlob(historyId, blobHash, callback) {
logger.debug({ historyId, blobHash }, 'getting blob from history service')
_requestHistoryService(
{ path: `projects/${historyId}/blobs/${blobHash}` },
- callback
+ (err, blob) => {
+ if (err) return callback(OError.tag(err))
+ callback(null, blob)
+ }
)
}
@@ -277,7 +288,10 @@ function createBlobFromString(historyId, data, fileId, callback) {
(fsPath, cb) => {
_createBlob(historyId, fsPath, cb)
},
- callback
+ (err, hash) => {
+ if (err) return callback(OError.tag(err))
+ callback(null, hash)
+ }
)
}
@@ -330,7 +344,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
try {
ranges = HistoryBlobTranslator.createRangeBlobDataFromUpdate(update)
} catch (error) {
- return callback(error)
+ return callback(OError.tag(error))
}
createBlobFromString(
historyId,
@@ -338,7 +352,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
`project-${projectId}-doc-${update.doc}`,
(err, fileHash) => {
if (err) {
- return callback(err)
+ return callback(OError.tag(err))
}
if (ranges) {
createBlobFromString(
@@ -347,7 +361,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
`project-${projectId}-doc-${update.doc}-ranges`,
(err, rangesHash) => {
if (err) {
- return callback(err)
+ return callback(OError.tag(err))
}
logger.debug(
{ fileHash, rangesHash },
@@ -415,7 +429,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
},
(err, fileHash) => {
if (err) {
- return callback(err)
+ return callback(OError.tag(err))
}
if (update.hash && update.hash !== fileHash) {
logger.warn(
@@ -447,7 +461,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
},
(err, fileHash) => {
if (err) {
- return callback(err)
+ return callback(OError.tag(err))
}
logger.debug({ fileHash }, 'created empty blob for file')
callback(null, { file: fileHash })
@@ -520,7 +534,10 @@ export function initializeProject(historyId, callback) {
export function deleteProject(projectId, callback) {
_requestHistoryService(
{ method: 'DELETE', path: `projects/${projectId}` },
- callback
+ err => {
+ if (err) return callback(OError.tag(err))
+ callback(null)
+ }
)
}
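
Each callback in this patch is wrapped so errors pass through OError.tag before reaching the caller, which attaches the current stack frame while keeping the original error and its context. A minimal sketch of the pattern; the inner request helper here is hypothetical:

```js
const OError = require('@overleaf/o-error')

// Hypothetical callback-style helper, for illustration only.
function requestSomething(path, callback) {
  process.nextTick(() => callback(new Error(`request failed for ${path}`)))
}

function getThing(path, callback) {
  requestSomething(path, (err, result) => {
    // Tagging adds the current stack frame to the error without replacing it,
    // so the caller can see where this call originated.
    if (err) return callback(OError.tag(err))
    callback(null, result)
  })
}

getThing('projects/123/latest/history', err => console.error(err))
```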

View file

@ -1,60 +0,0 @@
--- a/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
+++ b/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
@@ -7,6 +7,7 @@ import {
const { ObjectId } = mongodb
const MIN_MONGO_VERSION = [6, 0]
+const MIN_MONGO_FEATURE_COMPATIBILITY_VERSION = [6, 0]
async function main() {
let mongoClient
@@ -18,6 +19,7 @@ async function main() {
}
await checkMongoVersion(mongoClient)
+ await checkFeatureCompatibilityVersion(mongoClient)
try {
await testTransactions(mongoClient)
@@ -53,6 +55,41 @@ async function checkMongoVersion(mongoClient) {
}
}
+async function checkFeatureCompatibilityVersion(mongoClient) {
+ const {
+ featureCompatibilityVersion: { version },
+ } = await mongoClient
+ .db()
+ .admin()
+ .command({ getParameter: 1, featureCompatibilityVersion: 1 })
+ const [major, minor] = version.split('.').map(v => parseInt(v))
+ const [minMajor, minMinor] = MIN_MONGO_FEATURE_COMPATIBILITY_VERSION
+
+ if (major < minMajor || (major === minMajor && minor < minMinor)) {
+ const minVersion = MIN_MONGO_FEATURE_COMPATIBILITY_VERSION.join('.')
+ console.error(`
+The MongoDB server has featureCompatibilityVersion=${version}, but Overleaf requires at least version ${minVersion}.
+
+Open a mongo shell:
+- Overleaf Toolkit deployments: $ bin/mongo
+- Legacy docker-compose.yml deployments: $ docker exec -it mongo mongosh localhost/sharelatex
+
+In the mongo shell:
+> db.adminCommand( { setFeatureCompatibilityVersion: "${minMajor}.${minMinor}" } )
+
+Verify the new value:
+> db.adminCommand( { getParameter: 1, featureCompatibilityVersion: 1 } )
+ ...
+ {
+ featureCompatibilityVersion: { version: ${minMajor}.${minMinor}' },
+...
+
+Aborting.
+`)
+ process.exit(1)
+ }
+}
+
main()
.then(() => {
console.error('Mongodb is up.')

View file

@ -1,16 +0,0 @@
--- a/services/web/modules/server-ce-scripts/scripts/create-user.mjs
+++ b/services/web/modules/server-ce-scripts/scripts/create-user.mjs
@@ -48,3 +48,13 @@ Please visit the following URL to set a password for ${email} and log in:
)
})
}
+
+if (filename === process.argv[1]) {
+ try {
+ await main()
+ process.exit(0)
+ } catch (error) {
+ console.error({ error })
+ process.exit(1)
+ }
+}
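
The added guard runs main() only when the script is executed directly rather than imported. The `filename` variable is defined outside this hunk; a plausible sketch of the complete ES-module pattern is below (the fileURLToPath line is an assumption, not part of the patch):

```js
import { fileURLToPath } from 'node:url'

const filename = fileURLToPath(import.meta.url)

async function main() {
  // ... script body
}

if (filename === process.argv[1]) {
  try {
    await main()
    process.exit(0)
  } catch (error) {
    console.error({ error })
    process.exit(1)
  }
}
```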

View file

@ -6,8 +6,8 @@ all: test-e2e
# Notably, the container labels com.docker.compose.project.working_dir and com.docker.compose.project.config_files need to match when creating containers from the docker host (how you started things) and from host-admin (how tests reconfigure the instance).
export PWD = $(shell pwd)
export TEX_LIVE_DOCKER_IMAGE ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2023.1
export ALL_TEX_LIVE_DOCKER_IMAGES ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2023.1,us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2022.1
export TEX_LIVE_DOCKER_IMAGE ?= gcr.io/overleaf-ops/texlive-full:2023.1
export ALL_TEX_LIVE_DOCKER_IMAGES ?= gcr.io/overleaf-ops/texlive-full:2023.1,gcr.io/overleaf-ops/texlive-full:2022.1
export IMAGE_TAG_PRO ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/pro:latest
export CYPRESS_SHARD ?=
export COMPOSE_PROJECT_NAME ?= test
@ -20,7 +20,6 @@ test-e2e-native:
npm run cypress:open
test-e2e:
docker compose build host-admin
docker compose up --no-log-prefix --exit-code-from=e2e e2e
test-e2e-open:
@ -46,7 +45,7 @@ prefetch_custom_compose_pull:
prefetch_custom: prefetch_custom_texlive
prefetch_custom_texlive:
echo "$$ALL_TEX_LIVE_DOCKER_IMAGES" | tr ',' '\n' | xargs -I% \
sh -exc 'tag=%; re_tag=quay.io/sharelatex/$${tag#*/*/*/}; docker pull $$tag; docker tag $$tag $$re_tag'
sh -exc 'tag=%; re_tag=quay.io/sharelatex/$${tag#*/*/}; docker pull $$tag; docker tag $$tag $$re_tag'
prefetch_custom: prefetch_old
prefetch_old:

View file

@ -179,21 +179,6 @@ describe('admin panel', function () {
cy.get('nav').findByText('Manage Users').click()
})
it('displays expected tabs', () => {
const tabs = ['Users', 'License Usage']
cy.get('[role="tab"]').each((el, index) => {
cy.wrap(el).findByText(tabs[index]).click()
})
cy.get('[role="tab"]').should('have.length', tabs.length)
})
it('license usage tab', () => {
cy.get('a').contains('License Usage').click()
cy.findByText(
'An active user is one who has opened a project in this Server Pro instance in the last 12 months.'
)
})
describe('create users', () => {
beforeEach(() => {
cy.get('a').contains('New User').click()

View file

@ -131,7 +131,7 @@ services:
saml:
restart: always
image: us-east1-docker.pkg.dev/overleaf-ops/ol-docker/saml-test
image: gcr.io/overleaf-ops/saml-test
environment:
SAML_TEST_SP_ENTITY_ID: 'sharelatex-test-saml'
SAML_BASE_URL_PATH: 'http://saml/simplesaml/'

View file

@ -24,13 +24,10 @@ services:
MOCHA_GREP: ${MOCHA_GREP}
NODE_ENV: test
NODE_OPTIONS: "--unhandled-rejections=strict"
volumes:
- ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
depends_on:
mongo:
condition: service_started
user: node
entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
command: npm run test:acceptance

View file

@ -26,7 +26,6 @@ services:
- .:/overleaf/services/chat
- ../../node_modules:/overleaf/node_modules
- ../../libraries:/overleaf/libraries
- ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
working_dir: /overleaf/services/chat
environment:
ELASTIC_SEARCH_DSN: es:9200
@ -40,7 +39,6 @@ services:
depends_on:
mongo:
condition: service_started
entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
command: npm run --silent test:acceptance
mongo:

View file

@ -19,18 +19,18 @@ The CLSI can be configured through the following environment variables:
* `ALLOWED_IMAGES` - Space separated list of allowed Docker TeX Live images
* `CATCH_ERRORS` - Set to `true` to log uncaught exceptions
* `COMPILE_GROUP_DOCKER_CONFIGS` - JSON string of Docker configs for compile groups
* `SANDBOXED_COMPILES` - Set to true to use sibling containers
* `SANDBOXED_COMPILES_HOST_DIR_COMPILES` - Working directory for LaTeX compiles
* `SANDBOXED_COMPILES_HOST_DIR_OUTPUT` - Output directory for LaTeX compiles
* `COMPILES_HOST_DIR` - Working directory for LaTeX compiles
* `OUTPUT_HOST_DIR` - Output directory for LaTeX compiles
* `COMPILE_SIZE_LIMIT` - Sets the body-parser [limit](https://github.com/expressjs/body-parser#limit)
* `DOCKER_RUNNER` - Set to true to use sibling containers
* `DOCKER_RUNTIME` -
* `FILESTORE_DOMAIN_OVERRIDE` - The url for the filestore service e.g.`http://$FILESTORE_HOST:3009`
* `FILESTORE_PARALLEL_FILE_DOWNLOADS` - Number of parallel file downloads
* `LISTEN_ADDRESS` - The address for the RESTful service to listen on. Set to `0.0.0.0` to listen on all network interfaces
* `PROCESS_LIFE_SPAN_LIMIT_MS` - Process life span limit in milliseconds
* `SMOKE_TEST` - Whether to run smoke tests
* `TEXLIVE_IMAGE` - The TeX Live Docker image to use for sibling containers, e.g. `us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2017.1`
* `TEX_LIVE_IMAGE_NAME_OVERRIDE` - The name of the registry for the Docker image e.g. `us-east1-docker.pkg.dev/overleaf-ops/ol-docker`
* `TEXLIVE_IMAGE` - The TeX Live Docker image to use for sibling containers, e.g. `gcr.io/overleaf-ops/texlive-full:2017.1`
* `TEX_LIVE_IMAGE_NAME_OVERRIDE` - The name of the registry for the Docker image e.g. `gcr.io/overleaf-ops`
* `TEXLIVE_IMAGE_USER` - When using sibling containers, the user to run as in the TeX Live image. Defaults to `tex`
* `TEXLIVE_OPENOUT_ANY` - Sets the `openout_any` environment variable for TeX Live (see the `\openout` primitive [documentation](http://tug.org/texinfohtml/web2c.html#tex-invocation))
@ -63,10 +63,10 @@ Then start the Docker container:
docker run --rm \
-p 127.0.0.1:3013:3013 \
-e LISTEN_ADDRESS=0.0.0.0 \
-e SANDBOXED_COMPILES=true \
-e DOCKER_RUNNER=true \
-e TEXLIVE_IMAGE=texlive/texlive \
-e TEXLIVE_IMAGE_USER=root \
-e SANDBOXED_COMPILES_HOST_DIR_COMPILES="$PWD/compiles" \
-e COMPILES_HOST_DIR="$PWD/compiles" \
-v "$PWD/compiles:/overleaf/services/clsi/compiles" \
-v "$PWD/cache:/overleaf/services/clsi/cache" \
-v /var/run/docker.sock:/var/run/docker.sock \

View file

@ -2,7 +2,7 @@ clsi
--data-dirs=cache,compiles,output
--dependencies=
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
--env-add=ENABLE_PDF_CACHING="true",PDF_CACHING_ENABLE_WORKER_POOL="true",ALLOWED_IMAGES=quay.io/sharelatex/texlive-full:2017.1,TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1,TEX_LIVE_IMAGE_NAME_OVERRIDE=us-east1-docker.pkg.dev/overleaf-ops/ol-docker,TEXLIVE_IMAGE_USER="tex",SANDBOXED_COMPILES="true",SANDBOXED_COMPILES_HOST_DIR_COMPILES=$PWD/compiles,SANDBOXED_COMPILES_HOST_DIR_OUTPUT=$PWD/output
--env-add=ENABLE_PDF_CACHING="true",PDF_CACHING_ENABLE_WORKER_POOL="true",ALLOWED_IMAGES=quay.io/sharelatex/texlive-full:2017.1,TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1,TEX_LIVE_IMAGE_NAME_OVERRIDE=us-east1-docker.pkg.dev/overleaf-ops/ol-docker,TEXLIVE_IMAGE_USER="tex",DOCKER_RUNNER="true",COMPILES_HOST_DIR=$PWD/compiles,OUTPUT_HOST_DIR=$PWD/output
--env-pass-through=
--esmock-loader=False
--node-version=22.15.1

View file

@ -29,9 +29,9 @@ services:
TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
TEX_LIVE_IMAGE_NAME_OVERRIDE: us-east1-docker.pkg.dev/overleaf-ops/ol-docker
TEXLIVE_IMAGE_USER: "tex"
SANDBOXED_COMPILES: "true"
SANDBOXED_COMPILES_HOST_DIR_COMPILES: $PWD/compiles
SANDBOXED_COMPILES_HOST_DIR_OUTPUT: $PWD/output
DOCKER_RUNNER: "true"
COMPILES_HOST_DIR: $PWD/compiles
OUTPUT_HOST_DIR: $PWD/output
volumes:
- ./compiles:/overleaf/services/clsi/compiles
- /var/run/docker.sock:/var/run/docker.sock

View file

@ -47,8 +47,8 @@ services:
TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
TEX_LIVE_IMAGE_NAME_OVERRIDE: us-east1-docker.pkg.dev/overleaf-ops/ol-docker
TEXLIVE_IMAGE_USER: "tex"
SANDBOXED_COMPILES: "true"
SANDBOXED_COMPILES_HOST_DIR_COMPILES: $PWD/compiles
SANDBOXED_COMPILES_HOST_DIR_OUTPUT: $PWD/output
DOCKER_RUNNER: "true"
COMPILES_HOST_DIR: $PWD/compiles
OUTPUT_HOST_DIR: $PWD/output
command: npm run --silent test:acceptance

View file

@ -27,13 +27,13 @@
"async": "^3.2.5",
"body-parser": "^1.20.3",
"bunyan": "^1.8.15",
"dockerode": "^4.0.7",
"dockerode": "^4.0.5",
"express": "^4.21.2",
"lodash": "^4.17.21",
"p-limit": "^3.1.0",
"request": "^2.88.2",
"send": "^0.19.0",
"tar-fs": "^3.0.9",
"tar-fs": "^3.0.4",
"workerpool": "^6.1.5"
},
"devDependencies": {

View file

@ -24,13 +24,10 @@ services:
MOCHA_GREP: ${MOCHA_GREP}
NODE_ENV: test
NODE_OPTIONS: "--unhandled-rejections=strict"
volumes:
- ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
depends_on:
mongo:
condition: service_started
user: node
entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
command: npm run test:acceptance

View file

@ -26,7 +26,6 @@ services:
- .:/overleaf/services/contacts
- ../../node_modules:/overleaf/node_modules
- ../../libraries:/overleaf/libraries
- ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
working_dir: /overleaf/services/contacts
environment:
ELASTIC_SEARCH_DSN: es:9200
@ -40,7 +39,6 @@ services:
depends_on:
mongo:
condition: service_started
entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
command: npm run --silent test:acceptance
mongo:

View file

@ -6,9 +6,9 @@
"main": "app.js",
"scripts": {
"start": "node app.js",
"test:acceptance:_run": "mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js",
"test:acceptance:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js",
"test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
"test:unit:_run": "mocha --loader=esmock --recursive --reporter spec $@ test/unit/js",
"test:unit:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec $@ test/unit/js",
"test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
"nodemon": "node --watch app.js",
"lint": "eslint --max-warnings 0 --format unix .",

View file

@ -50,14 +50,6 @@ app.param('doc_id', function (req, res, next, docId) {
app.get('/project/:project_id/doc-deleted', HttpController.getAllDeletedDocs)
app.get('/project/:project_id/doc', HttpController.getAllDocs)
app.get('/project/:project_id/ranges', HttpController.getAllRanges)
app.get(
'/project/:project_id/comment-thread-ids',
HttpController.getCommentThreadIds
)
app.get(
'/project/:project_id/tracked-changes-user-ids',
HttpController.getTrackedChangesUserIds
)
app.get('/project/:project_id/has-ranges', HttpController.projectHasRanges)
app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc)
app.get('/project/:project_id/doc/:doc_id/deleted', HttpController.isDocDeleted)
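
The two removed routes exposed per-project ranges metadata. Based on the DocManager implementations removed later in this diff, a rough sketch of how a client might call them and the shapes they returned; host, port and ids are placeholders:

```js
const { fetchJson } = require('@overleaf/fetch-utils')

// Illustrative only: base URL and project id are placeholders.
async function inspectRangesMetadata(projectId) {
  const base = 'http://127.0.0.1:3016'

  const threadIds = await fetchJson(
    `${base}/project/${projectId}/comment-thread-ids`
  )
  // -> { [docId]: ['threadId1', 'threadId2'], ... }

  const userIds = await fetchJson(
    `${base}/project/${projectId}/tracked-changes-user-ids`
  )
  // -> ['user-id-1', 'user-id-2']  (anonymous edits are excluded)

  return { threadIds, userIds }
}
```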

View file

@ -1,4 +1,5 @@
const MongoManager = require('./MongoManager')
const { callbackify } = require('node:util')
const MongoManager = require('./MongoManager').promises
const Errors = require('./Errors')
const logger = require('@overleaf/logger')
const Settings = require('@overleaf/settings')
@ -7,12 +8,29 @@ const { ReadableString } = require('@overleaf/stream-utils')
const RangeManager = require('./RangeManager')
const PersistorManager = require('./PersistorManager')
const pMap = require('p-map')
const { streamToBuffer } = require('./StreamToBuffer')
const { streamToBuffer } = require('./StreamToBuffer').promises
const { BSON } = require('mongodb-legacy')
const PARALLEL_JOBS = Settings.parallelArchiveJobs
const UN_ARCHIVE_BATCH_SIZE = Settings.unArchiveBatchSize
module.exports = {
archiveAllDocs: callbackify(archiveAllDocs),
archiveDoc: callbackify(archiveDoc),
unArchiveAllDocs: callbackify(unArchiveAllDocs),
unarchiveDoc: callbackify(unarchiveDoc),
destroyProject: callbackify(destroyProject),
getDoc: callbackify(getDoc),
promises: {
archiveAllDocs,
archiveDoc,
unArchiveAllDocs,
unarchiveDoc,
destroyProject,
getDoc,
},
}
async function archiveAllDocs(projectId) {
if (!_isArchivingEnabled()) {
return
@ -44,8 +62,6 @@ async function archiveDoc(projectId, docId) {
throw new Error('doc has no lines')
}
RangeManager.fixCommentIds(doc)
// warn about any oversized docs already in mongo
const linesSize = BSON.calculateObjectSize(doc.lines || {})
const rangesSize = BSON.calculateObjectSize(doc.ranges || {})
@ -209,12 +225,3 @@ function _isArchivingEnabled() {
return true
}
module.exports = {
archiveAllDocs,
archiveDoc,
unArchiveAllDocs,
unarchiveDoc,
destroyProject,
getDoc,
}
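
The callbackify-based export block above keeps the async implementations available under `promises` while exposing callback-style wrappers at the top level, so existing callers keep working while new code can await. A minimal sketch of the dual-export pattern with a single function:

```js
const { callbackify } = require('node:util')

async function archiveDoc(projectId, docId) {
  // ...archive the doc, then return something useful
  return { projectId, docId }
}

module.exports = {
  archiveDoc: callbackify(archiveDoc), // callback API: archiveDoc(projectId, docId, cb)
  promises: { archiveDoc },            // promise API: await promises.archiveDoc(...)
}
```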

View file

@ -5,6 +5,7 @@ const _ = require('lodash')
const DocArchive = require('./DocArchiveManager')
const RangeManager = require('./RangeManager')
const Settings = require('@overleaf/settings')
const { callbackifyAll } = require('@overleaf/promise-utils')
const { setTimeout } = require('node:timers/promises')
/**
@ -28,7 +29,7 @@ const DocManager = {
throw new Error('must include inS3 when getting doc')
}
const doc = await MongoManager.findDoc(projectId, docId, filter)
const doc = await MongoManager.promises.findDoc(projectId, docId, filter)
if (doc == null) {
throw new Errors.NotFoundError(
@ -37,19 +38,15 @@ const DocManager = {
}
if (doc.inS3) {
await DocArchive.unarchiveDoc(projectId, docId)
await DocArchive.promises.unarchiveDoc(projectId, docId)
return await DocManager._getDoc(projectId, docId, filter)
}
if (filter.ranges) {
RangeManager.fixCommentIds(doc)
}
return doc
},
async isDocDeleted(projectId, docId) {
const doc = await MongoManager.findDoc(projectId, docId, {
const doc = await MongoManager.promises.findDoc(projectId, docId, {
deleted: true,
})
@ -77,7 +74,7 @@ const DocManager = {
// returns the doc without any version information
async _peekRawDoc(projectId, docId) {
const doc = await MongoManager.findDoc(projectId, docId, {
const doc = await MongoManager.promises.findDoc(projectId, docId, {
lines: true,
rev: true,
deleted: true,
@ -94,7 +91,7 @@ const DocManager = {
if (doc.inS3) {
// skip the unarchiving to mongo when getting a doc
const archivedDoc = await DocArchive.getDoc(projectId, docId)
const archivedDoc = await DocArchive.promises.getDoc(projectId, docId)
Object.assign(doc, archivedDoc)
}
@ -105,7 +102,7 @@ const DocManager = {
// without unarchiving it (avoids unnecessary writes to mongo)
async peekDoc(projectId, docId) {
const doc = await DocManager._peekRawDoc(projectId, docId)
await MongoManager.checkRevUnchanged(doc)
await MongoManager.promises.checkRevUnchanged(doc)
return doc
},
@ -114,18 +111,16 @@ const DocManager = {
lines: true,
inS3: true,
})
if (!doc) throw new Errors.NotFoundError()
if (!Array.isArray(doc.lines)) throw new Errors.DocWithoutLinesError()
return doc.lines.join('\n')
return doc
},
async getAllDeletedDocs(projectId, filter) {
return await MongoManager.getProjectsDeletedDocs(projectId, filter)
return await MongoManager.promises.getProjectsDeletedDocs(projectId, filter)
},
async getAllNonDeletedDocs(projectId, filter) {
await DocArchive.unArchiveAllDocs(projectId)
const docs = await MongoManager.getProjectsDocs(
await DocArchive.promises.unArchiveAllDocs(projectId)
const docs = await MongoManager.promises.getProjectsDocs(
projectId,
{ include_deleted: false },
filter
@ -133,46 +128,15 @@ const DocManager = {
if (docs == null) {
throw new Errors.NotFoundError(`No docs for project ${projectId}`)
}
if (filter.ranges) {
for (const doc of docs) {
RangeManager.fixCommentIds(doc)
}
}
return docs
},
async getCommentThreadIds(projectId) {
const docs = await DocManager.getAllNonDeletedDocs(projectId, {
_id: true,
ranges: true,
})
const byDoc = new Map()
for (const doc of docs) {
const ids = new Set()
for (const comment of doc.ranges?.comments || []) {
ids.add(comment.op.t)
}
if (ids.size > 0) byDoc.set(doc._id.toString(), Array.from(ids))
}
return Object.fromEntries(byDoc.entries())
},
async getTrackedChangesUserIds(projectId) {
const docs = await DocManager.getAllNonDeletedDocs(projectId, {
ranges: true,
})
const userIds = new Set()
for (const doc of docs) {
for (const change of doc.ranges?.changes || []) {
if (change.metadata.user_id === 'anonymous-user') continue
userIds.add(change.metadata.user_id)
}
}
return Array.from(userIds)
},
async projectHasRanges(projectId) {
const docs = await MongoManager.getProjectsDocs(projectId, {}, { _id: 1 })
const docs = await MongoManager.promises.getProjectsDocs(
projectId,
{},
{ _id: 1 }
)
const docIds = docs.map(doc => doc._id)
for (const docId of docIds) {
const doc = await DocManager.peekDoc(projectId, docId)
@ -283,7 +247,7 @@ const DocManager = {
}
modified = true
await MongoManager.upsertIntoDocCollection(
await MongoManager.promises.upsertIntoDocCollection(
projectId,
docId,
doc?.rev,
@ -298,7 +262,11 @@ const DocManager = {
async patchDoc(projectId, docId, meta) {
const projection = { _id: 1, deleted: true }
const doc = await MongoManager.findDoc(projectId, docId, projection)
const doc = await MongoManager.promises.findDoc(
projectId,
docId,
projection
)
if (!doc) {
throw new Errors.NotFoundError(
`No such project/doc to delete: ${projectId}/${docId}`
@ -307,7 +275,7 @@ const DocManager = {
if (meta.deleted && Settings.docstore.archiveOnSoftDelete) {
// The user will not read this doc anytime soon. Flush it out of mongo.
DocArchive.archiveDoc(projectId, docId).catch(err => {
DocArchive.promises.archiveDoc(projectId, docId).catch(err => {
logger.warn(
{ projectId, docId, err },
'archiving a single doc in the background failed'
@ -315,8 +283,15 @@ const DocManager = {
})
}
await MongoManager.patchDoc(projectId, docId, meta)
await MongoManager.promises.patchDoc(projectId, docId, meta)
},
}
module.exports = DocManager
module.exports = {
...callbackifyAll(DocManager, {
multiResult: {
updateDoc: ['modified', 'rev'],
},
}),
promises: DocManager,
}
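
The callbackifyAll export above wraps every async method of DocManager; its multiResult option declares that updateDoc resolves to an object whose modified and rev fields are spread into separate callback arguments, matching the (error, modified, rev) callback used in the HTTP controller later in this diff. A rough sketch of that behaviour, assuming @overleaf/promise-utils works as it is used here:

```js
const { callbackifyAll } = require('@overleaf/promise-utils')

const AsyncDocManager = {
  async updateDoc(projectId, docId) {
    return { modified: true, rev: 42 }
  },
}

const DocManager = callbackifyAll(AsyncDocManager, {
  multiResult: { updateDoc: ['modified', 'rev'] },
})

// The resolved object is split into positional callback arguments:
DocManager.updateDoc('p1', 'd1', (err, modified, rev) => {
  if (err) throw err
  console.log(modified, rev) // true 42
})
```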

View file

@ -10,13 +10,10 @@ class DocRevValueError extends OError {}
class DocVersionDecrementedError extends OError {}
class DocWithoutLinesError extends OError {}
module.exports = {
Md5MismatchError,
DocModifiedError,
DocRevValueError,
DocVersionDecrementedError,
DocWithoutLinesError,
...Errors,
}

View file

@ -1,12 +1,22 @@
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const { db, ObjectId } = require('./mongodb')
const request = require('request')
const async = require('async')
const _ = require('lodash')
const crypto = require('node:crypto')
const settings = require('@overleaf/settings')
const { port } = settings.internal.docstore
const logger = require('@overleaf/logger')
const { fetchNothing, fetchJson } = require('@overleaf/fetch-utils')
async function check() {
module.exports = {
check(callback) {
const docId = new ObjectId()
const projectId = new ObjectId(settings.docstore.healthCheck.project_id)
const url = `http://127.0.0.1:${port}/project/${projectId}/doc/${docId}`
@ -14,22 +24,44 @@ async function check() {
'smoke test - delete me',
`${crypto.randomBytes(32).toString('hex')}`,
]
logger.debug({ lines, url, docId, projectId }, 'running health check')
let body
try {
await fetchNothing(url, {
method: 'POST',
json: { lines, version: 42, ranges: {} },
signal: AbortSignal.timeout(3_000),
const getOpts = () => ({
url,
timeout: 3000,
})
body = await fetchJson(url, { signal: AbortSignal.timeout(3_000) })
} finally {
await db.docs.deleteOne({ _id: docId, project_id: projectId })
logger.debug({ lines, url, docId, projectId }, 'running health check')
const jobs = [
function (cb) {
const opts = getOpts()
opts.json = { lines, version: 42, ranges: {} }
return request.post(opts, cb)
},
function (cb) {
const opts = getOpts()
opts.json = true
return request.get(opts, function (err, res, body) {
if (err != null) {
logger.err({ err }, 'docstore returned a error in health check get')
return cb(err)
} else if (res == null) {
return cb(new Error('no response from docstore with get check'))
} else if ((res != null ? res.statusCode : undefined) !== 200) {
return cb(new Error(`status code not 200, its ${res.statusCode}`))
} else if (
_.isEqual(body != null ? body.lines : undefined, lines) &&
(body != null ? body._id : undefined) === docId.toString()
) {
return cb()
} else {
return cb(
new Error(
`health check lines not equal ${body.lines} != ${lines}`
)
)
}
if (!_.isEqual(body?.lines, lines)) {
throw new Error(`health check lines not equal ${body.lines} != ${lines}`)
}
}
module.exports = {
check,
})
},
cb => db.docs.deleteOne({ _id: docId, project_id: projectId }, cb),
]
return async.series(jobs, callback)
},
}

View file

@ -4,50 +4,81 @@ const DocArchive = require('./DocArchiveManager')
const HealthChecker = require('./HealthChecker')
const Errors = require('./Errors')
const Settings = require('@overleaf/settings')
const { expressify } = require('@overleaf/promise-utils')
async function getDoc(req, res) {
function getDoc(req, res, next) {
const { doc_id: docId, project_id: projectId } = req.params
const includeDeleted = req.query.include_deleted === 'true'
logger.debug({ projectId, docId }, 'getting doc')
const doc = await DocManager.getFullDoc(projectId, docId)
DocManager.getFullDoc(projectId, docId, function (error, doc) {
if (error) {
return next(error)
}
logger.debug({ docId, projectId }, 'got doc')
if (doc.deleted && !includeDeleted) {
if (doc == null) {
res.sendStatus(404)
} else if (doc.deleted && !includeDeleted) {
res.sendStatus(404)
} else {
res.json(_buildDocView(doc))
}
})
}
async function peekDoc(req, res) {
function peekDoc(req, res, next) {
const { doc_id: docId, project_id: projectId } = req.params
logger.debug({ projectId, docId }, 'peeking doc')
const doc = await DocManager.peekDoc(projectId, docId)
DocManager.peekDoc(projectId, docId, function (error, doc) {
if (error) {
return next(error)
}
if (doc == null) {
res.sendStatus(404)
} else {
res.setHeader('x-doc-status', doc.inS3 ? 'archived' : 'active')
res.json(_buildDocView(doc))
}
async function isDocDeleted(req, res) {
const { doc_id: docId, project_id: projectId } = req.params
const deleted = await DocManager.isDocDeleted(projectId, docId)
res.json({ deleted })
})
}
async function getRawDoc(req, res) {
function isDocDeleted(req, res, next) {
const { doc_id: docId, project_id: projectId } = req.params
DocManager.isDocDeleted(projectId, docId, function (error, deleted) {
if (error) {
return next(error)
}
res.json({ deleted })
})
}
function getRawDoc(req, res, next) {
const { doc_id: docId, project_id: projectId } = req.params
logger.debug({ projectId, docId }, 'getting raw doc')
const content = await DocManager.getDocLines(projectId, docId)
DocManager.getDocLines(projectId, docId, function (error, doc) {
if (error) {
return next(error)
}
if (doc == null) {
res.sendStatus(404)
} else {
res.setHeader('content-type', 'text/plain')
res.send(content)
res.send(_buildRawDocView(doc))
}
})
}
async function getAllDocs(req, res) {
function getAllDocs(req, res, next) {
const { project_id: projectId } = req.params
logger.debug({ projectId }, 'getting all docs')
const docs = await DocManager.getAllNonDeletedDocs(projectId, {
lines: true,
rev: true,
})
DocManager.getAllNonDeletedDocs(
projectId,
{ lines: true, rev: true },
function (error, docs) {
if (docs == null) {
docs = []
}
if (error) {
return next(error)
}
const docViews = _buildDocsArrayView(projectId, docs)
for (const docView of docViews) {
if (!docView.lines) {
@ -57,14 +88,19 @@ async function getAllDocs(req, res) {
}
res.json(docViews)
}
)
}
async function getAllDeletedDocs(req, res) {
function getAllDeletedDocs(req, res, next) {
const { project_id: projectId } = req.params
logger.debug({ projectId }, 'getting all deleted docs')
const docs = await DocManager.getAllDeletedDocs(projectId, {
name: true,
deletedAt: true,
})
DocManager.getAllDeletedDocs(
projectId,
{ name: true, deletedAt: true },
function (error, docs) {
if (error) {
return next(error)
}
res.json(
docs.map(doc => ({
_id: doc._id.toString(),
@ -73,35 +109,38 @@ async function getAllDeletedDocs(req, res) {
}))
)
}
)
}
async function getAllRanges(req, res) {
function getAllRanges(req, res, next) {
const { project_id: projectId } = req.params
logger.debug({ projectId }, 'getting all ranges')
const docs = await DocManager.getAllNonDeletedDocs(projectId, {
ranges: true,
})
DocManager.getAllNonDeletedDocs(
projectId,
{ ranges: true },
function (error, docs) {
if (docs == null) {
docs = []
}
if (error) {
return next(error)
}
res.json(_buildDocsArrayView(projectId, docs))
}
async function getCommentThreadIds(req, res) {
const { project_id: projectId } = req.params
const threadIds = await DocManager.getCommentThreadIds(projectId)
res.json(threadIds)
)
}
async function getTrackedChangesUserIds(req, res) {
function projectHasRanges(req, res, next) {
const { project_id: projectId } = req.params
const userIds = await DocManager.getTrackedChangesUserIds(projectId)
res.json(userIds)
DocManager.projectHasRanges(projectId, (err, projectHasRanges) => {
if (err) {
return next(err)
}
async function projectHasRanges(req, res) {
const { project_id: projectId } = req.params
const projectHasRanges = await DocManager.projectHasRanges(projectId)
res.json({ projectHasRanges })
})
}
async function updateDoc(req, res) {
function updateDoc(req, res, next) {
const { doc_id: docId, project_id: projectId } = req.params
const lines = req.body?.lines
const version = req.body?.version
@ -133,20 +172,25 @@ async function updateDoc(req, res) {
}
logger.debug({ projectId, docId }, 'got http request to update doc')
const { modified, rev } = await DocManager.updateDoc(
DocManager.updateDoc(
projectId,
docId,
lines,
version,
ranges
)
ranges,
function (error, modified, rev) {
if (error) {
return next(error)
}
res.json({
modified,
rev,
})
}
)
}
async function patchDoc(req, res) {
function patchDoc(req, res, next) {
const { doc_id: docId, project_id: projectId } = req.params
logger.debug({ projectId, docId }, 'patching doc')
@ -159,8 +203,12 @@ async function patchDoc(req, res) {
logger.fatal({ field }, 'joi validation for pathDoc is broken')
}
})
await DocManager.patchDoc(projectId, docId, meta)
DocManager.patchDoc(projectId, docId, meta, function (error) {
if (error) {
return next(error)
}
res.sendStatus(204)
})
}
function _buildDocView(doc) {
@ -173,6 +221,10 @@ function _buildDocView(doc) {
return docView
}
function _buildRawDocView(doc) {
return (doc?.lines ?? []).join('\n')
}
function _buildDocsArrayView(projectId, docs) {
const docViews = []
for (const doc of docs) {
@ -189,69 +241,79 @@ function _buildDocsArrayView(projectId, docs) {
return docViews
}
async function archiveAllDocs(req, res) {
function archiveAllDocs(req, res, next) {
const { project_id: projectId } = req.params
logger.debug({ projectId }, 'archiving all docs')
await DocArchive.archiveAllDocs(projectId)
DocArchive.archiveAllDocs(projectId, function (error) {
if (error) {
return next(error)
}
res.sendStatus(204)
})
}
async function archiveDoc(req, res) {
function archiveDoc(req, res, next) {
const { doc_id: docId, project_id: projectId } = req.params
logger.debug({ projectId, docId }, 'archiving a doc')
await DocArchive.archiveDoc(projectId, docId)
DocArchive.archiveDoc(projectId, docId, function (error) {
if (error) {
return next(error)
}
res.sendStatus(204)
})
}
async function unArchiveAllDocs(req, res) {
function unArchiveAllDocs(req, res, next) {
const { project_id: projectId } = req.params
logger.debug({ projectId }, 'unarchiving all docs')
try {
await DocArchive.unArchiveAllDocs(projectId)
} catch (err) {
DocArchive.unArchiveAllDocs(projectId, function (err) {
if (err) {
if (err instanceof Errors.DocRevValueError) {
logger.warn({ err }, 'Failed to unarchive doc')
return res.sendStatus(409)
}
throw err
return next(err)
}
res.sendStatus(200)
})
}
async function destroyProject(req, res) {
function destroyProject(req, res, next) {
const { project_id: projectId } = req.params
logger.debug({ projectId }, 'destroying all docs')
await DocArchive.destroyProject(projectId)
DocArchive.destroyProject(projectId, function (error) {
if (error) {
return next(error)
}
res.sendStatus(204)
})
}
async function healthCheck(req, res) {
try {
await HealthChecker.check()
} catch (err) {
function healthCheck(req, res) {
HealthChecker.check(function (err) {
if (err) {
logger.err({ err }, 'error performing health check')
res.sendStatus(500)
return
}
} else {
res.sendStatus(200)
}
})
}
module.exports = {
getDoc: expressify(getDoc),
peekDoc: expressify(peekDoc),
isDocDeleted: expressify(isDocDeleted),
getRawDoc: expressify(getRawDoc),
getAllDocs: expressify(getAllDocs),
getAllDeletedDocs: expressify(getAllDeletedDocs),
getAllRanges: expressify(getAllRanges),
getTrackedChangesUserIds: expressify(getTrackedChangesUserIds),
getCommentThreadIds: expressify(getCommentThreadIds),
projectHasRanges: expressify(projectHasRanges),
updateDoc: expressify(updateDoc),
patchDoc: expressify(patchDoc),
archiveAllDocs: expressify(archiveAllDocs),
archiveDoc: expressify(archiveDoc),
unArchiveAllDocs: expressify(unArchiveAllDocs),
destroyProject: expressify(destroyProject),
healthCheck: expressify(healthCheck),
getDoc,
peekDoc,
isDocDeleted,
getRawDoc,
getAllDocs,
getAllDeletedDocs,
getAllRanges,
projectHasRanges,
updateDoc,
patchDoc,
archiveAllDocs,
archiveDoc,
unArchiveAllDocs,
destroyProject,
healthCheck,
}
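Note: the expressify helper referenced in the exports above is not shown in this diff; the following is only a minimal sketch, assuming a conventional async-to-Express adapter, of what such a wrapper does — it mounts an async (req, res, next) handler on a route and forwards any rejection to next() so the error middleware handles it. The callback-style handlers on the other side of this diff skip the wrapper and call next(error) explicitly instead.

// Hypothetical sketch, not the actual @overleaf implementation:
function expressify(asyncHandler) {
  return (req, res, next) => {
    // forward rejections from the async handler to Express error handling
    asyncHandler(req, res, next).catch(next)
  }
}

// Usage sketch (route path illustrative):
// router.get('/project/:project_id/doc/:doc_id', expressify(getDoc))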
View file
@ -1,6 +1,7 @@
const { db, ObjectId } = require('./mongodb')
const Settings = require('@overleaf/settings')
const Errors = require('./Errors')
const { callbackify } = require('node:util')
const ARCHIVING_LOCK_DURATION_MS = Settings.archivingLockDurationMs
@ -240,6 +241,22 @@ async function destroyProject(projectId) {
}
module.exports = {
findDoc: callbackify(findDoc),
getProjectsDeletedDocs: callbackify(getProjectsDeletedDocs),
getProjectsDocs: callbackify(getProjectsDocs),
getArchivedProjectDocs: callbackify(getArchivedProjectDocs),
getNonArchivedProjectDocIds: callbackify(getNonArchivedProjectDocIds),
getNonDeletedArchivedProjectDocs: callbackify(
getNonDeletedArchivedProjectDocs
),
upsertIntoDocCollection: callbackify(upsertIntoDocCollection),
restoreArchivedDoc: callbackify(restoreArchivedDoc),
patchDoc: callbackify(patchDoc),
getDocForArchiving: callbackify(getDocForArchiving),
markDocAsArchived: callbackify(markDocAsArchived),
checkRevUnchanged: callbackify(checkRevUnchanged),
destroyProject: callbackify(destroyProject),
promises: {
findDoc,
getProjectsDeletedDocs,
getProjectsDocs,
@ -253,4 +270,5 @@ module.exports = {
markDocAsArchived,
checkRevUnchanged,
destroyProject,
},
}
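A minimal sketch of the dual-export pattern this MongoManager hunk adds: each async function is exported twice, wrapped with callbackify for legacy callback callers and unwrapped under `promises` for async/await callers. The findDoc body below is a placeholder, not the real Mongo query.

const { callbackify } = require('node:util')

async function findDoc(projectId, docId) {
  // placeholder for the real mongo lookup
  return { _id: docId, project_id: projectId }
}

module.exports = {
  findDoc: callbackify(findDoc),
  promises: { findDoc },
}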
View file
@ -49,25 +49,15 @@ module.exports = RangeManager = {
updateMetadata(change.metadata)
}
for (const comment of Array.from(ranges.comments || [])) {
// Two bugs resulted in mismatched ids, prefer the thread id from the op: https://github.com/overleaf/internal/issues/23272
comment.id = RangeManager._safeObjectId(comment.op?.t || comment.id)
if (comment.op) comment.op.t = comment.id
// resolved property is added to comments when they are obtained from history, but this state doesn't belong in mongo docs collection
// more info: https://github.com/overleaf/internal/issues/24371#issuecomment-2913095174
delete comment.op?.resolved
comment.id = RangeManager._safeObjectId(comment.id)
if ((comment.op != null ? comment.op.t : undefined) != null) {
comment.op.t = RangeManager._safeObjectId(comment.op.t)
}
updateMetadata(comment.metadata)
}
return ranges
},
fixCommentIds(doc) {
for (const comment of doc?.ranges?.comments || []) {
// Two bugs resulted in mismatched ids, prefer the thread id from the op: https://github.com/overleaf/internal/issues/23272
if (comment.op?.t) comment.id = comment.op.t
}
},
_safeObjectId(data) {
try {
return new ObjectId(data)
View file
@ -2,9 +2,13 @@ const { LoggerStream, WritableBuffer } = require('@overleaf/stream-utils')
const Settings = require('@overleaf/settings')
const logger = require('@overleaf/logger/logging-manager')
const { pipeline } = require('node:stream/promises')
const { callbackify } = require('node:util')
module.exports = {
streamToBuffer: callbackify(streamToBuffer),
promises: {
streamToBuffer,
},
}
async function streamToBuffer(projectId, docId, stream) {
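Only the signature of streamToBuffer is visible in this hunk, so the body below is a hedged sketch using plain Node core APIs rather than the @overleaf/stream-utils helpers imported above; it simply collects the readable stream into a single Buffer.

const { Writable } = require('node:stream')
const { pipeline } = require('node:stream/promises')

async function streamToBuffer(projectId, docId, stream) {
  const chunks = []
  await pipeline(
    stream,
    new Writable({
      write(chunk, _encoding, callback) {
        chunks.push(chunk) // accumulate each chunk in memory
        callback()
      },
    })
  )
  return Buffer.concat(chunks)
}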
View file
@ -27,15 +27,12 @@ services:
MOCHA_GREP: ${MOCHA_GREP}
NODE_ENV: test
NODE_OPTIONS: "--unhandled-rejections=strict"
volumes:
- ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
depends_on:
mongo:
condition: service_started
gcs:
condition: service_healthy
user: node
entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
command: npm run test:acceptance
View file
@ -26,7 +26,6 @@ services:
- .:/overleaf/services/docstore
- ../../node_modules:/overleaf/node_modules
- ../../libraries:/overleaf/libraries
- ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
working_dir: /overleaf/services/docstore
environment:
ELASTIC_SEARCH_DSN: es:9200
@ -45,7 +44,6 @@ services:
condition: service_started
gcs:
condition: service_healthy
entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
command: npm run --silent test:acceptance
mongo:
View file
@ -17,7 +17,6 @@
"types:check": "tsc --noEmit"
},
"dependencies": {
"@overleaf/fetch-utils": "*",
"@overleaf/logger": "*",
"@overleaf/metrics": "*",
"@overleaf/o-error": "*",
View file
@ -1001,15 +1001,6 @@ describe('Archiving', function () {
},
version: 2,
}
this.fixedRanges = {
...this.doc.ranges,
comments: [
{
...this.doc.ranges.comments[0],
id: this.doc.ranges.comments[0].op.t,
},
],
}
return DocstoreClient.createDoc(
this.project_id,
this.doc._id,
@ -1057,7 +1048,7 @@ describe('Archiving', function () {
throw error
}
s3Doc.lines.should.deep.equal(this.doc.lines)
const ranges = JSON.parse(JSON.stringify(this.fixedRanges)) // ObjectId -> String
const ranges = JSON.parse(JSON.stringify(this.doc.ranges)) // ObjectId -> String
s3Doc.ranges.should.deep.equal(ranges)
return done()
}
@ -1084,7 +1075,7 @@ describe('Archiving', function () {
throw error
}
doc.lines.should.deep.equal(this.doc.lines)
doc.ranges.should.deep.equal(this.fixedRanges)
doc.ranges.should.deep.equal(this.doc.ranges)
expect(doc.inS3).not.to.exist
return done()
})
View file
@ -20,73 +20,30 @@ const DocstoreClient = require('./helpers/DocstoreClient')
describe('Getting all docs', function () {
beforeEach(function (done) {
this.project_id = new ObjectId()
this.threadId1 = new ObjectId().toString()
this.threadId2 = new ObjectId().toString()
this.docs = [
{
_id: new ObjectId(),
lines: ['one', 'two', 'three'],
ranges: {
comments: [
{ id: new ObjectId().toString(), op: { t: this.threadId1 } },
],
changes: [
{
id: new ObjectId().toString(),
metadata: { user_id: 'user-id-1' },
},
],
},
ranges: { mock: 'one' },
rev: 2,
},
{
_id: new ObjectId(),
lines: ['aaa', 'bbb', 'ccc'],
ranges: {
changes: [
{
id: new ObjectId().toString(),
metadata: { user_id: 'user-id-2' },
},
],
},
ranges: { mock: 'two' },
rev: 4,
},
{
_id: new ObjectId(),
lines: ['111', '222', '333'],
ranges: {
comments: [
{ id: new ObjectId().toString(), op: { t: this.threadId2 } },
],
changes: [
{
id: new ObjectId().toString(),
metadata: { user_id: 'anonymous-user' },
},
],
},
ranges: { mock: 'three' },
rev: 6,
},
]
this.fixedRanges = this.docs.map(doc => {
if (!doc.ranges?.comments?.length) return doc.ranges
return {
...doc.ranges,
comments: [
{ ...doc.ranges.comments[0], id: doc.ranges.comments[0].op.t },
],
}
})
this.deleted_doc = {
_id: new ObjectId(),
lines: ['deleted'],
ranges: {
comments: [{ id: new ObjectId().toString(), op: { t: 'thread-id-3' } }],
changes: [
{ id: new ObjectId().toString(), metadata: { user_id: 'user-id-3' } },
],
},
ranges: { mock: 'four' },
rev: 8,
}
const version = 42
@ -139,7 +96,7 @@ describe('Getting all docs', function () {
})
})
it('getAllRanges should return all the (non-deleted) doc ranges', function (done) {
return it('getAllRanges should return all the (non-deleted) doc ranges', function (done) {
return DocstoreClient.getAllRanges(this.project_id, (error, res, docs) => {
if (error != null) {
throw error
@ -147,38 +104,9 @@ describe('Getting all docs', function () {
docs.length.should.equal(this.docs.length)
for (let i = 0; i < docs.length; i++) {
const doc = docs[i]
doc.ranges.should.deep.equal(this.fixedRanges[i])
doc.ranges.should.deep.equal(this.docs[i].ranges)
}
return done()
})
})
it('getTrackedChangesUserIds should return all the user ids from (non-deleted) ranges', function (done) {
DocstoreClient.getTrackedChangesUserIds(
this.project_id,
(error, res, userIds) => {
if (error != null) {
throw error
}
userIds.should.deep.equal(['user-id-1', 'user-id-2'])
done()
}
)
})
it('getCommentThreadIds should return all the thread ids from (non-deleted) ranges', function (done) {
DocstoreClient.getCommentThreadIds(
this.project_id,
(error, res, threadIds) => {
if (error != null) {
throw error
}
threadIds.should.deep.equal({
[this.docs[0]._id.toString()]: [this.threadId1],
[this.docs[2]._id.toString()]: [this.threadId2],
})
done()
}
)
})
})
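A sketch of the comment-id normalization these acceptance-test fixtures encode: when a comment's op carries a thread id (op.t), the expected comment id is made to match it, mirroring how `fixedRanges` is built above. The function name is illustrative and is not part of the test helpers.

function normalizeCommentIds(ranges) {
  if (!ranges?.comments?.length) return ranges
  return {
    ...ranges,
    comments: ranges.comments.map(comment =>
      // prefer the thread id from the op when present
      comment.op?.t ? { ...comment, id: comment.op.t } : comment
    ),
  }
}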
View file
@ -28,26 +28,10 @@ describe('Getting a doc', function () {
op: { i: 'foo', p: 3 },
meta: {
user_id: new ObjectId().toString(),
ts: new Date().toJSON(),
ts: new Date().toString(),
},
},
],
comments: [
{
id: new ObjectId().toString(),
op: { c: 'comment', p: 1, t: new ObjectId().toString() },
metadata: {
user_id: new ObjectId().toString(),
ts: new Date().toJSON(),
},
},
],
}
this.fixedRanges = {
...this.ranges,
comments: [
{ ...this.ranges.comments[0], id: this.ranges.comments[0].op.t },
],
}
return DocstoreApp.ensureRunning(() => {
return DocstoreClient.createDoc(
@ -76,7 +60,7 @@ describe('Getting a doc', function () {
if (error) return done(error)
doc.lines.should.deep.equal(this.lines)
doc.version.should.equal(this.version)
doc.ranges.should.deep.equal(this.fixedRanges)
doc.ranges.should.deep.equal(this.ranges)
return done()
}
)
@ -130,7 +114,7 @@ describe('Getting a doc', function () {
if (error) return done(error)
doc.lines.should.deep.equal(this.lines)
doc.version.should.equal(this.version)
doc.ranges.should.deep.equal(this.fixedRanges)
doc.ranges.should.deep.equal(this.ranges)
doc.deleted.should.equal(true)
return done()
}
View file
@ -1,28 +0,0 @@
const { db } = require('../../../app/js/mongodb')
const DocstoreApp = require('./helpers/DocstoreApp')
const DocstoreClient = require('./helpers/DocstoreClient')
const { expect } = require('chai')
describe('HealthChecker', function () {
beforeEach('start', function (done) {
DocstoreApp.ensureRunning(done)
})
beforeEach('clear docs collection', async function () {
await db.docs.deleteMany({})
})
let res
beforeEach('run health check', function (done) {
DocstoreClient.healthCheck((err, _res) => {
res = _res
done(err)
})
})
it('should return 200', function () {
res.statusCode.should.equal(200)
})
it('should not leave any cruft behind', async function () {
expect(await db.docs.find({}).toArray()).to.deep.equal([])
})
})
View file
@ -100,26 +100,6 @@ module.exports = DocstoreClient = {
)
},
getCommentThreadIds(projectId, callback) {
request.get(
{
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/comment-thread-ids`,
json: true,
},
callback
)
},
getTrackedChangesUserIds(projectId, callback) {
request.get(
{
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/tracked-changes-user-ids`,
json: true,
},
callback
)
},
updateDoc(projectId, docId, lines, version, ranges, callback) {
return request.post(
{
@ -201,13 +181,6 @@ module.exports = DocstoreClient = {
)
},
healthCheck(callback) {
request.get(
`http://127.0.0.1:${settings.internal.docstore.port}/health_check`,
callback
)
},
getS3Doc(projectId, docId, callback) {
getStringFromPersistor(
Persistor,
View file
@ -4,7 +4,7 @@ const modulePath = '../../../app/js/DocArchiveManager.js'
const SandboxedModule = require('sandboxed-module')
const { ObjectId } = require('mongodb-legacy')
const Errors = require('../../../app/js/Errors')
const StreamToBuffer = require('../../../app/js/StreamToBuffer')
const StreamToBuffer = require('../../../app/js/StreamToBuffer').promises
describe('DocArchiveManager', function () {
let DocArchiveManager,
@ -31,7 +31,6 @@ describe('DocArchiveManager', function () {
RangeManager = {
jsonRangesToMongo: sinon.stub().returns({ mongo: 'ranges' }),
fixCommentIds: sinon.stub(),
}
Settings = {
docstore: {
@ -143,6 +142,7 @@ describe('DocArchiveManager', function () {
}
MongoManager = {
promises: {
markDocAsArchived: sinon.stub().resolves(),
restoreArchivedDoc: sinon.stub().resolves(),
upsertIntoDocCollection: sinon.stub().resolves(),
@ -153,11 +153,13 @@ describe('DocArchiveManager', function () {
findDoc: sinon.stub().callsFake(fakeGetDoc),
getDocForArchiving: sinon.stub().callsFake(fakeGetDoc),
destroyProject: sinon.stub().resolves(),
},
}
// Wrap streamToBuffer so that we can pass in something that it expects (in
// this case, a Promise) rather than a stubbed stream object
streamToBuffer = {
promises: {
streamToBuffer: async () => {
const inputStream = new Promise(resolve => {
stream.on('data', data => resolve(data))
@ -171,6 +173,7 @@ describe('DocArchiveManager', function () {
return value
},
},
}
DocArchiveManager = SandboxedModule.require(modulePath, {
@ -189,13 +192,9 @@ describe('DocArchiveManager', function () {
describe('archiveDoc', function () {
it('should resolve when passed a valid document', async function () {
await expect(DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)).to
.eventually.be.fulfilled
})
it('should fix comment ids', async function () {
await DocArchiveManager.archiveDoc(projectId, mongoDocs[1]._id)
expect(RangeManager.fixCommentIds).to.have.been.called
await expect(
DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
).to.eventually.be.fulfilled
})
it('should throw an error if the doc has no lines', async function () {
@ -203,26 +202,26 @@ describe('DocArchiveManager', function () {
doc.lines = null
await expect(
DocArchiveManager.archiveDoc(projectId, doc._id)
DocArchiveManager.promises.archiveDoc(projectId, doc._id)
).to.eventually.be.rejectedWith('doc has no lines')
})
it('should add the schema version', async function () {
await DocArchiveManager.archiveDoc(projectId, mongoDocs[1]._id)
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[1]._id)
expect(StreamUtils.ReadableString).to.have.been.calledWith(
sinon.match(/"schema_v":1/)
)
})
it('should calculate the hex md5 sum of the content', async function () {
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(Crypto.createHash).to.have.been.calledWith('md5')
expect(HashUpdate).to.have.been.calledWith(archivedDocJson)
expect(HashDigest).to.have.been.calledWith('hex')
})
it('should pass the md5 hash to the object persistor for verification', async function () {
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(PersistorManager.sendStream).to.have.been.calledWith(
sinon.match.any,
@ -233,7 +232,7 @@ describe('DocArchiveManager', function () {
})
it('should pass the correct bucket and key to the persistor', async function () {
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(PersistorManager.sendStream).to.have.been.calledWith(
Settings.docstore.bucket,
@ -242,7 +241,7 @@ describe('DocArchiveManager', function () {
})
it('should create a stream from the encoded json and send it', async function () {
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(StreamUtils.ReadableString).to.have.been.calledWith(
archivedDocJson
)
@ -254,8 +253,8 @@ describe('DocArchiveManager', function () {
})
it('should mark the doc as archived', async function () {
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
expect(MongoManager.markDocAsArchived).to.have.been.calledWith(
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
projectId,
mongoDocs[0]._id,
mongoDocs[0].rev
@ -268,8 +267,8 @@ describe('DocArchiveManager', function () {
})
it('should bail out early', async function () {
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
expect(MongoManager.getDocForArchiving).to.not.have.been.called
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(MongoManager.promises.getDocForArchiving).to.not.have.been.called
})
})
@ -286,7 +285,7 @@ describe('DocArchiveManager', function () {
it('should return an error', async function () {
await expect(
DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
).to.eventually.be.rejectedWith('null bytes detected')
})
})
@ -297,19 +296,21 @@ describe('DocArchiveManager', function () {
describe('when the doc is in S3', function () {
beforeEach(function () {
MongoManager.findDoc = sinon.stub().resolves({ inS3: true, rev })
MongoManager.promises.findDoc = sinon
.stub()
.resolves({ inS3: true, rev })
docId = mongoDocs[0]._id
lines = ['doc', 'lines']
rev = 123
})
it('should resolve when passed a valid document', async function () {
await expect(DocArchiveManager.unarchiveDoc(projectId, docId)).to
.eventually.be.fulfilled
await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId))
.to.eventually.be.fulfilled
})
it('should test md5 validity with the raw buffer', async function () {
await DocArchiveManager.unarchiveDoc(projectId, docId)
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(HashUpdate).to.have.been.calledWith(
sinon.match.instanceOf(Buffer)
)
@ -318,17 +319,15 @@ describe('DocArchiveManager', function () {
it('should throw an error if the md5 does not match', async function () {
PersistorManager.getObjectMd5Hash.resolves('badf00d')
await expect(
DocArchiveManager.unarchiveDoc(projectId, docId)
DocArchiveManager.promises.unarchiveDoc(projectId, docId)
).to.eventually.be.rejected.and.be.instanceof(Errors.Md5MismatchError)
})
it('should restore the doc in Mongo', async function () {
await DocArchiveManager.unarchiveDoc(projectId, docId)
expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith(
projectId,
docId,
archivedDoc
)
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(
MongoManager.promises.restoreArchivedDoc
).to.have.been.calledWith(projectId, docId, archivedDoc)
})
describe('when archiving is not configured', function () {
@ -338,15 +337,15 @@ describe('DocArchiveManager', function () {
it('should error out on archived doc', async function () {
await expect(
DocArchiveManager.unarchiveDoc(projectId, docId)
DocArchiveManager.promises.unarchiveDoc(projectId, docId)
).to.eventually.be.rejected.and.match(
/found archived doc, but archiving backend is not configured/
)
})
it('should return early on non-archived doc', async function () {
MongoManager.findDoc = sinon.stub().resolves({ rev })
await DocArchiveManager.unarchiveDoc(projectId, docId)
MongoManager.promises.findDoc = sinon.stub().resolves({ rev })
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(PersistorManager.getObjectMd5Hash).to.not.have.been.called
})
})
@ -364,12 +363,10 @@ describe('DocArchiveManager', function () {
})
it('should return the docs lines', async function () {
await DocArchiveManager.unarchiveDoc(projectId, docId)
expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith(
projectId,
docId,
{ lines, rev }
)
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(
MongoManager.promises.restoreArchivedDoc
).to.have.been.calledWith(projectId, docId, { lines, rev })
})
})
@ -388,16 +385,14 @@ describe('DocArchiveManager', function () {
})
it('should return the doc lines and ranges', async function () {
await DocArchiveManager.unarchiveDoc(projectId, docId)
expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith(
projectId,
docId,
{
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(
MongoManager.promises.restoreArchivedDoc
).to.have.been.calledWith(projectId, docId, {
lines,
ranges: { mongo: 'ranges' },
rev: 456,
}
)
})
})
})
@ -411,12 +406,10 @@ describe('DocArchiveManager', function () {
})
it('should return only the doc lines', async function () {
await DocArchiveManager.unarchiveDoc(projectId, docId)
expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith(
projectId,
docId,
{ lines, rev: 456 }
)
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(
MongoManager.promises.restoreArchivedDoc
).to.have.been.calledWith(projectId, docId, { lines, rev: 456 })
})
})
@ -430,12 +423,10 @@ describe('DocArchiveManager', function () {
})
it('should use the rev obtained from Mongo', async function () {
await DocArchiveManager.unarchiveDoc(projectId, docId)
expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith(
projectId,
docId,
{ lines, rev }
)
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(
MongoManager.promises.restoreArchivedDoc
).to.have.been.calledWith(projectId, docId, { lines, rev })
})
})
@ -450,7 +441,7 @@ describe('DocArchiveManager', function () {
it('should throw an error', async function () {
await expect(
DocArchiveManager.unarchiveDoc(projectId, docId)
DocArchiveManager.promises.unarchiveDoc(projectId, docId)
).to.eventually.be.rejectedWith(
"I don't understand the doc format in s3"
)
@ -460,8 +451,8 @@ describe('DocArchiveManager', function () {
})
it('should not do anything if the file is already unarchived', async function () {
MongoManager.findDoc.resolves({ inS3: false })
await DocArchiveManager.unarchiveDoc(projectId, docId)
MongoManager.promises.findDoc.resolves({ inS3: false })
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(PersistorManager.getObjectStream).not.to.have.been.called
})
@ -470,7 +461,7 @@ describe('DocArchiveManager', function () {
.stub()
.rejects(new Errors.NotFoundError())
await expect(
DocArchiveManager.unarchiveDoc(projectId, docId)
DocArchiveManager.promises.unarchiveDoc(projectId, docId)
).to.eventually.be.rejected.and.be.instanceof(Errors.NotFoundError)
})
})
@ -478,11 +469,13 @@ describe('DocArchiveManager', function () {
describe('destroyProject', function () {
describe('when archiving is enabled', function () {
beforeEach(async function () {
await DocArchiveManager.destroyProject(projectId)
await DocArchiveManager.promises.destroyProject(projectId)
})
it('should delete the project in Mongo', function () {
expect(MongoManager.destroyProject).to.have.been.calledWith(projectId)
expect(MongoManager.promises.destroyProject).to.have.been.calledWith(
projectId
)
})
it('should delete the project in the persistor', function () {
@ -496,11 +489,13 @@ describe('DocArchiveManager', function () {
describe('when archiving is disabled', function () {
beforeEach(async function () {
Settings.docstore.backend = ''
await DocArchiveManager.destroyProject(projectId)
await DocArchiveManager.promises.destroyProject(projectId)
})
it('should delete the project in Mongo', function () {
expect(MongoManager.destroyProject).to.have.been.calledWith(projectId)
expect(MongoManager.promises.destroyProject).to.have.been.calledWith(
projectId
)
})
it('should not delete the project in the persistor', function () {
@ -511,35 +506,33 @@ describe('DocArchiveManager', function () {
describe('archiveAllDocs', function () {
it('should resolve with valid arguments', async function () {
await expect(DocArchiveManager.archiveAllDocs(projectId)).to.eventually.be
.fulfilled
await expect(DocArchiveManager.promises.archiveAllDocs(projectId)).to
.eventually.be.fulfilled
})
it('should archive all project docs which are not in s3', async function () {
await DocArchiveManager.archiveAllDocs(projectId)
await DocArchiveManager.promises.archiveAllDocs(projectId)
// not inS3
expect(MongoManager.markDocAsArchived).to.have.been.calledWith(
expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
projectId,
mongoDocs[0]._id
)
expect(MongoManager.markDocAsArchived).to.have.been.calledWith(
expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
projectId,
mongoDocs[1]._id
)
expect(MongoManager.markDocAsArchived).to.have.been.calledWith(
expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
projectId,
mongoDocs[4]._id
)
// inS3
expect(MongoManager.markDocAsArchived).not.to.have.been.calledWith(
projectId,
mongoDocs[2]._id
)
expect(MongoManager.markDocAsArchived).not.to.have.been.calledWith(
projectId,
mongoDocs[3]._id
)
expect(
MongoManager.promises.markDocAsArchived
).not.to.have.been.calledWith(projectId, mongoDocs[2]._id)
expect(
MongoManager.promises.markDocAsArchived
).not.to.have.been.calledWith(projectId, mongoDocs[3]._id)
})
describe('when archiving is not configured', function () {
@ -548,20 +541,21 @@ describe('DocArchiveManager', function () {
})
it('should bail out early', async function () {
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
expect(MongoManager.getNonArchivedProjectDocIds).to.not.have.been.called
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(MongoManager.promises.getNonArchivedProjectDocIds).to.not.have
.been.called
})
})
})
describe('unArchiveAllDocs', function () {
it('should resolve with valid arguments', async function () {
await expect(DocArchiveManager.unArchiveAllDocs(projectId)).to.eventually
.be.fulfilled
await expect(DocArchiveManager.promises.unArchiveAllDocs(projectId)).to
.eventually.be.fulfilled
})
it('should unarchive all inS3 docs', async function () {
await DocArchiveManager.unArchiveAllDocs(projectId)
await DocArchiveManager.promises.unArchiveAllDocs(projectId)
for (const doc of archivedDocs) {
expect(PersistorManager.getObjectStream).to.have.been.calledWith(
@ -577,9 +571,9 @@ describe('DocArchiveManager', function () {
})
it('should bail out early', async function () {
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
expect(MongoManager.getNonDeletedArchivedProjectDocs).to.not.have.been
.called
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(MongoManager.promises.getNonDeletedArchivedProjectDocs).to.not
.have.been.called
})
})
})
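A minimal sketch of the SandboxedModule pattern these unit tests rely on: the module under test is loaded with its require() calls swapped for stubs, so collaborators such as MongoManager can be replaced wholesale. The stub shape and require key below are illustrative, not the exact beforeEach setup from this file.

const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')

const MongoManager = {
  promises: { findDoc: sinon.stub().resolves({ inS3: false, rev: 1 }) },
}

// load the real module, but with './MongoManager' replaced by the stub above
const DocArchiveManager = SandboxedModule.require(
  '../../../app/js/DocArchiveManager.js',
  { requires: { './MongoManager': MongoManager } }
)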
View file
@ -17,22 +17,25 @@ describe('DocManager', function () {
this.version = 42
this.MongoManager = {
promises: {
findDoc: sinon.stub(),
getProjectsDocs: sinon.stub(),
patchDoc: sinon.stub().resolves(),
upsertIntoDocCollection: sinon.stub().resolves(),
},
}
this.DocArchiveManager = {
promises: {
unarchiveDoc: sinon.stub(),
unArchiveAllDocs: sinon.stub(),
archiveDoc: sinon.stub().resolves(),
},
}
this.RangeManager = {
jsonRangesToMongo(r) {
return r
},
shouldUpdateRanges: sinon.stub().returns(false),
fixCommentIds: sinon.stub(),
}
this.settings = { docstore: {} }
@ -49,7 +52,7 @@ describe('DocManager', function () {
describe('getFullDoc', function () {
beforeEach(function () {
this.DocManager._getDoc = sinon.stub()
this.DocManager.promises._getDoc = sinon.stub()
this.doc = {
_id: this.doc_id,
lines: ['2134'],
@ -57,10 +60,13 @@ describe('DocManager', function () {
})
it('should call get doc with a quick filter', async function () {
this.DocManager._getDoc.resolves(this.doc)
const doc = await this.DocManager.getFullDoc(this.project_id, this.doc_id)
this.DocManager.promises._getDoc.resolves(this.doc)
const doc = await this.DocManager.promises.getFullDoc(
this.project_id,
this.doc_id
)
doc.should.equal(this.doc)
this.DocManager._getDoc
this.DocManager.promises._getDoc
.calledWith(this.project_id, this.doc_id, {
lines: true,
rev: true,
@ -73,27 +79,27 @@ describe('DocManager', function () {
})
it('should return error when get doc errors', async function () {
this.DocManager._getDoc.rejects(this.stubbedError)
this.DocManager.promises._getDoc.rejects(this.stubbedError)
await expect(
this.DocManager.getFullDoc(this.project_id, this.doc_id)
this.DocManager.promises.getFullDoc(this.project_id, this.doc_id)
).to.be.rejectedWith(this.stubbedError)
})
})
describe('getRawDoc', function () {
beforeEach(function () {
this.DocManager._getDoc = sinon.stub()
this.DocManager.promises._getDoc = sinon.stub()
this.doc = { lines: ['2134'] }
})
it('should call get doc with a quick filter', async function () {
this.DocManager._getDoc.resolves(this.doc)
const content = await this.DocManager.getDocLines(
this.DocManager.promises._getDoc.resolves(this.doc)
const doc = await this.DocManager.promises.getDocLines(
this.project_id,
this.doc_id
)
content.should.equal(this.doc.lines.join('\n'))
this.DocManager._getDoc
doc.should.equal(this.doc)
this.DocManager.promises._getDoc
.calledWith(this.project_id, this.doc_id, {
lines: true,
inS3: true,
@ -102,46 +108,11 @@ describe('DocManager', function () {
})
it('should return error when get doc errors', async function () {
this.DocManager._getDoc.rejects(this.stubbedError)
this.DocManager.promises._getDoc.rejects(this.stubbedError)
await expect(
this.DocManager.getDocLines(this.project_id, this.doc_id)
this.DocManager.promises.getDocLines(this.project_id, this.doc_id)
).to.be.rejectedWith(this.stubbedError)
})
it('should return error when get doc does not exist', async function () {
this.DocManager._getDoc.resolves(null)
await expect(
this.DocManager.getDocLines(this.project_id, this.doc_id)
).to.be.rejectedWith(Errors.NotFoundError)
})
it('should return error when get doc has no lines', async function () {
this.DocManager._getDoc.resolves({})
await expect(
this.DocManager.getDocLines(this.project_id, this.doc_id)
).to.be.rejectedWith(Errors.DocWithoutLinesError)
})
})
describe('_getDoc', function () {
it('should return error when get doc does not exist', async function () {
this.MongoManager.findDoc.resolves(null)
await expect(
this.DocManager._getDoc(this.project_id, this.doc_id, { inS3: true })
).to.be.rejectedWith(Errors.NotFoundError)
})
it('should fix comment ids', async function () {
this.MongoManager.findDoc.resolves({
_id: this.doc_id,
ranges: {},
})
await this.DocManager._getDoc(this.project_id, this.doc_id, {
inS3: true,
ranges: true,
})
expect(this.RangeManager.fixCommentIds).to.have.been.called
})
})
describe('getDoc', function () {
@ -157,25 +128,26 @@ describe('DocManager', function () {
describe('when using a filter', function () {
beforeEach(function () {
this.MongoManager.findDoc.resolves(this.doc)
this.MongoManager.promises.findDoc.resolves(this.doc)
})
it('should error if inS3 is not set to true', async function () {
await expect(
this.DocManager._getDoc(this.project_id, this.doc_id, {
this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
inS3: false,
})
).to.be.rejected
})
it('should always get inS3 even when no filter is passed', async function () {
await expect(this.DocManager._getDoc(this.project_id, this.doc_id)).to
.be.rejected
this.MongoManager.findDoc.called.should.equal(false)
await expect(
this.DocManager.promises._getDoc(this.project_id, this.doc_id)
).to.be.rejected
this.MongoManager.promises.findDoc.called.should.equal(false)
})
it('should not error if inS3 is set to true', async function () {
await this.DocManager._getDoc(this.project_id, this.doc_id, {
await this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
inS3: true,
})
})
@ -183,8 +155,8 @@ describe('DocManager', function () {
describe('when the doc is in the doc collection', function () {
beforeEach(async function () {
this.MongoManager.findDoc.resolves(this.doc)
this.result = await this.DocManager._getDoc(
this.MongoManager.promises.findDoc.resolves(this.doc)
this.result = await this.DocManager.promises._getDoc(
this.project_id,
this.doc_id,
{ version: true, inS3: true }
@ -192,7 +164,7 @@ describe('DocManager', function () {
})
it('should get the doc from the doc collection', function () {
this.MongoManager.findDoc
this.MongoManager.promises.findDoc
.calledWith(this.project_id, this.doc_id)
.should.equal(true)
})
@ -205,9 +177,9 @@ describe('DocManager', function () {
describe('when MongoManager.findDoc errors', function () {
it('should return the error', async function () {
this.MongoManager.findDoc.rejects(this.stubbedError)
this.MongoManager.promises.findDoc.rejects(this.stubbedError)
await expect(
this.DocManager._getDoc(this.project_id, this.doc_id, {
this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
version: true,
inS3: true,
})
@ -230,15 +202,15 @@ describe('DocManager', function () {
version: 2,
inS3: false,
}
this.MongoManager.findDoc.resolves(this.doc)
this.DocArchiveManager.unarchiveDoc.callsFake(
this.MongoManager.promises.findDoc.resolves(this.doc)
this.DocArchiveManager.promises.unarchiveDoc.callsFake(
async (projectId, docId) => {
this.MongoManager.findDoc.resolves({
this.MongoManager.promises.findDoc.resolves({
...this.unarchivedDoc,
})
}
)
this.result = await this.DocManager._getDoc(
this.result = await this.DocManager.promises._getDoc(
this.project_id,
this.doc_id,
{
@ -249,13 +221,13 @@ describe('DocManager', function () {
})
it('should call the DocArchive to unarchive the doc', function () {
this.DocArchiveManager.unarchiveDoc
this.DocArchiveManager.promises.unarchiveDoc
.calledWith(this.project_id, this.doc_id)
.should.equal(true)
})
it('should look up the doc twice', function () {
this.MongoManager.findDoc.calledTwice.should.equal(true)
this.MongoManager.promises.findDoc.calledTwice.should.equal(true)
})
it('should return the doc', function () {
@ -267,9 +239,9 @@ describe('DocManager', function () {
describe('when the doc does not exist in the docs collection', function () {
it('should return a NotFoundError', async function () {
this.MongoManager.findDoc.resolves(null)
this.MongoManager.promises.findDoc.resolves(null)
await expect(
this.DocManager._getDoc(this.project_id, this.doc_id, {
this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
version: true,
inS3: true,
})
@ -290,27 +262,23 @@ describe('DocManager', function () {
lines: ['mock-lines'],
},
]
this.MongoManager.getProjectsDocs.resolves(this.docs)
this.DocArchiveManager.unArchiveAllDocs.resolves(this.docs)
this.filter = { lines: true, ranges: true }
this.result = await this.DocManager.getAllNonDeletedDocs(
this.MongoManager.promises.getProjectsDocs.resolves(this.docs)
this.DocArchiveManager.promises.unArchiveAllDocs.resolves(this.docs)
this.filter = { lines: true }
this.result = await this.DocManager.promises.getAllNonDeletedDocs(
this.project_id,
this.filter
)
})
it('should get the project from the database', function () {
this.MongoManager.getProjectsDocs.should.have.been.calledWith(
this.MongoManager.promises.getProjectsDocs.should.have.been.calledWith(
this.project_id,
{ include_deleted: false },
this.filter
)
})
it('should fix comment ids', async function () {
expect(this.RangeManager.fixCommentIds).to.have.been.called
})
it('should return the docs', function () {
expect(this.result).to.deep.equal(this.docs)
})
@ -318,10 +286,13 @@ describe('DocManager', function () {
describe('when there are no docs for the project', function () {
it('should return a NotFoundError', async function () {
this.MongoManager.getProjectsDocs.resolves(null)
this.DocArchiveManager.unArchiveAllDocs.resolves(null)
this.MongoManager.promises.getProjectsDocs.resolves(null)
this.DocArchiveManager.promises.unArchiveAllDocs.resolves(null)
await expect(
this.DocManager.getAllNonDeletedDocs(this.project_id, this.filter)
this.DocManager.promises.getAllNonDeletedDocs(
this.project_id,
this.filter
)
).to.be.rejectedWith(`No docs for project ${this.project_id}`)
})
})
@ -332,7 +303,7 @@ describe('DocManager', function () {
beforeEach(function () {
this.lines = ['mock', 'doc', 'lines']
this.rev = 77
this.MongoManager.findDoc.resolves({
this.MongoManager.promises.findDoc.resolves({
_id: new ObjectId(this.doc_id),
})
this.meta = {}
@ -340,7 +311,7 @@ describe('DocManager', function () {
describe('standard path', function () {
beforeEach(async function () {
await this.DocManager.patchDoc(
await this.DocManager.promises.patchDoc(
this.project_id,
this.doc_id,
this.meta
@ -348,14 +319,14 @@ describe('DocManager', function () {
})
it('should get the doc', function () {
expect(this.MongoManager.findDoc).to.have.been.calledWith(
expect(this.MongoManager.promises.findDoc).to.have.been.calledWith(
this.project_id,
this.doc_id
)
})
it('should persist the meta', function () {
expect(this.MongoManager.patchDoc).to.have.been.calledWith(
expect(this.MongoManager.promises.patchDoc).to.have.been.calledWith(
this.project_id,
this.doc_id,
this.meta
@ -368,7 +339,7 @@ describe('DocManager', function () {
this.settings.docstore.archiveOnSoftDelete = false
this.meta.deleted = true
await this.DocManager.patchDoc(
await this.DocManager.promises.patchDoc(
this.project_id,
this.doc_id,
this.meta
@ -376,7 +347,8 @@ describe('DocManager', function () {
})
it('should not flush the doc out of mongo', function () {
expect(this.DocArchiveManager.archiveDoc).to.not.have.been.called
expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been
.called
})
})
@ -384,7 +356,7 @@ describe('DocManager', function () {
beforeEach(async function () {
this.settings.docstore.archiveOnSoftDelete = false
this.meta.deleted = false
await this.DocManager.patchDoc(
await this.DocManager.promises.patchDoc(
this.project_id,
this.doc_id,
this.meta
@ -392,7 +364,8 @@ describe('DocManager', function () {
})
it('should not flush the doc out of mongo', function () {
expect(this.DocArchiveManager.archiveDoc).to.not.have.been.called
expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been
.called
})
})
@ -404,7 +377,7 @@ describe('DocManager', function () {
describe('when the background flush succeeds', function () {
beforeEach(async function () {
await this.DocManager.patchDoc(
await this.DocManager.promises.patchDoc(
this.project_id,
this.doc_id,
this.meta
@ -416,18 +389,17 @@ describe('DocManager', function () {
})
it('should flush the doc out of mongo', function () {
expect(this.DocArchiveManager.archiveDoc).to.have.been.calledWith(
this.project_id,
this.doc_id
)
expect(
this.DocArchiveManager.promises.archiveDoc
).to.have.been.calledWith(this.project_id, this.doc_id)
})
})
describe('when the background flush fails', function () {
beforeEach(async function () {
this.err = new Error('foo')
this.DocArchiveManager.archiveDoc.rejects(this.err)
await this.DocManager.patchDoc(
this.DocArchiveManager.promises.archiveDoc.rejects(this.err)
await this.DocManager.promises.patchDoc(
this.project_id,
this.doc_id,
this.meta
@ -450,9 +422,9 @@ describe('DocManager', function () {
describe('when the doc does not exist', function () {
it('should return a NotFoundError', async function () {
this.MongoManager.findDoc.resolves(null)
this.MongoManager.promises.findDoc.resolves(null)
await expect(
this.DocManager.patchDoc(this.project_id, this.doc_id, {})
this.DocManager.promises.patchDoc(this.project_id, this.doc_id, {})
).to.be.rejectedWith(
`No such project/doc to delete: ${this.project_id}/${this.doc_id}`
)
@ -498,13 +470,13 @@ describe('DocManager', function () {
ranges: this.originalRanges,
}
this.DocManager._getDoc = sinon.stub()
this.DocManager.promises._getDoc = sinon.stub()
})
describe('when only the doc lines have changed', function () {
beforeEach(async function () {
this.DocManager._getDoc = sinon.stub().resolves(this.doc)
this.result = await this.DocManager.updateDoc(
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
@ -514,7 +486,7 @@ describe('DocManager', function () {
})
it('should get the existing doc', function () {
this.DocManager._getDoc
this.DocManager.promises._getDoc
.calledWith(this.project_id, this.doc_id, {
version: true,
rev: true,
@ -526,7 +498,7 @@ describe('DocManager', function () {
})
it('should upsert the document to the doc collection', function () {
this.MongoManager.upsertIntoDocCollection
this.MongoManager.promises.upsertIntoDocCollection
.calledWith(this.project_id, this.doc_id, this.rev, {
lines: this.newDocLines,
})
@ -540,9 +512,9 @@ describe('DocManager', function () {
describe('when the doc ranges have changed', function () {
beforeEach(async function () {
this.DocManager._getDoc = sinon.stub().resolves(this.doc)
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.RangeManager.shouldUpdateRanges.returns(true)
this.result = await this.DocManager.updateDoc(
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.oldDocLines,
@ -552,7 +524,7 @@ describe('DocManager', function () {
})
it('should upsert the ranges', function () {
this.MongoManager.upsertIntoDocCollection
this.MongoManager.promises.upsertIntoDocCollection
.calledWith(this.project_id, this.doc_id, this.rev, {
ranges: this.newRanges,
})
@ -566,8 +538,8 @@ describe('DocManager', function () {
describe('when only the version has changed', function () {
beforeEach(async function () {
this.DocManager._getDoc = sinon.stub().resolves(this.doc)
this.result = await this.DocManager.updateDoc(
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.oldDocLines,
@ -577,7 +549,7 @@ describe('DocManager', function () {
})
it('should update the version', function () {
this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith(
this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
this.project_id,
this.doc_id,
this.rev,
@ -592,8 +564,8 @@ describe('DocManager', function () {
describe('when the doc has not changed at all', function () {
beforeEach(async function () {
this.DocManager._getDoc = sinon.stub().resolves(this.doc)
this.result = await this.DocManager.updateDoc(
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.oldDocLines,
@ -603,7 +575,9 @@ describe('DocManager', function () {
})
it('should not update the ranges or lines or version', function () {
this.MongoManager.upsertIntoDocCollection.called.should.equal(false)
this.MongoManager.promises.upsertIntoDocCollection.called.should.equal(
false
)
})
it('should return the old rev and modified == false', function () {
@ -614,7 +588,7 @@ describe('DocManager', function () {
describe('when the version is null', function () {
it('should return an error', async function () {
await expect(
this.DocManager.updateDoc(
this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
@ -628,7 +602,7 @@ describe('DocManager', function () {
describe('when the lines are null', function () {
it('should return an error', async function () {
await expect(
this.DocManager.updateDoc(
this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
null,
@ -642,7 +616,7 @@ describe('DocManager', function () {
describe('when the ranges are null', function () {
it('should return an error', async function () {
await expect(
this.DocManager.updateDoc(
this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
@ -656,9 +630,9 @@ describe('DocManager', function () {
describe('when there is a generic error getting the doc', function () {
beforeEach(async function () {
this.error = new Error('doc could not be found')
this.DocManager._getDoc = sinon.stub().rejects(this.error)
this.DocManager.promises._getDoc = sinon.stub().rejects(this.error)
await expect(
this.DocManager.updateDoc(
this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
@ -669,15 +643,16 @@ describe('DocManager', function () {
})
it('should not upsert the document to the doc collection', function () {
this.MongoManager.upsertIntoDocCollection.should.not.have.been.called
this.MongoManager.promises.upsertIntoDocCollection.should.not.have.been
.called
})
})
describe('when the version was decremented', function () {
it('should return an error', async function () {
this.DocManager._getDoc = sinon.stub().resolves(this.doc)
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
await expect(
this.DocManager.updateDoc(
this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
@ -690,8 +665,8 @@ describe('DocManager', function () {
describe('when the doc lines have not changed', function () {
beforeEach(async function () {
this.DocManager._getDoc = sinon.stub().resolves(this.doc)
this.result = await this.DocManager.updateDoc(
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.oldDocLines.slice(),
@ -701,7 +676,9 @@ describe('DocManager', function () {
})
it('should not update the doc', function () {
this.MongoManager.upsertIntoDocCollection.called.should.equal(false)
this.MongoManager.promises.upsertIntoDocCollection.called.should.equal(
false
)
})
it('should return the existing rev', function () {
@ -711,8 +688,8 @@ describe('DocManager', function () {
describe('when the doc does not exist', function () {
beforeEach(async function () {
this.DocManager._getDoc = sinon.stub().resolves(null)
this.result = await this.DocManager.updateDoc(
this.DocManager.promises._getDoc = sinon.stub().resolves(null)
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
@ -722,7 +699,7 @@ describe('DocManager', function () {
})
it('should upsert the document to the doc collection', function () {
this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith(
this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
this.project_id,
this.doc_id,
undefined,
@ -741,12 +718,12 @@ describe('DocManager', function () {
describe('when another update is racing', function () {
beforeEach(async function () {
this.DocManager._getDoc = sinon.stub().resolves(this.doc)
this.MongoManager.upsertIntoDocCollection
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.MongoManager.promises.upsertIntoDocCollection
.onFirstCall()
.rejects(new Errors.DocRevValueError())
this.RangeManager.shouldUpdateRanges.returns(true)
this.result = await this.DocManager.updateDoc(
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
@ -756,7 +733,7 @@ describe('DocManager', function () {
})
it('should upsert the doc twice', function () {
this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith(
this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
this.project_id,
this.doc_id,
this.rev,
@ -766,7 +743,8 @@ describe('DocManager', function () {
version: this.version + 1,
}
)
this.MongoManager.upsertIntoDocCollection.should.have.been.calledTwice
this.MongoManager.promises.upsertIntoDocCollection.should.have.been
.calledTwice
})
it('should return the new rev', function () {
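The DocManager tests above stub collaborators with promise-style sinon fakes, while the HttpController tests below stub callback-style. A side-by-side sketch of the two styles this diff toggles between (values are illustrative):

const sinon = require('sinon')

// async/await flavour: the handler awaits the stub
const updateDocAsync = sinon.stub().resolves({ modified: true, rev: 5 })

// callback flavour: the handler passes a callback that receives (err, modified, rev)
const updateDocCallback = sinon.stub().yields(null, true, 5)

// await updateDocAsync(...)                         -> { modified: true, rev: 5 }
// updateDocCallback(..., (err, modified, rev) => {}) -> err === null, modified === true, rev === 5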
View file
@ -14,7 +14,7 @@ describe('HttpController', function () {
max_doc_length: 2 * 1024 * 1024,
}
this.DocArchiveManager = {
unArchiveAllDocs: sinon.stub().returns(),
unArchiveAllDocs: sinon.stub().yields(),
}
this.DocManager = {}
this.HttpController = SandboxedModule.require(modulePath, {
@ -54,13 +54,15 @@ describe('HttpController', function () {
describe('getDoc', function () {
describe('without deleted docs', function () {
beforeEach(async function () {
beforeEach(function () {
this.req.params = {
project_id: this.projectId,
doc_id: this.docId,
}
this.DocManager.getFullDoc = sinon.stub().resolves(this.doc)
await this.HttpController.getDoc(this.req, this.res, this.next)
this.DocManager.getFullDoc = sinon
.stub()
.callsArgWith(2, null, this.doc)
this.HttpController.getDoc(this.req, this.res, this.next)
})
it('should get the document with the version (including deleted)', function () {
@ -87,24 +89,26 @@ describe('HttpController', function () {
project_id: this.projectId,
doc_id: this.docId,
}
this.DocManager.getFullDoc = sinon.stub().resolves(this.deletedDoc)
this.DocManager.getFullDoc = sinon
.stub()
.callsArgWith(2, null, this.deletedDoc)
})
it('should get the doc from the doc manager', async function () {
await this.HttpController.getDoc(this.req, this.res, this.next)
it('should get the doc from the doc manager', function () {
this.HttpController.getDoc(this.req, this.res, this.next)
this.DocManager.getFullDoc
.calledWith(this.projectId, this.docId)
.should.equal(true)
})
it('should return 404 if the query string delete is not set ', async function () {
await this.HttpController.getDoc(this.req, this.res, this.next)
it('should return 404 if the query string delete is not set ', function () {
this.HttpController.getDoc(this.req, this.res, this.next)
this.res.sendStatus.calledWith(404).should.equal(true)
})
it('should return the doc as JSON if include_deleted is set to true', async function () {
it('should return the doc as JSON if include_deleted is set to true', function () {
this.req.query.include_deleted = 'true'
await this.HttpController.getDoc(this.req, this.res, this.next)
this.HttpController.getDoc(this.req, this.res, this.next)
this.res.json
.calledWith({
_id: this.docId,
@ -119,15 +123,13 @@ describe('HttpController', function () {
})
describe('getRawDoc', function () {
beforeEach(async function () {
beforeEach(function () {
this.req.params = {
project_id: this.projectId,
doc_id: this.docId,
}
this.DocManager.getDocLines = sinon
.stub()
.resolves(this.doc.lines.join('\n'))
await this.HttpController.getRawDoc(this.req, this.res, this.next)
this.DocManager.getDocLines = sinon.stub().callsArgWith(2, null, this.doc)
this.HttpController.getRawDoc(this.req, this.res, this.next)
})
it('should get the document without the version', function () {
@ -152,7 +154,7 @@ describe('HttpController', function () {
describe('getAllDocs', function () {
describe('normally', function () {
beforeEach(async function () {
beforeEach(function () {
this.req.params = { project_id: this.projectId }
this.docs = [
{
@ -166,8 +168,10 @@ describe('HttpController', function () {
rev: 4,
},
]
this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs)
await this.HttpController.getAllDocs(this.req, this.res, this.next)
this.DocManager.getAllNonDeletedDocs = sinon
.stub()
.callsArgWith(2, null, this.docs)
this.HttpController.getAllDocs(this.req, this.res, this.next)
})
it('should get all the (non-deleted) docs', function () {
@ -195,7 +199,7 @@ describe('HttpController', function () {
})
describe('with null lines', function () {
beforeEach(async function () {
beforeEach(function () {
this.req.params = { project_id: this.projectId }
this.docs = [
{
@ -209,8 +213,10 @@ describe('HttpController', function () {
rev: 4,
},
]
this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs)
await this.HttpController.getAllDocs(this.req, this.res, this.next)
this.DocManager.getAllNonDeletedDocs = sinon
.stub()
.callsArgWith(2, null, this.docs)
this.HttpController.getAllDocs(this.req, this.res, this.next)
})
it('should return the doc with fallback lines', function () {
@ -232,7 +238,7 @@ describe('HttpController', function () {
})
describe('with a null doc', function () {
beforeEach(async function () {
beforeEach(function () {
this.req.params = { project_id: this.projectId }
this.docs = [
{
@ -247,8 +253,10 @@ describe('HttpController', function () {
rev: 4,
},
]
this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs)
await this.HttpController.getAllDocs(this.req, this.res, this.next)
this.DocManager.getAllNonDeletedDocs = sinon
.stub()
.callsArgWith(2, null, this.docs)
this.HttpController.getAllDocs(this.req, this.res, this.next)
})
it('should return the non null docs as JSON', function () {
@ -284,7 +292,7 @@ describe('HttpController', function () {
describe('getAllRanges', function () {
describe('normally', function () {
beforeEach(async function () {
beforeEach(function () {
this.req.params = { project_id: this.projectId }
this.docs = [
{
@ -296,8 +304,10 @@ describe('HttpController', function () {
ranges: { mock_ranges: 'two' },
},
]
this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs)
await this.HttpController.getAllRanges(this.req, this.res, this.next)
this.DocManager.getAllNonDeletedDocs = sinon
.stub()
.callsArgWith(2, null, this.docs)
this.HttpController.getAllRanges(this.req, this.res, this.next)
})
it('should get all the (non-deleted) doc ranges', function () {
@ -332,17 +342,16 @@ describe('HttpController', function () {
})
describe('when the doc lines exist and were updated', function () {
beforeEach(async function () {
beforeEach(function () {
this.req.body = {
lines: (this.lines = ['hello', 'world']),
version: (this.version = 42),
ranges: (this.ranges = { changes: 'mock' }),
}
this.rev = 5
this.DocManager.updateDoc = sinon
.stub()
.resolves({ modified: true, rev: this.rev })
await this.HttpController.updateDoc(this.req, this.res, this.next)
.yields(null, true, (this.rev = 5))
this.HttpController.updateDoc(this.req, this.res, this.next)
})
it('should update the document', function () {
@ -365,17 +374,16 @@ describe('HttpController', function () {
})
describe('when the doc lines exist and were not updated', function () {
beforeEach(async function () {
beforeEach(function () {
this.req.body = {
lines: (this.lines = ['hello', 'world']),
version: (this.version = 42),
ranges: {},
}
this.rev = 5
this.DocManager.updateDoc = sinon
.stub()
.resolves({ modified: false, rev: this.rev })
await this.HttpController.updateDoc(this.req, this.res, this.next)
.yields(null, false, (this.rev = 5))
this.HttpController.updateDoc(this.req, this.res, this.next)
})
it('should return a modified status', function () {
@ -386,12 +394,10 @@ describe('HttpController', function () {
})
describe('when the doc lines are not provided', function () {
beforeEach(async function () {
beforeEach(function () {
this.req.body = { version: 42, ranges: {} }
this.DocManager.updateDoc = sinon
.stub()
.resolves({ modified: false, rev: 0 })
await this.HttpController.updateDoc(this.req, this.res, this.next)
this.DocManager.updateDoc = sinon.stub().yields(null, false)
this.HttpController.updateDoc(this.req, this.res, this.next)
})
it('should not update the document', function () {
@ -404,12 +410,10 @@ describe('HttpController', function () {
})
describe('when the doc version is not provided', function () {
beforeEach(async function () {
beforeEach(function () {
this.req.body = { version: 42, lines: ['hello world'] }
this.DocManager.updateDoc = sinon
.stub()
.resolves({ modified: false, rev: 0 })
await this.HttpController.updateDoc(this.req, this.res, this.next)
this.DocManager.updateDoc = sinon.stub().yields(null, false)
this.HttpController.updateDoc(this.req, this.res, this.next)
})
it('should not update the document', function () {
@ -422,12 +426,10 @@ describe('HttpController', function () {
})
describe('when the doc ranges are not provided', function () {
beforeEach(async function () {
beforeEach(function () {
this.req.body = { lines: ['foo'], version: 42 }
this.DocManager.updateDoc = sinon
.stub()
.resolves({ modified: false, rev: 0 })
await this.HttpController.updateDoc(this.req, this.res, this.next)
this.DocManager.updateDoc = sinon.stub().yields(null, false)
this.HttpController.updateDoc(this.req, this.res, this.next)
})
it('should not update the document', function () {
@ -440,20 +442,13 @@ describe('HttpController', function () {
})
describe('when the doc body is too large', function () {
beforeEach(async function () {
beforeEach(function () {
this.req.body = {
lines: (this.lines = Array(2049).fill('a'.repeat(1024))),
version: (this.version = 42),
ranges: (this.ranges = { changes: 'mock' }),
}
this.DocManager.updateDoc = sinon
.stub()
.resolves({ modified: false, rev: 0 })
await this.HttpController.updateDoc(this.req, this.res, this.next)
})
it('should not update the document', function () {
this.DocManager.updateDoc.called.should.equal(false)
this.HttpController.updateDoc(this.req, this.res, this.next)
})
it('should return a 413 (too large) response', function () {
@ -467,14 +462,14 @@ describe('HttpController', function () {
})
describe('patchDoc', function () {
beforeEach(async function () {
beforeEach(function () {
this.req.params = {
project_id: this.projectId,
doc_id: this.docId,
}
this.req.body = { name: 'foo.tex' }
this.DocManager.patchDoc = sinon.stub().resolves()
await this.HttpController.patchDoc(this.req, this.res, this.next)
this.DocManager.patchDoc = sinon.stub().yields(null)
this.HttpController.patchDoc(this.req, this.res, this.next)
})
it('should delete the document', function () {
@ -489,11 +484,11 @@ describe('HttpController', function () {
})
describe('with an invalid payload', function () {
beforeEach(async function () {
beforeEach(function () {
this.req.body = { cannot: 'happen' }
this.DocManager.patchDoc = sinon.stub().resolves()
await this.HttpController.patchDoc(this.req, this.res, this.next)
this.DocManager.patchDoc = sinon.stub().yields(null)
this.HttpController.patchDoc(this.req, this.res, this.next)
})
it('should log a message', function () {
@ -514,10 +509,10 @@ describe('HttpController', function () {
})
describe('archiveAllDocs', function () {
beforeEach(async function () {
beforeEach(function () {
this.req.params = { project_id: this.projectId }
this.DocArchiveManager.archiveAllDocs = sinon.stub().resolves()
await this.HttpController.archiveAllDocs(this.req, this.res, this.next)
this.DocArchiveManager.archiveAllDocs = sinon.stub().callsArg(1)
this.HttpController.archiveAllDocs(this.req, this.res, this.next)
})
it('should archive the project', function () {
@ -537,12 +532,9 @@ describe('HttpController', function () {
})
describe('on success', function () {
beforeEach(async function () {
await this.HttpController.unArchiveAllDocs(
this.req,
this.res,
this.next
)
beforeEach(function (done) {
this.res.sendStatus.callsFake(() => done())
this.HttpController.unArchiveAllDocs(this.req, this.res, this.next)
})
it('returns a 200', function () {
@ -551,15 +543,12 @@ describe('HttpController', function () {
})
describe("when the archived rev doesn't match", function () {
beforeEach(async function () {
this.DocArchiveManager.unArchiveAllDocs.rejects(
beforeEach(function (done) {
this.res.sendStatus.callsFake(() => done())
this.DocArchiveManager.unArchiveAllDocs.yields(
new Errors.DocRevValueError('bad rev')
)
await this.HttpController.unArchiveAllDocs(
this.req,
this.res,
this.next
)
this.HttpController.unArchiveAllDocs(this.req, this.res, this.next)
})
it('returns a 409', function () {
@ -569,10 +558,10 @@ describe('HttpController', function () {
})
describe('destroyProject', function () {
beforeEach(async function () {
beforeEach(function () {
this.req.params = { project_id: this.projectId }
this.DocArchiveManager.destroyProject = sinon.stub().resolves()
await this.HttpController.destroyProject(this.req, this.res, this.next)
this.DocArchiveManager.destroyProject = sinon.stub().callsArg(1)
this.HttpController.destroyProject(this.req, this.res, this.next)
})
it('should destroy the docs', function () {

View file

@ -41,7 +41,7 @@ describe('MongoManager', function () {
this.doc = { name: 'mock-doc' }
this.db.docs.findOne = sinon.stub().resolves(this.doc)
this.filter = { lines: true }
this.result = await this.MongoManager.findDoc(
this.result = await this.MongoManager.promises.findDoc(
this.projectId,
this.docId,
this.filter
@ -70,7 +70,11 @@ describe('MongoManager', function () {
describe('patchDoc', function () {
beforeEach(async function () {
this.meta = { name: 'foo.tex' }
await this.MongoManager.patchDoc(this.projectId, this.docId, this.meta)
await this.MongoManager.promises.patchDoc(
this.projectId,
this.docId,
this.meta
)
})
it('should pass the parameter along', function () {
@ -100,7 +104,7 @@ describe('MongoManager', function () {
describe('with included_deleted = false', function () {
beforeEach(async function () {
this.result = await this.MongoManager.getProjectsDocs(
this.result = await this.MongoManager.promises.getProjectsDocs(
this.projectId,
{ include_deleted: false },
this.filter
@ -128,7 +132,7 @@ describe('MongoManager', function () {
describe('with included_deleted = true', function () {
beforeEach(async function () {
this.result = await this.MongoManager.getProjectsDocs(
this.result = await this.MongoManager.promises.getProjectsDocs(
this.projectId,
{ include_deleted: true },
this.filter
@ -163,7 +167,7 @@ describe('MongoManager', function () {
this.db.docs.find = sinon.stub().returns({
toArray: sinon.stub().resolves([this.doc1, this.doc2, this.doc3]),
})
this.result = await this.MongoManager.getProjectsDeletedDocs(
this.result = await this.MongoManager.promises.getProjectsDeletedDocs(
this.projectId,
this.filter
)
@ -199,7 +203,7 @@ describe('MongoManager', function () {
})
it('should upsert the document', async function () {
await this.MongoManager.upsertIntoDocCollection(
await this.MongoManager.promises.upsertIntoDocCollection(
this.projectId,
this.docId,
this.oldRev,
@ -219,7 +223,7 @@ describe('MongoManager', function () {
it('should handle update error', async function () {
this.db.docs.updateOne.rejects(this.stubbedErr)
await expect(
this.MongoManager.upsertIntoDocCollection(
this.MongoManager.promises.upsertIntoDocCollection(
this.projectId,
this.docId,
this.rev,
@ -231,7 +235,7 @@ describe('MongoManager', function () {
})
it('should insert without a previous rev', async function () {
await this.MongoManager.upsertIntoDocCollection(
await this.MongoManager.promises.upsertIntoDocCollection(
this.projectId,
this.docId,
null,
@ -250,7 +254,7 @@ describe('MongoManager', function () {
it('should handle generic insert error', async function () {
this.db.docs.insertOne.rejects(this.stubbedErr)
await expect(
this.MongoManager.upsertIntoDocCollection(
this.MongoManager.promises.upsertIntoDocCollection(
this.projectId,
this.docId,
null,
@ -262,7 +266,7 @@ describe('MongoManager', function () {
it('should handle duplicate insert error', async function () {
this.db.docs.insertOne.rejects({ code: 11000 })
await expect(
this.MongoManager.upsertIntoDocCollection(
this.MongoManager.promises.upsertIntoDocCollection(
this.projectId,
this.docId,
null,
@ -276,7 +280,7 @@ describe('MongoManager', function () {
beforeEach(async function () {
this.projectId = new ObjectId()
this.db.docs.deleteMany = sinon.stub().resolves()
await this.MongoManager.destroyProject(this.projectId)
await this.MongoManager.promises.destroyProject(this.projectId)
})
it('should destroy all docs', function () {
@ -293,13 +297,13 @@ describe('MongoManager', function () {
it('should not error when the rev has not changed', async function () {
this.db.docs.findOne = sinon.stub().resolves({ rev: 1 })
await this.MongoManager.checkRevUnchanged(this.doc)
await this.MongoManager.promises.checkRevUnchanged(this.doc)
})
it('should return an error when the rev has changed', async function () {
this.db.docs.findOne = sinon.stub().resolves({ rev: 2 })
await expect(
this.MongoManager.checkRevUnchanged(this.doc)
this.MongoManager.promises.checkRevUnchanged(this.doc)
).to.be.rejectedWith(Errors.DocModifiedError)
})
@ -307,14 +311,14 @@ describe('MongoManager', function () {
this.db.docs.findOne = sinon.stub().resolves({ rev: 2 })
this.doc = { _id: new ObjectId(), name: 'mock-doc', rev: NaN }
await expect(
this.MongoManager.checkRevUnchanged(this.doc)
this.MongoManager.promises.checkRevUnchanged(this.doc)
).to.be.rejectedWith(Errors.DocRevValueError)
})
it('should return a value error if checked doc rev is NaN', async function () {
this.db.docs.findOne = sinon.stub().resolves({ rev: NaN })
await expect(
this.MongoManager.checkRevUnchanged(this.doc)
this.MongoManager.promises.checkRevUnchanged(this.doc)
).to.be.rejectedWith(Errors.DocRevValueError)
})
})
@ -330,7 +334,7 @@ describe('MongoManager', function () {
describe('complete doc', function () {
beforeEach(async function () {
await this.MongoManager.restoreArchivedDoc(
await this.MongoManager.promises.restoreArchivedDoc(
this.projectId,
this.docId,
this.archivedDoc
@ -360,7 +364,7 @@ describe('MongoManager', function () {
describe('without ranges', function () {
beforeEach(async function () {
delete this.archivedDoc.ranges
await this.MongoManager.restoreArchivedDoc(
await this.MongoManager.promises.restoreArchivedDoc(
this.projectId,
this.docId,
this.archivedDoc
@ -391,7 +395,7 @@ describe('MongoManager', function () {
it('throws a DocRevValueError', async function () {
this.db.docs.updateOne.resolves({ matchedCount: 0 })
await expect(
this.MongoManager.restoreArchivedDoc(
this.MongoManager.promises.restoreArchivedDoc(
this.projectId,
this.docId,
this.archivedDoc

View file

@ -30,7 +30,7 @@ describe('RangeManager', function () {
})
describe('jsonRangesToMongo', function () {
it('should convert ObjectIds and dates to proper objects and fix comment id', function () {
it('should convert ObjectIds and dates to proper objects', function () {
const changeId = new ObjectId().toString()
const commentId = new ObjectId().toString()
const userId = new ObjectId().toString()
@ -66,7 +66,7 @@ describe('RangeManager', function () {
],
comments: [
{
id: new ObjectId(threadId),
id: new ObjectId(commentId),
op: { c: 'foo', p: 3, t: new ObjectId(threadId) },
},
],
@ -110,6 +110,7 @@ describe('RangeManager', function () {
return it('should be consistent when transformed through json -> mongo -> json', function () {
const changeId = new ObjectId().toString()
const commentId = new ObjectId().toString()
const userId = new ObjectId().toString()
const threadId = new ObjectId().toString()
const ts = new Date().toJSON()
@ -126,7 +127,7 @@ describe('RangeManager', function () {
],
comments: [
{
id: threadId,
id: commentId,
op: { c: 'foo', p: 3, t: threadId },
},
],
@ -141,7 +142,6 @@ describe('RangeManager', function () {
return describe('shouldUpdateRanges', function () {
beforeEach(function () {
const threadId = new ObjectId()
this.ranges = {
changes: [
{
@ -155,8 +155,8 @@ describe('RangeManager', function () {
],
comments: [
{
id: threadId,
op: { c: 'foo', p: 3, t: threadId },
id: new ObjectId(),
op: { c: 'foo', p: 3, t: new ObjectId() },
},
],
}

View file

@ -1,4 +1,3 @@
const OError = require('@overleaf/o-error')
const DMP = require('diff-match-patch')
const { TextOperation } = require('overleaf-editor-core')
const dmp = new DMP()
@ -39,62 +38,23 @@ module.exports = {
return ops
},
/**
* @param {import("overleaf-editor-core").StringFileData} file
* @param {string} after
* @return {TextOperation}
*/
diffAsHistoryOTEditOperation(file, after) {
const beforeWithoutTrackedDeletes = file.getContent({
filterTrackedDeletes: true,
})
const diffs = dmp.diff_main(beforeWithoutTrackedDeletes, after)
diffAsHistoryV1EditOperation(before, after) {
const diffs = dmp.diff_main(before, after)
dmp.diff_cleanupSemantic(diffs)
const trackedChanges = file.trackedChanges.asSorted()
let nextTc = trackedChanges.shift()
const op = new TextOperation()
for (const diff of diffs) {
let [type, content] = diff
const [type, content] = diff
if (type === this.ADDED) {
op.insert(content)
} else if (type === this.REMOVED || type === this.UNCHANGED) {
while (op.baseLength + content.length > nextTc?.range.start) {
if (nextTc.tracking.type === 'delete') {
const untilRange = nextTc.range.start - op.baseLength
if (type === this.REMOVED) {
op.remove(untilRange)
} else if (type === this.UNCHANGED) {
op.retain(untilRange)
}
op.retain(nextTc.range.end - nextTc.range.start)
content = content.slice(untilRange)
}
nextTc = trackedChanges.shift()
}
if (type === this.REMOVED) {
} else if (type === this.REMOVED) {
op.remove(content.length)
} else if (type === this.UNCHANGED) {
op.retain(content.length)
}
} else {
throw new Error('Unknown type')
}
}
while (nextTc) {
if (
nextTc.tracking.type !== 'delete' ||
nextTc.range.start !== op.baseLength
) {
throw new OError(
'StringFileData.trackedChanges out of sync: unexpected range after end of diff',
{ nextTc, baseLength: op.baseLength }
)
}
op.retain(nextTc.range.end - nextTc.range.start)
nextTc = trackedChanges.shift()
}
return op
},
}
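Both variants above turn a diff-match-patch diff into a TextOperation. A minimal sketch of the plain mapping, without tracked-change handling (the numeric diff types stand in for this.ADDED / this.REMOVED / this.UNCHANGED; this is an illustration, not the module's exact code):

const DMP = require('diff-match-patch')
const { TextOperation } = require('overleaf-editor-core')

function diffToEditOperation(before, after) {
  const dmp = new DMP()
  const diffs = dmp.diff_main(before, after)
  dmp.diff_cleanupSemantic(diffs)
  const op = new TextOperation()
  for (const [type, content] of diffs) {
    if (type === 1) {
      op.insert(content) // added text becomes an insert
    } else if (type === -1) {
      op.remove(content.length) // removed text becomes a remove
    } else {
      op.retain(content.length) // unchanged text is retained
    }
  }
  return op
}

The tracked-changes variant additionally splits removed and unchanged spans at tracked-delete boundaries so that tracked deletes are retained rather than removed again.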

View file

@ -194,8 +194,9 @@ const DocumentManager = {
let op
if (type === 'history-ot') {
const file = StringFileData.fromRaw(oldLines)
const operation = DiffCodec.diffAsHistoryOTEditOperation(
file,
const operation = DiffCodec.diffAsHistoryV1EditOperation(
// TODO(24596): tc support for history-ot
file.getContent({ filterTrackedDeletes: true }),
newLines.join('\n')
)
if (operation.isNoop()) {
@ -535,6 +536,11 @@ const DocumentManager = {
if (opts.historyRangesMigration) {
historyRangesSupport = opts.historyRangesMigration === 'forwards'
}
if (!Array.isArray(lines)) {
const file = StringFileData.fromRaw(lines)
// TODO(24596): tc support for history-ot
lines = file.getLines()
}
await ProjectHistoryRedisManager.promises.queueResyncDocContent(
projectId,

View file

@ -28,19 +28,4 @@ module.exports = {
// since we didn't hit the limit in the loop, the document is within the allowed length
return false
},
/**
* @param {StringFileRawData} raw
* @param {number} maxDocLength
*/
stringFileDataContentIsTooLarge(raw, maxDocLength) {
let n = raw.content.length
if (n <= maxDocLength) return false // definitely under the limit, no need to calculate the total size
for (const tc of raw.trackedChanges ?? []) {
if (tc.tracking.type !== 'delete') continue
n -= tc.range.length
if (n <= maxDocLength) return false // under the limit now, no need to calculate the exact size
}
return true
},
}
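A usage sketch of the stringFileDataContentIsTooLarge helper shown above, applied to a raw StringFileData value (the limit and require path are illustrative):

const Limits = require('./Limits')

const raw = {
  content: 'x'.repeat(2 * 1024 * 1024 + 5),
  trackedChanges: [
    {
      range: { pos: 0, length: 10 },
      tracking: {
        type: 'delete',
        ts: '2025-06-16T14:31:44.910Z',
        userId: 'user-id',
      },
    },
  ],
}

// The raw content is 5 characters over the limit, but 10 of them belong to a
// tracked delete, so the effective length is under the limit.
Limits.stringFileDataContentIsTooLarge(raw, 2 * 1024 * 1024) // => false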

View file

@ -8,14 +8,13 @@ const rclient = require('@overleaf/redis-wrapper').createClient(
)
const logger = require('@overleaf/logger')
const metrics = require('./Metrics')
const { docIsTooLarge, stringFileDataContentIsTooLarge } = require('./Limits')
const { docIsTooLarge } = require('./Limits')
const { addTrackedDeletesToContent, extractOriginOrSource } = require('./Utils')
const HistoryConversions = require('./HistoryConversions')
const OError = require('@overleaf/o-error')
/**
* @import { Ranges } from './types'
* @import { StringFileRawData } from 'overleaf-editor-core/lib/types'
*/
const ProjectHistoryRedisManager = {
@ -181,7 +180,7 @@ const ProjectHistoryRedisManager = {
* @param {string} projectId
* @param {string} projectHistoryId
* @param {string} docId
* @param {string[] | StringFileRawData} lines
* @param {string[]} lines
* @param {Ranges} ranges
* @param {string[]} resolvedCommentIds
* @param {number} version
@ -205,8 +204,13 @@ const ProjectHistoryRedisManager = {
'queue doc content resync'
)
let content = lines.join('\n')
if (historyRangesSupport) {
content = addTrackedDeletesToContent(content, ranges.changes ?? [])
}
const projectUpdate = {
resyncDocContent: { version },
resyncDocContent: { content, version },
projectHistoryId,
path: pathname,
doc: docId,
@ -215,43 +219,22 @@ const ProjectHistoryRedisManager = {
},
}
let content = ''
if (Array.isArray(lines)) {
content = lines.join('\n')
if (historyRangesSupport) {
content = addTrackedDeletesToContent(content, ranges.changes ?? [])
projectUpdate.resyncDocContent.ranges =
HistoryConversions.toHistoryRanges(ranges)
projectUpdate.resyncDocContent.resolvedCommentIds = resolvedCommentIds
}
} else {
content = lines.content
projectUpdate.resyncDocContent.historyOTRanges = {
comments: lines.comments,
trackedChanges: lines.trackedChanges,
}
}
projectUpdate.resyncDocContent.content = content
const jsonUpdate = JSON.stringify(projectUpdate)
// Do an optimised size check on the docLines using the serialised
// project update length as an upper bound
const sizeBound = jsonUpdate.length
if (Array.isArray(lines)) {
if (docIsTooLarge(sizeBound, lines, Settings.max_doc_length)) {
throw new OError(
'blocking resync doc content insert into project history queue: doc is too large',
{ projectId, docId, docSize: sizeBound }
)
}
} else if (
stringFileDataContentIsTooLarge(lines, Settings.max_doc_length)
) {
throw new OError(
'blocking resync doc content insert into project history queue: doc is too large',
{ projectId, docId, docSize: sizeBound }
)
}
return await ProjectHistoryRedisManager.queueOps(projectId, jsonUpdate)
},
}
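For reference, a resyncDocContent update queued here for an array-of-lines doc with history ranges support serialises to roughly the following shape (all values illustrative):

// Pushed onto the ProjectHistory:Ops:{project_id} list as a JSON string.
const exampleUpdate = {
  resyncDocContent: {
    version: 42,
    ranges: { /* history ranges from HistoryConversions.toHistoryRanges */ },
    resolvedCommentIds: ['comment-id-1'],
    content: 'one\ntwo',
  },
  projectHistoryId: 'history-id',
  path: 'main.tex',
  doc: 'doc-id',
  meta: { ts: new Date() },
}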

View file

@ -28,15 +28,12 @@ services:
MOCHA_GREP: ${MOCHA_GREP}
NODE_ENV: test
NODE_OPTIONS: "--unhandled-rejections=strict"
volumes:
- ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
depends_on:
mongo:
condition: service_started
redis:
condition: service_healthy
user: node
entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
command: npm run test:acceptance
@ -48,7 +45,7 @@ services:
command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
user: root
redis:
image: redis:7.4.3
image: redis
healthcheck:
test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ]
interval: 1s

View file

@ -26,7 +26,6 @@ services:
- .:/overleaf/services/document-updater
- ../../node_modules:/overleaf/node_modules
- ../../libraries:/overleaf/libraries
- ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
working_dir: /overleaf/services/document-updater
environment:
ELASTIC_SEARCH_DSN: es:9200
@ -46,11 +45,10 @@ services:
condition: service_started
redis:
condition: service_healthy
entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
command: npm run --silent test:acceptance
redis:
image: redis:7.4.3
image: redis
healthcheck:
test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ]
interval: 1s

View file

@ -15,7 +15,6 @@ const request = require('requestretry').defaults({
retryDelay: 10,
})
const ONLY_PROJECT_ID = process.env.ONLY_PROJECT_ID
const AUTO_FIX_VERSION_MISMATCH =
process.env.AUTO_FIX_VERSION_MISMATCH === 'true'
const AUTO_FIX_PARTIALLY_DELETED_DOC_METADATA =
@ -320,9 +319,7 @@ async function processProject(projectId) {
* @return {Promise<{perIterationOutOfSync: number, done: boolean}>}
*/
async function scanOnce(processed, outOfSync) {
const projectIds = ONLY_PROJECT_ID
? [ONLY_PROJECT_ID]
: await ProjectFlusher.promises.flushAllProjects({
const projectIds = await ProjectFlusher.promises.flushAllProjects({
limit: LIMIT,
dryRun: true,
})

View file

@ -686,285 +686,4 @@ describe('Setting a document', function () {
})
})
})
describe('with track changes (history-ot)', function () {
const lines = ['one', 'one and a half', 'two', 'three']
const userId = DocUpdaterClient.randomId()
const ts = new Date().toISOString()
beforeEach(function (done) {
numberOfReceivedUpdates = 0
this.newLines = ['one', 'two', 'three']
this.project_id = DocUpdaterClient.randomId()
this.doc_id = DocUpdaterClient.randomId()
this.historyOTUpdate = {
doc: this.doc_id,
op: [
{
textOperation: [
4,
{
r: 'one and a half\n'.length,
tracking: {
type: 'delete',
userId,
ts,
},
},
9,
],
},
],
v: this.version,
meta: { source: 'random-publicId' },
}
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines,
version: this.version,
otMigrationStage: 1,
})
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
if (error) {
throw error
}
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
this.historyOTUpdate,
error => {
if (error) {
throw error
}
DocUpdaterClient.waitForPendingUpdates(
this.project_id,
this.doc_id,
done
)
}
)
})
})
afterEach(function () {
MockProjectHistoryApi.flushProject.resetHistory()
MockWebApi.setDocument.resetHistory()
})
it('should record tracked changes', function (done) {
docUpdaterRedis.get(
Keys.docLines({ doc_id: this.doc_id }),
(error, data) => {
if (error) {
throw error
}
expect(JSON.parse(data)).to.deep.equal({
content: lines.join('\n'),
trackedChanges: [
{
range: {
pos: 4,
length: 15,
},
tracking: {
ts,
type: 'delete',
userId,
},
},
],
})
done()
}
)
})
it('should apply the change', function (done) {
DocUpdaterClient.getDoc(
this.project_id,
this.doc_id,
(error, res, data) => {
if (error) {
throw error
}
expect(data.lines).to.deep.equal(this.newLines)
done()
}
)
})
const cases = [
{
name: 'when resetting the content',
lines,
want: {
content: 'one\none and a half\none and a half\ntwo\nthree',
trackedChanges: [
{
range: {
pos: 'one and a half\n'.length + 4,
length: 15,
},
tracking: {
ts,
type: 'delete',
userId,
},
},
],
},
},
{
name: 'when adding content before a tracked delete',
lines: ['one', 'INSERT', 'two', 'three'],
want: {
content: 'one\nINSERT\none and a half\ntwo\nthree',
trackedChanges: [
{
range: {
pos: 'INSERT\n'.length + 4,
length: 15,
},
tracking: {
ts,
type: 'delete',
userId,
},
},
],
},
},
{
name: 'when adding content after a tracked delete',
lines: ['one', 'two', 'INSERT', 'three'],
want: {
content: 'one\none and a half\ntwo\nINSERT\nthree',
trackedChanges: [
{
range: {
pos: 4,
length: 15,
},
tracking: {
ts,
type: 'delete',
userId,
},
},
],
},
},
{
name: 'when deleting content before a tracked delete',
lines: ['two', 'three'],
want: {
content: 'one and a half\ntwo\nthree',
trackedChanges: [
{
range: {
pos: 0,
length: 15,
},
tracking: {
ts,
type: 'delete',
userId,
},
},
],
},
},
{
name: 'when deleting content after a tracked delete',
lines: ['one', 'two'],
want: {
content: 'one\none and a half\ntwo',
trackedChanges: [
{
range: {
pos: 4,
length: 15,
},
tracking: {
ts,
type: 'delete',
userId,
},
},
],
},
},
{
name: 'when deleting content immediately after a tracked delete',
lines: ['one', 'three'],
want: {
content: 'one\none and a half\nthree',
trackedChanges: [
{
range: {
pos: 4,
length: 15,
},
tracking: {
ts,
type: 'delete',
userId,
},
},
],
},
},
{
name: 'when deleting content across a tracked delete',
lines: ['onethree'],
want: {
content: 'oneone and a half\nthree',
trackedChanges: [
{
range: {
pos: 3,
length: 15,
},
tracking: {
ts,
type: 'delete',
userId,
},
},
],
},
},
]
for (const { name, lines, want } of cases) {
describe(name, function () {
beforeEach(function (done) {
DocUpdaterClient.setDocLines(
this.project_id,
this.doc_id,
lines,
this.source,
userId,
false,
(error, res, body) => {
if (error) {
return done(error)
}
this.statusCode = res.statusCode
this.body = body
done()
}
)
})
it('should update accordingly', function (done) {
docUpdaterRedis.get(
Keys.docLines({ doc_id: this.doc_id }),
(error, data) => {
if (error) {
throw error
}
expect(JSON.parse(data)).to.deep.equal(want)
done()
}
)
})
})
}
})
})

View file

@ -81,88 +81,4 @@ describe('Limits', function () {
})
})
})
describe('stringFileDataContentIsTooLarge', function () {
it('should handle small docs', function () {
expect(
this.Limits.stringFileDataContentIsTooLarge({ content: '' }, 123)
).to.equal(false)
})
it('should handle docs at the limit', function () {
expect(
this.Limits.stringFileDataContentIsTooLarge(
{ content: 'x'.repeat(123) },
123
)
).to.equal(false)
})
it('should handle docs above the limit', function () {
expect(
this.Limits.stringFileDataContentIsTooLarge(
{ content: 'x'.repeat(123 + 1) },
123
)
).to.equal(true)
})
it('should handle docs above the limit and below with tracked-deletes removed', function () {
expect(
this.Limits.stringFileDataContentIsTooLarge(
{
content: 'x'.repeat(123 + 1),
trackedChanges: [
{
range: { pos: 1, length: 1 },
tracking: {
type: 'delete',
ts: '2025-06-16T14:31:44.910Z',
userId: 'user-id',
},
},
],
},
123
)
).to.equal(false)
})
it('should handle docs above the limit and above with tracked-deletes removed', function () {
expect(
this.Limits.stringFileDataContentIsTooLarge(
{
content: 'x'.repeat(123 + 2),
trackedChanges: [
{
range: { pos: 1, length: 1 },
tracking: {
type: 'delete',
ts: '2025-06-16T14:31:44.910Z',
userId: 'user-id',
},
},
],
},
123
)
).to.equal(true)
})
it('should handle docs above the limit and with tracked-inserts', function () {
expect(
this.Limits.stringFileDataContentIsTooLarge(
{
content: 'x'.repeat(123 + 1),
trackedChanges: [
{
range: { pos: 1, length: 1 },
tracking: {
type: 'insert',
ts: '2025-06-16T14:31:44.910Z',
userId: 'user-id',
},
},
],
},
123
)
).to.equal(true)
})
})
})

View file

@ -15,7 +15,6 @@ describe('ProjectHistoryRedisManager', function () {
this.Limits = {
docIsTooLarge: sinon.stub().returns(false),
stringFileDataContentIsTooLarge: sinon.stub().returns(false),
}
this.ProjectHistoryRedisManager = SandboxedModule.require(modulePath, {
@ -62,18 +61,22 @@ describe('ProjectHistoryRedisManager', function () {
})
it('should queue an update', function () {
this.multi.rpush.should.have.been.calledWithExactly(
this.multi.rpush
.calledWithExactly(
`ProjectHistory:Ops:${this.project_id}`,
this.ops[0],
this.ops[1]
)
.should.equal(true)
})
it('should set the queue timestamp if not present', function () {
this.multi.setnx.should.have.been.calledWithExactly(
this.multi.setnx
.calledWithExactly(
`ProjectHistory:FirstOpTimestamp:${this.project_id}`,
Date.now()
)
.should.equal(true)
})
})
@ -115,10 +118,9 @@ describe('ProjectHistoryRedisManager', function () {
file: this.file_id,
}
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
this.project_id,
JSON.stringify(update)
)
this.ProjectHistoryRedisManager.promises.queueOps
.calledWithExactly(this.project_id, JSON.stringify(update))
.should.equal(true)
})
})
@ -164,10 +166,9 @@ describe('ProjectHistoryRedisManager', function () {
doc: this.doc_id,
}
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
this.project_id,
JSON.stringify(update)
)
this.ProjectHistoryRedisManager.promises.queueOps
.calledWithExactly(this.project_id, JSON.stringify(update))
.should.equal(true)
})
it('should queue an update with file metadata', async function () {
@ -349,10 +350,9 @@ describe('ProjectHistoryRedisManager', function () {
doc: this.doc_id,
}
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
this.project_id,
JSON.stringify(update)
)
this.ProjectHistoryRedisManager.promises.queueOps
.calledWithExactly(this.project_id, JSON.stringify(update))
.should.equal(true)
})
it('should not forward ranges if history ranges support is undefined', async function () {
@ -402,10 +402,9 @@ describe('ProjectHistoryRedisManager', function () {
doc: this.doc_id,
}
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
this.project_id,
JSON.stringify(update)
)
this.ProjectHistoryRedisManager.promises.queueOps
.calledWithExactly(this.project_id, JSON.stringify(update))
.should.equal(true)
})
it('should pass "false" as the createdBlob field if not provided', async function () {
@ -433,10 +432,9 @@ describe('ProjectHistoryRedisManager', function () {
doc: this.doc_id,
}
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
this.project_id,
JSON.stringify(update)
)
this.ProjectHistoryRedisManager.promises.queueOps
.calledWithExactly(this.project_id, JSON.stringify(update))
.should.equal(true)
})
it('should pass through the value of the createdBlob field', async function () {
@ -465,10 +463,9 @@ describe('ProjectHistoryRedisManager', function () {
doc: this.doc_id,
}
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
this.project_id,
JSON.stringify(update)
)
this.ProjectHistoryRedisManager.promises.queueOps
.calledWithExactly(this.project_id, JSON.stringify(update))
.should.equal(true)
})
})
@ -496,8 +493,8 @@ describe('ProjectHistoryRedisManager', function () {
beforeEach(async function () {
this.update = {
resyncDocContent: {
version: this.version,
content: 'one\ntwo',
version: this.version,
},
projectHistoryId: this.projectHistoryId,
path: this.pathname,
@ -519,18 +516,19 @@ describe('ProjectHistoryRedisManager', function () {
})
it('should check if the doc is too large', function () {
this.Limits.docIsTooLarge.should.have.been.calledWith(
this.Limits.docIsTooLarge
.calledWith(
JSON.stringify(this.update).length,
this.lines,
this.settings.max_doc_length
)
.should.equal(true)
})
it('should queue an update', function () {
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
this.project_id,
JSON.stringify(this.update)
)
this.ProjectHistoryRedisManager.promises.queueOps
.calledWithExactly(this.project_id, JSON.stringify(this.update))
.should.equal(true)
})
})
@ -553,8 +551,9 @@ describe('ProjectHistoryRedisManager', function () {
})
it('should not queue an update if the doc is too large', function () {
this.ProjectHistoryRedisManager.promises.queueOps.should.not.have.been
.called
this.ProjectHistoryRedisManager.promises.queueOps.called.should.equal(
false
)
})
})
@ -562,10 +561,10 @@ describe('ProjectHistoryRedisManager', function () {
beforeEach(async function () {
this.update = {
resyncDocContent: {
content: 'onedeleted\ntwo',
version: this.version,
ranges: this.ranges,
resolvedCommentIds: this.resolvedCommentIds,
content: 'onedeleted\ntwo',
},
projectHistoryId: this.projectHistoryId,
path: this.pathname,
@ -602,76 +601,9 @@ describe('ProjectHistoryRedisManager', function () {
})
it('should queue an update', function () {
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
this.project_id,
JSON.stringify(this.update)
)
})
})
describe('history-ot', function () {
beforeEach(async function () {
this.lines = {
content: 'onedeleted\ntwo',
comments: [{ id: 'id1', ranges: [{ pos: 0, length: 3 }] }],
trackedChanges: [
{
range: { pos: 3, length: 7 },
tracking: {
type: 'delete',
userId: 'user-id',
ts: '2025-06-16T14:31:44.910Z',
},
},
],
}
this.update = {
resyncDocContent: {
version: this.version,
historyOTRanges: {
comments: this.lines.comments,
trackedChanges: this.lines.trackedChanges,
},
content: this.lines.content,
},
projectHistoryId: this.projectHistoryId,
path: this.pathname,
doc: this.doc_id,
meta: { ts: new Date() },
}
await this.ProjectHistoryRedisManager.promises.queueResyncDocContent(
this.project_id,
this.projectHistoryId,
this.doc_id,
this.lines,
this.ranges,
this.resolvedCommentIds,
this.version,
this.pathname,
true
)
})
it('should include tracked deletes in the update', function () {
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
this.project_id,
JSON.stringify(this.update)
)
})
it('should check the doc length without tracked deletes', function () {
this.Limits.stringFileDataContentIsTooLarge.should.have.been.calledWith(
this.lines,
this.settings.max_doc_length
)
})
it('should queue an update', function () {
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
this.project_id,
JSON.stringify(this.update)
)
this.ProjectHistoryRedisManager.promises.queueOps
.calledWithExactly(this.project_id, JSON.stringify(this.update))
.should.equal(true)
})
})
})

View file

@ -1,76 +0,0 @@
const crypto = require('node:crypto')
class Rollout {
constructor(config) {
// The history buffer level is used to determine whether to queue changes
// in Redis or persist them directly to the chunk store.
// It defaults to 0 (no queuing) if not set.
this.historyBufferLevel = config.has('historyBufferLevel')
? parseInt(config.get('historyBufferLevel'), 10)
: 0
// The forcePersistBuffer flag will ensure the buffer is fully persisted before
// any persist operation. Set this to true if you want to make the persisted-version
// in Redis match the endVersion of the latest chunk. This should be set to true
// when downgrading from a history buffer level that queues changes in Redis
// without persisting them immediately.
this.forcePersistBuffer = config.has('forcePersistBuffer')
? config.get('forcePersistBuffer') === 'true'
: false
// Support gradual rollout of the next history buffer level
// with a percentage of projects using it.
this.nextHistoryBufferLevel = config.has('nextHistoryBufferLevel')
? parseInt(config.get('nextHistoryBufferLevel'), 10)
: null
this.nextHistoryBufferLevelRolloutPercentage = config.has(
'nextHistoryBufferLevelRolloutPercentage'
)
? parseInt(config.get('nextHistoryBufferLevelRolloutPercentage'), 10)
: 0
}
report(logger) {
logger.info(
{
historyBufferLevel: this.historyBufferLevel,
forcePersistBuffer: this.forcePersistBuffer,
nextHistoryBufferLevel: this.nextHistoryBufferLevel,
nextHistoryBufferLevelRolloutPercentage:
this.nextHistoryBufferLevelRolloutPercentage,
},
this.historyBufferLevel > 0 || this.forcePersistBuffer
? 'using history buffer'
: 'history buffer disabled'
)
}
/**
* Get the history buffer level for a project.
* @param {string} projectId
* @returns {Object} - An object containing the history buffer level and force persist buffer flag.
* @property {number} historyBufferLevel - The history buffer level to use for processing changes.
* @property {boolean} forcePersistBuffer - If true, forces the buffer to be persisted before any operation.
*/
getHistoryBufferLevelOptions(projectId) {
if (
this.nextHistoryBufferLevel > this.historyBufferLevel &&
this.nextHistoryBufferLevelRolloutPercentage > 0
) {
const hash = crypto.createHash('sha1').update(projectId).digest('hex')
const percentage = parseInt(hash.slice(0, 8), 16) % 100
// If the project is in the rollout percentage, we use the next history buffer level.
if (percentage < this.nextHistoryBufferLevelRolloutPercentage) {
return {
historyBufferLevel: this.nextHistoryBufferLevel,
forcePersistBuffer: this.forcePersistBuffer,
}
}
}
return {
historyBufferLevel: this.historyBufferLevel,
forcePersistBuffer: this.forcePersistBuffer,
}
}
}
module.exports = Rollout
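The percentage check in getHistoryBufferLevelOptions above is a deterministic bucket derived from the project id, so a given project always receives the same rollout decision. A standalone sketch of that bucketing:

const crypto = require('node:crypto')

// Maps a projectId to a stable bucket in the range 0..99.
function rolloutBucket(projectId) {
  const hash = crypto.createHash('sha1').update(projectId).digest('hex')
  return parseInt(hash.slice(0, 8), 16) % 100
}

// A project uses the next history buffer level while its bucket is below the
// configured percentage, e.g. a 25% rollout includes buckets 0..24.
const included = rolloutBucket('some-project-id') < 25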

View file

@ -2,7 +2,6 @@
'use strict'
const config = require('config')
const { expressify } = require('@overleaf/promise-utils')
const HTTPStatus = require('http-status')
@ -22,15 +21,10 @@ const BatchBlobStore = storage.BatchBlobStore
const BlobStore = storage.BlobStore
const chunkStore = storage.chunkStore
const HashCheckBlobStore = storage.HashCheckBlobStore
const commitChanges = storage.commitChanges
const persistBuffer = storage.persistBuffer
const persistChanges = storage.persistChanges
const InvalidChangeError = storage.InvalidChangeError
const render = require('./render')
const Rollout = require('../app/rollout')
const rollout = new Rollout(config)
rollout.report(logger) // display the rollout configuration in the logs
async function importSnapshot(req, res) {
const projectId = req.swagger.params.project_id.value
@ -41,7 +35,6 @@ async function importSnapshot(req, res) {
try {
snapshot = Snapshot.fromRaw(rawSnapshot)
} catch (err) {
logger.warn({ err, projectId }, 'failed to import snapshot')
return render.unprocessableEntity(res)
}
@ -50,7 +43,6 @@ async function importSnapshot(req, res) {
historyId = await chunkStore.initializeProject(projectId, snapshot)
} catch (err) {
if (err instanceof chunkStore.AlreadyInitialized) {
logger.warn({ err, projectId }, 'already initialized')
return render.conflict(res)
} else {
throw err
@ -116,12 +108,7 @@ async function importChanges(req, res, next) {
let result
try {
const { historyBufferLevel, forcePersistBuffer } =
rollout.getHistoryBufferLevelOptions(projectId)
result = await commitChanges(projectId, changes, limits, endVersion, {
historyBufferLevel,
forcePersistBuffer,
})
result = await persistChanges(projectId, changes, limits, endVersion)
} catch (err) {
if (
err instanceof Chunk.ConflictingEndVersion ||
@ -154,29 +141,5 @@ async function importChanges(req, res, next) {
}
}
async function flushChanges(req, res, next) {
const projectId = req.swagger.params.project_id.value
// Use the same limits as importChanges, since these are passed to persistChanges
const farFuture = new Date()
farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000)
const limits = {
maxChanges: 0,
minChangeTimestamp: farFuture,
maxChangeTimestamp: farFuture,
autoResync: true,
}
try {
await persistBuffer(projectId, limits)
res.status(HTTPStatus.OK).end()
} catch (err) {
if (err instanceof Chunk.NotFoundError) {
render.notFound(res)
} else {
throw err
}
}
}
exports.importSnapshot = expressify(importSnapshot)
exports.importChanges = expressify(importChanges)
exports.flushChanges = expressify(flushChanges)

View file

@ -34,7 +34,6 @@ async function initializeProject(req, res, next) {
res.status(HTTPStatus.OK).json({ projectId })
} catch (err) {
if (err instanceof chunkStore.AlreadyInitialized) {
logger.warn({ err, projectId }, 'failed to initialize')
render.conflict(res)
} else {
throw err
@ -243,15 +242,11 @@ async function createProjectBlob(req, res, next) {
const sizeLimit = new StreamSizeLimit(maxUploadSize)
await pipeline(req, sizeLimit, fs.createWriteStream(tmpPath))
if (sizeLimit.sizeLimitExceeded) {
logger.warn(
{ projectId, expectedHash, maxUploadSize },
'blob exceeds size threshold'
)
return render.requestEntityTooLarge(res)
}
const hash = await blobHash.fromFile(tmpPath)
if (hash !== expectedHash) {
logger.warn({ projectId, hash, expectedHash }, 'Hash mismatch')
logger.debug({ hash, expectedHash }, 'Hash mismatch')
return render.conflict(res, 'File hash mismatch')
}
@ -348,10 +343,6 @@ async function copyProjectBlob(req, res, next) {
targetBlobStore.getBlob(blobHash),
])
if (!sourceBlob) {
logger.warn(
{ sourceProjectId, targetProjectId, blobHash },
'missing source blob when copying across projects'
)
return render.notFound(res)
}
// Exit early if the blob exists in the target project.

View file

@ -139,45 +139,9 @@ const getChanges = {
],
}
const flushChanges = {
'x-swagger-router-controller': 'project_import',
operationId: 'flushChanges',
tags: ['ProjectImport'],
description: 'Flush project changes from buffer to the chunk store.',
parameters: [
{
name: 'project_id',
in: 'path',
description: 'project id',
required: true,
type: 'string',
},
],
responses: {
200: {
description: 'Success',
schema: {
$ref: '#/definitions/Project',
},
},
404: {
description: 'Not Found',
schema: {
$ref: '#/definitions/Error',
},
},
},
security: [
{
basic: [],
},
],
}
exports.paths = {
'/projects/{project_id}/import': { post: importSnapshot },
'/projects/{project_id}/legacy_import': { post: importSnapshot },
'/projects/{project_id}/changes': { get: getChanges, post: importChanges },
'/projects/{project_id}/legacy_changes': { post: importChanges },
'/projects/{project_id}/flush': { post: flushChanges },
}

View file

@ -100,13 +100,11 @@ function setupErrorHandling() {
})
}
if (err.code === 'ENUM_MISMATCH') {
logger.warn({ err, projectId }, err.message)
return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({
message: 'invalid enum value: ' + err.paramName,
})
}
if (err.code === 'REQUIRED') {
logger.warn({ err, projectId }, err.message)
return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({
message: err.message,
})

View file

@ -84,10 +84,6 @@
"maxFileUploadSize": "MAX_FILE_UPLOAD_SIZE",
"httpsOnly": "HTTPS_ONLY",
"httpRequestTimeout": "HTTP_REQUEST_TIMEOUT",
"historyBufferLevel": "HISTORY_BUFFER_LEVEL",
"forcePersistBuffer": "FORCE_PERSIST_BUFFER",
"nextHistoryBufferLevel": "NEXT_HISTORY_BUFFER_LEVEL",
"nextHistoryBufferLevelRolloutPercentage": "NEXT_HISTORY_BUFFER_LEVEL_ROLLOUT_PERCENTAGE",
"redis": {
"queue": {
"host": "QUEUES_REDIS_HOST",
@ -104,9 +100,5 @@
"password": "REDIS_PASSWORD",
"port": "REDIS_PORT"
}
},
"projectHistory": {
"host": "PROJECT_HISTORY_HOST",
"port": "PROJECT_HISTORY_PORT"
}
}

View file

@ -39,8 +39,5 @@
"databasePoolMin": "2",
"databasePoolMax": "10",
"httpsOnly": "false",
"httpRequestTimeout": "300000",
"projectHistory": {
"port": "3054"
}
"httpRequestTimeout": "300000"
}

View file

@ -39,7 +39,6 @@ services:
NODE_OPTIONS: "--unhandled-rejections=strict"
volumes:
- ./test/acceptance/certs:/certs
- ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
depends_on:
mongo:
condition: service_started
@ -56,7 +55,6 @@ services:
gcs:
condition: service_healthy
user: node
entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
command: npm run test:acceptance
@ -68,7 +66,7 @@ services:
command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
user: root
redis:
image: redis:7.4.3
image: redis
healthcheck:
test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ]
interval: 1s

View file

@ -33,7 +33,6 @@ services:
- ../../node_modules:/overleaf/node_modules
- ../../libraries:/overleaf/libraries
- ./test/acceptance/certs:/certs
- ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
working_dir: /overleaf/services/history-v1
environment:
ELASTIC_SEARCH_DSN: es:9200
@ -72,11 +71,10 @@ services:
condition: service_completed_successfully
gcs:
condition: service_healthy
entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
command: npm run --silent test:acceptance
redis:
image: redis:7.4.3
image: redis
healthcheck:
test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ]
interval: 1s

View file

@ -7,7 +7,6 @@
"private": true,
"dependencies": {
"@google-cloud/secret-manager": "^5.6.0",
"@overleaf/fetch-utils": "*",
"@overleaf/logger": "*",
"@overleaf/metrics": "*",
"@overleaf/mongo-utils": "*",
@ -37,7 +36,6 @@
"mongodb": "6.12.0",
"overleaf-editor-core": "*",
"p-limit": "^6.2.0",
"p-queue": "^8.1.0",
"pg": "^8.7.1",
"pg-query-stream": "^4.2.4",
"swagger-tools": "^0.10.4",

View file

@ -8,9 +8,6 @@ exports.mongodb = require('./lib/mongodb')
exports.redis = require('./lib/redis')
exports.persistChanges = require('./lib/persist_changes')
exports.persistor = require('./lib/persistor')
exports.persistBuffer = require('./lib/persist_buffer')
exports.commitChanges = require('./lib/commit_changes')
exports.queueChanges = require('./lib/queue_changes')
exports.ProjectArchive = require('./lib/project_archive')
exports.streams = require('./lib/streams')
exports.temp = require('./lib/temp')

View file

@ -151,48 +151,23 @@ async function loadAtVersion(projectId, version, opts = {}) {
const backend = getBackend(projectId)
const blobStore = new BlobStore(projectId)
const batchBlobStore = new BatchBlobStore(blobStore)
const latestChunkMetadata = await getLatestChunkMetadata(projectId)
// When loading a chunk for a version there are three cases to consider:
// 1. If `persistedOnly` is true, we always use the requested version
// to fetch the chunk.
// 2. If `persistedOnly` is false and the requested version is in the
// persisted chunk version range, we use the requested version.
// 3. If `persistedOnly` is false and the requested version is ahead of
// the persisted chunk versions, we fetch the latest chunk and see if
// the non-persisted changes include the requested version.
const targetChunkVersion = opts.persistedOnly
? version
: Math.min(latestChunkMetadata.endVersion, version)
const chunkRecord = await backend.getChunkForVersion(
projectId,
targetChunkVersion,
{
const chunkRecord = await backend.getChunkForVersion(projectId, version, {
preferNewer: opts.preferNewer,
}
)
})
const rawHistory = await historyStore.loadRaw(projectId, chunkRecord.id)
const history = History.fromRaw(rawHistory)
const startVersion = chunkRecord.endVersion - history.countChanges()
if (!opts.persistedOnly) {
// Try to extend the chunk with any non-persisted changes that
// follow the chunk's end version.
const nonPersistedChanges = await getChunkExtension(
projectId,
chunkRecord.endVersion
)
history.pushChanges(nonPersistedChanges)
// Check that the changes do actually contain the requested version
if (version > chunkRecord.endVersion + nonPersistedChanges.length) {
throw new Chunk.VersionNotFoundError(projectId, version)
}
}
await lazyLoadHistoryFiles(history, batchBlobStore)
return new Chunk(history, startVersion)
return new Chunk(history, chunkRecord.endVersion - history.countChanges())
}
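A worked example of the behaviour of the fuller loadAtVersion variant above (numbers are illustrative):

// Suppose the latest persisted chunk ends at version 100 and Redis holds 5
// non-persisted changes on top of it (versions 101..105).
//
//   loadAtVersion(projectId, 95, { persistedOnly: true })
//     -> fetches the chunk containing version 95, no extension
//   loadAtVersion(projectId, 103)
//     -> clamps to min(100, 103) = 100, loads that chunk, then appends the
//        5 buffered changes so version 103 is reachable
//   loadAtVersion(projectId, 110)
//     -> 110 > 100 + 5, so it throws Chunk.VersionNotFoundError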
/**
@ -215,7 +190,6 @@ async function loadAtTimestamp(projectId, timestamp, opts = {}) {
const chunkRecord = await backend.getChunkForTimestamp(projectId, timestamp)
const rawHistory = await historyStore.loadRaw(projectId, chunkRecord.id)
const history = History.fromRaw(rawHistory)
const startVersion = chunkRecord.endVersion - history.countChanges()
if (!opts.persistedOnly) {
const nonPersistedChanges = await getChunkExtension(
@ -226,7 +200,7 @@ async function loadAtTimestamp(projectId, timestamp, opts = {}) {
}
await lazyLoadHistoryFiles(history, batchBlobStore)
return new Chunk(history, startVersion)
return new Chunk(history, chunkRecord.endVersion - history.countChanges())
}
/**

View file

@ -286,27 +286,6 @@ async function updateProjectRecord(
)
}
/**
* @param {number} historyId
* @return {Promise<string>}
*/
async function lookupMongoProjectIdFromHistoryId(historyId) {
const project = await mongodb.projects.findOne(
// string for Object ids, number for postgres ids
{ 'overleaf.history.id': historyId },
{ projection: { _id: 1 } }
)
if (!project) {
// should not happen: We flush before allowing a project to be soft-deleted.
throw new OError('mongo project not found by history id', { historyId })
}
return project._id.toString()
}
async function resolveHistoryIdToMongoProjectId(projectId) {
return projectId
}
/**
* Record that a chunk was replaced by a new one.
*
@ -554,6 +533,4 @@ module.exports = {
deleteProjectChunks,
getOldChunksBatch,
deleteOldChunks,
lookupMongoProjectIdFromHistoryId,
resolveHistoryIdToMongoProjectId,
}

View file

@ -5,10 +5,7 @@ const assert = require('../assert')
const knex = require('../knex')
const knexReadOnly = require('../knex_read_only')
const { ChunkVersionConflictError } = require('./errors')
const {
updateProjectRecord,
lookupMongoProjectIdFromHistoryId,
} = require('./mongo')
const { updateProjectRecord } = require('./mongo')
const DUPLICATE_KEY_ERROR_CODE = '23505'
@ -475,10 +472,6 @@ async function generateProjectId() {
return record.doc_id.toString()
}
async function resolveHistoryIdToMongoProjectId(projectId) {
return await lookupMongoProjectIdFromHistoryId(parseInt(projectId, 10))
}
module.exports = {
getLatestChunk,
getFirstChunkBeforeTimestamp,
@ -495,5 +488,4 @@ module.exports = {
getOldChunksBatch,
deleteOldChunks,
generateProjectId,
resolveHistoryIdToMongoProjectId,
}

View file

@ -480,12 +480,11 @@ async function getNonPersistedChanges(projectId, baseVersion) {
}
rclient.defineCommand('set_persisted_version', {
numberOfKeys: 4,
numberOfKeys: 3,
lua: `
local headVersionKey = KEYS[1]
local persistedVersionKey = KEYS[2]
local persistTimeKey = KEYS[3]
local changesKey = KEYS[4]
local changesKey = KEYS[3]
local newPersistedVersion = tonumber(ARGV[1])
local maxPersistedChanges = tonumber(ARGV[2])
@ -502,19 +501,9 @@ rclient.defineCommand('set_persisted_version', {
return 'too_low'
end
-- Refuse to set a persisted version that is higher than the head version
if newPersistedVersion > headVersion then
return 'too_high'
end
-- Set the persisted version
redis.call('SET', persistedVersionKey, newPersistedVersion)
-- Clear the persist time if the persisted version now matches the head version
if newPersistedVersion == headVersion then
redis.call('DEL', persistTimeKey)
end
-- Calculate the starting index, to keep only maxPersistedChanges beyond the persisted version
-- Using negative indexing to count backwards from the end of the list
local startIndex = newPersistedVersion - headVersion - maxPersistedChanges
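For example, with headVersion = 120, newPersistedVersion = 110 and maxPersistedChanges = 100, startIndex is 110 - 120 - 100 = -110; counting back from the end of the list, the subsequent trim presumably keeps the last 110 entries, i.e. the 10 changes that are still non-persisted plus the 100 most recently persisted ones.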
@ -541,7 +530,6 @@ async function setPersistedVersion(projectId, persistedVersion) {
const keys = [
keySchema.headVersion({ projectId }),
keySchema.persistedVersion({ projectId }),
keySchema.persistTime({ projectId }),
keySchema.changes({ projectId }),
]
@ -553,13 +541,6 @@ async function setPersistedVersion(projectId, persistedVersion) {
status,
})
if (status === 'too_high') {
throw new VersionOutOfBoundsError(
'Persisted version cannot be higher than head version',
{ projectId, persistedVersion }
)
}
return status
} catch (err) {
metrics.inc('chunk_store.redis.set_persisted_version', 1, {
@ -650,7 +631,6 @@ async function expireProject(projectId) {
metrics.inc('chunk_store.redis.set_persisted_version', 1, {
status,
})
return status
} catch (err) {
metrics.inc('chunk_store.redis.set_persisted_version', 1, {
status: 'error',

View file

@ -1,159 +0,0 @@
// @ts-check
'use strict'
const metrics = require('@overleaf/metrics')
const redisBackend = require('./chunk_store/redis')
const logger = require('@overleaf/logger')
const queueChanges = require('./queue_changes')
const persistChanges = require('./persist_changes')
const persistBuffer = require('./persist_buffer')
/**
* @typedef {import('overleaf-editor-core').Change} Change
*/
/**
* Handle incoming changes by processing them according to the specified options.
* @param {string} projectId
* @param {Change[]} changes
* @param {Object} limits
* @param {number} endVersion
* @param {Object} options
* @param {number} [options.historyBufferLevel] - The history buffer level to use for processing changes.
* @param {Boolean} [options.forcePersistBuffer] - If true, forces the buffer to be persisted before any operation.
* @return {Promise.<Object?>}
*/
async function commitChanges(
projectId,
changes,
limits,
endVersion,
options = {}
) {
const { historyBufferLevel, forcePersistBuffer } = options
// Force the buffer to be persisted if specified.
if (forcePersistBuffer) {
try {
const status = await redisBackend.expireProject(projectId) // clear the project from Redis if it is persisted, returns 'not-persisted' if it was not persisted
if (status === 'not-persisted') {
await persistBuffer(projectId, limits)
await redisBackend.expireProject(projectId) // clear the project from Redis after persisting
metrics.inc('persist_buffer_force', 1, { status: 'persisted' })
}
} catch (err) {
metrics.inc('persist_buffer_force', 1, { status: 'error' })
logger.error(
{ err, projectId },
'failed to persist buffer before committing changes'
)
}
}
metrics.inc('commit_changes', 1, {
history_buffer_level: historyBufferLevel || 0,
})
// Now handle the changes based on the configured history buffer level.
switch (historyBufferLevel) {
case 4: // Queue changes and only persist them in the background
await queueChanges(projectId, changes, endVersion)
return {}
case 3: // Queue changes and immediately persist with persistBuffer
await queueChanges(projectId, changes, endVersion)
return await persistBuffer(projectId, limits)
case 2: // Equivalent to queueChangesInRedis:true
await queueChangesFake(projectId, changes, endVersion)
return await persistChanges(projectId, changes, limits, endVersion)
case 1: // Queue changes with fake persist only for projects in redis already
await queueChangesFakeOnlyIfExists(projectId, changes, endVersion)
return await persistChanges(projectId, changes, limits, endVersion)
case 0: // Persist changes directly to the chunk store
return await persistChanges(projectId, changes, limits, endVersion)
default:
throw new Error(`Invalid history buffer level: ${historyBufferLevel}`)
}
}
/**
* Queues a set of changes in redis as if they had been persisted, ignoring any errors.
* @param {string} projectId
* @param {Change[]} changes
* @param {number} endVersion
* @param {Object} [options]
* @param {boolean} [options.onlyIfExists] - If true, only queue changes if the project
* already exists in Redis.
*/
async function queueChangesFake(projectId, changes, endVersion, options = {}) {
try {
await queueChanges(projectId, changes, endVersion)
await fakePersistRedisChanges(projectId, changes, endVersion)
} catch (err) {
logger.error({ err }, 'Chunk buffer verification failed')
}
}
/**
* Queues changes in Redis, simulating persistence, but only if the project already exists.
* @param {string} projectId - The ID of the project.
* @param {Change[]} changes - An array of changes to be queued.
* @param {number} endVersion - The expected version of the project before these changes are applied.
*/
async function queueChangesFakeOnlyIfExists(projectId, changes, endVersion) {
await queueChangesFake(projectId, changes, endVersion, {
onlyIfExists: true,
})
}
/**
* Simulates the persistence of changes by verifying a given set of changes against
* what is currently stored as non-persisted in Redis, and then updates the
* persisted version number in Redis.
*
* @async
* @param {string} projectId - The ID of the project.
* @param {Change[]} changesToPersist - An array of changes that are expected to be
* persisted. These are used for verification
* against the changes currently in Redis.
* @param {number} baseVersion - The base version number from which to calculate
* the new persisted version.
* @returns {Promise<void>} A promise that resolves when the persisted version
* in Redis has been updated.
*/
async function fakePersistRedisChanges(
projectId,
changesToPersist,
baseVersion
) {
const nonPersistedChanges = await redisBackend.getNonPersistedChanges(
projectId,
baseVersion
)
if (
serializeChanges(nonPersistedChanges) === serializeChanges(changesToPersist)
) {
metrics.inc('persist_redis_changes_verification', 1, { status: 'match' })
} else {
logger.warn({ projectId }, 'mismatch of non-persisted changes from Redis')
metrics.inc('persist_redis_changes_verification', 1, {
status: 'mismatch',
})
}
const persistedVersion = baseVersion + nonPersistedChanges.length
await redisBackend.setPersistedVersion(projectId, persistedVersion)
}
/**
* @param {Change[]} changes
*/
function serializeChanges(changes) {
return JSON.stringify(changes.map(change => change.toRaw()))
}
module.exports = commitChanges
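A usage sketch of how the removed commitChanges was driven from project_import above: the caller resolves the rollout options for the project and passes them through.

const { historyBufferLevel, forcePersistBuffer } =
  rollout.getHistoryBufferLevelOptions(projectId)

const result = await commitChanges(projectId, changes, limits, endVersion, {
  historyBufferLevel,
  forcePersistBuffer,
})
// With historyBufferLevel 4 the changes are only queued in Redis and `result`
// is an empty object; with level 0 this is equivalent to calling
// persistChanges(projectId, changes, limits, endVersion) directly.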

View file

@ -1,206 +0,0 @@
// @ts-check
'use strict'
const logger = require('@overleaf/logger')
const metrics = require('@overleaf/metrics')
const OError = require('@overleaf/o-error')
const assert = require('./assert')
const chunkStore = require('./chunk_store')
const { BlobStore } = require('./blob_store')
const BatchBlobStore = require('./batch_blob_store')
const persistChanges = require('./persist_changes')
const resyncProject = require('./resync_project')
const redisBackend = require('./chunk_store/redis')
/**
* Persist the changes from Redis buffer to the main storage
*
* Algorithm Outline:
* 1. Get the latest chunk's endVersion from the database
* 2. Get non-persisted changes from Redis that are after this endVersion.
* 3. If no such changes, exit.
* 4. Load file blobs for these Redis changes.
* 5. Run the persistChanges() algorithm to store these changes into a new chunk(s) in GCS.
* - This must not decrease the endVersion. If changes were processed, it must advance.
* 6. Set the new persisted version (endVersion of the latest persisted chunk) in Redis.
*
* @param {string} projectId
* @param {Object} limits
* @throws {Error | OError} If a critical error occurs during persistence.
*/
async function persistBuffer(projectId, limits) {
assert.projectId(projectId)
logger.debug({ projectId }, 'starting persistBuffer operation')
// 1. Get the latest chunk's endVersion from GCS/main store
let endVersion
const latestChunkMetadata = await chunkStore.getLatestChunkMetadata(projectId)
if (latestChunkMetadata) {
endVersion = latestChunkMetadata.endVersion
} else {
endVersion = 0 // No chunks found, start from version 0
logger.debug({ projectId }, 'no existing chunks found in main storage')
}
logger.debug({ projectId, endVersion }, 'got latest persisted chunk')
// 2. Get non-persisted changes from Redis
const changesToPersist = await redisBackend.getNonPersistedChanges(
projectId,
endVersion
)
if (changesToPersist.length === 0) {
logger.debug(
{ projectId, endVersion },
'no new changes in Redis buffer to persist'
)
metrics.inc('persist_buffer', 1, { status: 'no_changes' })
    // 3. No new changes to persist; update the persisted version in Redis
// to match the current endVersion. This shouldn't be needed
// unless a worker failed to update the persisted version.
await redisBackend.setPersistedVersion(projectId, endVersion)
const { chunk } = await chunkStore.loadByChunkRecord(
projectId,
latestChunkMetadata
)
// Return the result in the same format as persistChanges
// so that the caller can handle it uniformly.
return {
numberOfChangesPersisted: changesToPersist.length,
originalEndVersion: endVersion,
currentChunk: chunk,
}
}
logger.debug(
{
projectId,
endVersion,
count: changesToPersist.length,
},
'found changes in Redis to persist'
)
// 4. Load file blobs for these Redis changes. Errors will propagate.
const blobStore = new BlobStore(projectId)
const batchBlobStore = new BatchBlobStore(blobStore)
const blobHashes = new Set()
for (const change of changesToPersist) {
change.findBlobHashes(blobHashes)
}
if (blobHashes.size > 0) {
await batchBlobStore.preload(Array.from(blobHashes))
}
for (const change of changesToPersist) {
await change.loadFiles('lazy', blobStore)
}
// 5. Run the persistChanges() algorithm. Errors will propagate.
logger.debug(
{
projectId,
endVersion,
changeCount: changesToPersist.length,
},
'calling persistChanges'
)
const persistResult = await persistChanges(
projectId,
changesToPersist,
limits,
endVersion
)
if (!persistResult || !persistResult.currentChunk) {
metrics.inc('persist_buffer', 1, { status: 'no-chunk-error' })
throw new OError(
'persistChanges did not produce a new chunk for non-empty changes',
{
projectId,
endVersion,
changeCount: changesToPersist.length,
}
)
}
const newPersistedChunk = persistResult.currentChunk
const newEndVersion = newPersistedChunk.getEndVersion()
if (newEndVersion <= endVersion) {
metrics.inc('persist_buffer', 1, { status: 'chunk-version-mismatch' })
throw new OError(
'persisted chunk endVersion must be greater than current persisted chunk end version for non-empty changes',
{
projectId,
newEndVersion,
endVersion,
changeCount: changesToPersist.length,
}
)
}
logger.debug(
{
projectId,
oldVersion: endVersion,
newVersion: newEndVersion,
},
'successfully persisted changes from Redis to main storage'
)
// 6. Set the persisted version in Redis. Errors will propagate.
const status = await redisBackend.setPersistedVersion(
projectId,
newEndVersion
)
if (status !== 'ok') {
metrics.inc('persist_buffer', 1, { status: 'error-on-persisted-version' })
throw new OError('failed to update persisted version in Redis', {
projectId,
newEndVersion,
status,
})
}
logger.debug(
{ projectId, newEndVersion },
'updated persisted version in Redis'
)
// 7. Resync the project if content hash validation failed
if (limits.autoResync && persistResult.resyncNeeded) {
if (
changesToPersist.some(
change => change.getOrigin()?.getKind() === 'history-resync'
)
) {
// To avoid an infinite loop, do not resync if the current batch of
// changes contains a history resync.
logger.warn(
{ projectId },
'content hash validation failed while persisting a history resync, skipping additional resync'
)
} else {
const backend = chunkStore.getBackend(projectId)
const mongoProjectId =
await backend.resolveHistoryIdToMongoProjectId(projectId)
await resyncProject(mongoProjectId)
}
}
logger.debug(
{ projectId, finalPersistedVersion: newEndVersion },
'persistBuffer operation completed successfully'
)
metrics.inc('persist_buffer', 1, { status: 'persisted' })
return persistResult
}
module.exports = persistBuffer
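
To tie the outline above together, a minimal sketch of how persistBuffer might be invoked by a background worker. Apart from autoResync, which step 7 reads, the shape of the limits object and the require path are assumptions for illustration:

// Hypothetical caller sketch: only `autoResync` is taken from the code above;
// any other limit fields honoured by persistChanges() are not shown in this diff.
const persistBuffer = require('./persist_buffer') // require path assumed

async function persistProjectBuffer(projectId) {
  const limits = { autoResync: true }
  const result = await persistBuffer(projectId, limits)
  // Same result shape whether changes were persisted or the buffer was already empty.
  return {
    persisted: result.numberOfChangesPersisted,
    endVersion: result.currentChunk.getEndVersion(),
  }
}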

View file

@ -4,6 +4,7 @@
const _ = require('lodash')
const logger = require('@overleaf/logger')
const metrics = require('@overleaf/metrics')
const core = require('overleaf-editor-core')
const Chunk = core.Chunk
@ -14,6 +15,7 @@ const chunkStore = require('./chunk_store')
const { BlobStore } = require('./blob_store')
const { InvalidChangeError } = require('./errors')
const { getContentHash } = require('./content_hash')
const redisBackend = require('./chunk_store/redis')
function countChangeBytes(change) {
// Note: This is not quite accurate, because the raw change may contain raw
@ -200,6 +202,45 @@ async function persistChanges(projectId, allChanges, limits, clientEndVersion) {
currentSnapshot.applyAll(currentChunk.getChanges())
}
async function queueChangesInRedis() {
const hollowSnapshot = currentSnapshot.clone()
// We're transforming a lazy snapshot to a hollow snapshot, so loadFiles()
// doesn't really need a blobStore, but its signature still requires it.
const blobStore = new BlobStore(projectId)
await hollowSnapshot.loadFiles('hollow', blobStore)
hollowSnapshot.applyAll(changesToPersist, { strict: true })
const baseVersion = currentChunk.getEndVersion()
await redisBackend.queueChanges(
projectId,
hollowSnapshot,
baseVersion,
changesToPersist
)
}
async function fakePersistRedisChanges() {
const baseVersion = currentChunk.getEndVersion()
const nonPersistedChanges = await redisBackend.getNonPersistedChanges(
projectId,
baseVersion
)
if (
serializeChanges(nonPersistedChanges) ===
serializeChanges(changesToPersist)
) {
metrics.inc('persist_redis_changes_verification', 1, { status: 'match' })
} else {
logger.warn({ projectId }, 'mismatch of non-persisted changes from Redis')
metrics.inc('persist_redis_changes_verification', 1, {
status: 'mismatch',
})
}
const persistedVersion = baseVersion + nonPersistedChanges.length
await redisBackend.setPersistedVersion(projectId, persistedVersion)
}
async function extendLastChunkIfPossible() {
const timer = new Timer()
const changesPushed = await fillChunk(currentChunk, changesToPersist)
@ -248,6 +289,12 @@ async function persistChanges(projectId, allChanges, limits, clientEndVersion) {
const numberOfChangesToPersist = oldChanges.length
await loadLatestChunk()
try {
await queueChangesInRedis()
await fakePersistRedisChanges()
} catch (err) {
logger.error({ err }, 'Chunk buffer verification failed')
}
await extendLastChunkIfPossible()
await createNewChunksAsNeeded()
@ -262,4 +309,11 @@ async function persistChanges(projectId, allChanges, limits, clientEndVersion) {
}
}
/**
* @param {core.Change[]} changes
*/
function serializeChanges(changes) {
return JSON.stringify(changes.map(change => change.toRaw()))
}
module.exports = persistChanges
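
The try/catch added above makes the Redis shadow write strictly best-effort. A minimal distillation of that ordering, using hypothetical stand-in functions rather than the real helpers:

// Hypothetical distillation of the pattern added to persistChanges() above: the
// Redis queue/verify steps may fail and are only logged, while chunk persistence
// remains the source of truth and always runs.
async function persistWithShadowWrite({ shadowWriteToRedis, persistToChunks, logger }) {
  try {
    await shadowWriteToRedis() // queueChangesInRedis() + fakePersistRedisChanges()
  } catch (err) {
    logger.error({ err }, 'Chunk buffer verification failed') // logged, not rethrown
  }
  return await persistToChunks() // extendLastChunkIfPossible() + createNewChunksAsNeeded()
}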

View file

@ -1,75 +0,0 @@
// @ts-check
'use strict'
const redisBackend = require('./chunk_store/redis')
const { BlobStore } = require('./blob_store')
const chunkStore = require('./chunk_store')
const core = require('overleaf-editor-core')
const Chunk = core.Chunk
/**
* Queues an incoming set of changes after validating them against the current snapshot.
*
* @async
* @function queueChanges
* @param {string} projectId - The project to queue changes for.
* @param {Array<Object>} changesToQueue - An array of change objects to be applied and queued.
* @param {number} endVersion - The expected version of the project before these changes are applied.
* This is used for optimistic concurrency control.
* @param {Object} [opts] - Additional options for queuing changes.
* @throws {Chunk.ConflictingEndVersion} If the provided `endVersion` does not match the
* current version of the project.
* @returns {Promise<any>} A promise that resolves with the status returned by the
* `redisBackend.queueChanges` operation.
*/
async function queueChanges(projectId, changesToQueue, endVersion, opts) {
const result = await redisBackend.getHeadSnapshot(projectId)
let currentSnapshot = null
let currentVersion = null
if (result) {
// If we have a snapshot in redis, we can use it to check the current state
// of the project and apply changes to it.
currentSnapshot = result.snapshot
currentVersion = result.version
} else {
// Otherwise, load the latest chunk from the chunk store.
const latestChunk = await chunkStore.loadLatest(projectId, {
persistedOnly: true,
})
// Throw an error if no latest chunk is found, indicating the project has not been initialised.
if (!latestChunk) {
throw new Chunk.NotFoundError(projectId)
}
currentSnapshot = latestChunk.getSnapshot()
currentSnapshot.applyAll(latestChunk.getChanges())
currentVersion = latestChunk.getEndVersion()
}
// Ensure the endVersion matches the current version of the project.
if (endVersion !== currentVersion) {
throw new Chunk.ConflictingEndVersion(endVersion, currentVersion)
}
// Compute the new hollow snapshot to be saved to redis.
const hollowSnapshot = currentSnapshot
const blobStore = new BlobStore(projectId)
await hollowSnapshot.loadFiles('hollow', blobStore)
// Clone the changes to avoid modifying the original ones when computing the hollow snapshot.
const hollowChanges = changesToQueue.map(change => change.clone())
for (const change of hollowChanges) {
await change.loadFiles('hollow', blobStore)
}
hollowSnapshot.applyAll(hollowChanges, { strict: true })
const baseVersion = currentVersion
const status = await redisBackend.queueChanges(
projectId,
hollowSnapshot,
baseVersion,
changesToQueue,
opts
)
return status
}
module.exports = queueChanges
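
Finally, a minimal sketch of a caller handling the optimistic concurrency check described in the JSDoc above. Only Chunk.ConflictingEndVersion comes from this module; the conflict-handling strategy and the require path are assumptions:

// Hypothetical caller sketch: a real caller would rebase the changes onto the
// current end version and resubmit, which is outside the scope of this diff.
const core = require('overleaf-editor-core')
const Chunk = core.Chunk
const queueChanges = require('./queue_changes') // require path assumed

async function queueClientChanges(projectId, changes, clientEndVersion) {
  try {
    const status = await queueChanges(projectId, changes, clientEndVersion)
    return { status }
  } catch (err) {
    if (err instanceof Chunk.ConflictingEndVersion) {
      // The project advanced past clientEndVersion; signal a conflict to the caller.
      return { status: 'conflict' }
    }
    throw err
  }
}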

Some files were not shown because too many files have changed in this diff.