mirror of
https://github.com/yu-i-i/overleaf-cep.git
synced 2025-08-05 08:00:05 +02:00
Compare commits
47 commits
ext-ce
...
v5.5.0-ext
Author | SHA1 | Date | |
---|---|---|---|
![]() |
7ca63b2754 | ||
![]() |
df3d39972e | ||
![]() |
ef6d90f62a | ||
![]() |
20dcd8e86f | ||
![]() |
f225e2c531 | ||
23249a427e | |||
![]() |
942125b199 | ||
![]() |
8c027569b1 | ||
![]() |
5a46224b9d | ||
![]() |
bd947329dc | ||
![]() |
d3e5cf92c6 | ||
![]() |
3adbf60d70 | ||
![]() |
9fd3485d3e | ||
![]() |
f54cafb39f | ||
![]() |
1de1914025 | ||
![]() |
1b107cc9ff | ||
![]() |
e178d934d5 | ||
![]() |
1eeda6fc7a | ||
![]() |
72421492b4 | ||
![]() |
6609d893b1 | ||
![]() |
72073f6f51 | ||
![]() |
de18677136 | ||
![]() |
0b60c26765 | ||
![]() |
da2e5fc5b4 | ||
![]() |
2cc359c824 | ||
![]() |
157ad24cc6 | ||
![]() |
60cca48561 | ||
![]() |
4d3ef529d7 | ||
![]() |
8eaf6950c6 | ||
![]() |
b407700a55 | ||
![]() |
400d800a1a | ||
![]() |
fd9c29ba09 | ||
![]() |
21b20cd86d | ||
![]() |
0371e8d956 | ||
![]() |
b993eda1f1 | ||
![]() |
8590e180d3 | ||
![]() |
5a6ed3334f | ||
![]() |
320999b0d8 | ||
![]() |
fcb5926db8 | ||
![]() |
04e0acbce5 | ||
![]() |
4df5135936 | ||
![]() |
504590d129 | ||
![]() |
6f8c951b7d | ||
![]() |
6282e4b0eb | ||
![]() |
928a514705 | ||
![]() |
884e7d81c8 | ||
![]() |
1c499496c6 |
1417 changed files with 35452 additions and 66885 deletions
|
@ -1,12 +1,3 @@
|
||||||
---
|
|
||||||
name: Bug report
|
|
||||||
about: Report a bug
|
|
||||||
title: ''
|
|
||||||
labels: type:bug
|
|
||||||
assignees: ''
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
<!--
|
<!--
|
||||||
|
|
||||||
Note: If you are using www.overleaf.com and have a problem,
|
Note: If you are using www.overleaf.com and have a problem,
|
|
@ -37,12 +37,6 @@ The present "extended" version of Overleaf CE includes:
|
||||||
- Symbol Palette
|
- Symbol Palette
|
||||||
- "From External URL" feature
|
- "From External URL" feature
|
||||||
|
|
||||||
> [!CAUTION]
|
|
||||||
> Overleaf Community Edition is intended for use in environments where **all** users are trusted. Community Edition is **not** appropriate for scenarios where isolation of users is required due to Sandbox Compiles not being available. When not using Sandboxed Compiles, users have full read and write access to the `sharelatex` container resources (filesystem, network, environment variables) when running LaTeX compiles.
|
|
||||||
Therefore, in any environment where not all users can be fully trusted, it is strongly recommended to enable the Sandboxed Compiles feature available in the Extended Community Edition.
|
|
||||||
|
|
||||||
For more information on Sandbox Compiles check out Overleaf [documentation](https://docs.overleaf.com/on-premises/configuration/overleaf-toolkit/server-pro-only-configuration/sandboxed-compiles).
|
|
||||||
|
|
||||||
## Enterprise
|
## Enterprise
|
||||||
|
|
||||||
If you want help installing and maintaining Overleaf in your lab or workplace, Overleaf offers an officially supported version called [Overleaf Server Pro](https://www.overleaf.com/for/enterprises).
|
If you want help installing and maintaining Overleaf in your lab or workplace, Overleaf offers an officially supported version called [Overleaf Server Pro](https://www.overleaf.com/for/enterprises).
|
||||||
|
|
|
@ -42,7 +42,7 @@ To do this, use the included `bin/dev` script:
|
||||||
bin/dev
|
bin/dev
|
||||||
```
|
```
|
||||||
|
|
||||||
This will start all services using `node --watch`, which will automatically monitor the code and restart the services as necessary.
|
This will start all services using `nodemon`, which will automatically monitor the code and restart the services as necessary.
|
||||||
|
|
||||||
To improve performance, you can start only a subset of the services in development mode by providing a space-separated list to the `bin/dev` script:
|
To improve performance, you can start only a subset of the services in development mode by providing a space-separated list to the `bin/dev` script:
|
||||||
|
|
||||||
|
|
|
@ -6,18 +6,15 @@ DOCUMENT_UPDATER_HOST=document-updater
|
||||||
FILESTORE_HOST=filestore
|
FILESTORE_HOST=filestore
|
||||||
GRACEFUL_SHUTDOWN_DELAY_SECONDS=0
|
GRACEFUL_SHUTDOWN_DELAY_SECONDS=0
|
||||||
HISTORY_V1_HOST=history-v1
|
HISTORY_V1_HOST=history-v1
|
||||||
HISTORY_REDIS_HOST=redis
|
|
||||||
LISTEN_ADDRESS=0.0.0.0
|
LISTEN_ADDRESS=0.0.0.0
|
||||||
MONGO_HOST=mongo
|
MONGO_HOST=mongo
|
||||||
MONGO_URL=mongodb://mongo/sharelatex?directConnection=true
|
MONGO_URL=mongodb://mongo/sharelatex?directConnection=true
|
||||||
NOTIFICATIONS_HOST=notifications
|
NOTIFICATIONS_HOST=notifications
|
||||||
PROJECT_HISTORY_HOST=project-history
|
PROJECT_HISTORY_HOST=project-history
|
||||||
QUEUES_REDIS_HOST=redis
|
|
||||||
REALTIME_HOST=real-time
|
REALTIME_HOST=real-time
|
||||||
REDIS_HOST=redis
|
REDIS_HOST=redis
|
||||||
REFERENCES_HOST=references
|
REFERENCES_HOST=references
|
||||||
SESSION_SECRET=foo
|
SESSION_SECRET=foo
|
||||||
V1_HISTORY_HOST=history-v1
|
|
||||||
WEBPACK_HOST=webpack
|
WEBPACK_HOST=webpack
|
||||||
WEB_API_PASSWORD=overleaf
|
WEB_API_PASSWORD=overleaf
|
||||||
WEB_API_USER=overleaf
|
WEB_API_USER=overleaf
|
||||||
|
|
|
@ -124,7 +124,7 @@ services:
|
||||||
- ../services/references/app.js:/overleaf/services/references/app.js
|
- ../services/references/app.js:/overleaf/services/references/app.js
|
||||||
|
|
||||||
web:
|
web:
|
||||||
command: ["node", "--watch", "app.mjs", "--watch-locales"]
|
command: ["node", "--watch", "app.js", "--watch-locales"]
|
||||||
environment:
|
environment:
|
||||||
- NODE_OPTIONS=--inspect=0.0.0.0:9229
|
- NODE_OPTIONS=--inspect=0.0.0.0:9229
|
||||||
ports:
|
ports:
|
||||||
|
|
|
@ -25,10 +25,10 @@ services:
|
||||||
env_file:
|
env_file:
|
||||||
- dev.env
|
- dev.env
|
||||||
environment:
|
environment:
|
||||||
|
- DOCKER_RUNNER=true
|
||||||
- TEXLIVE_IMAGE=texlive-full # docker build texlive -t texlive-full
|
- TEXLIVE_IMAGE=texlive-full # docker build texlive -t texlive-full
|
||||||
- SANDBOXED_COMPILES=true
|
- COMPILES_HOST_DIR=${PWD}/compiles
|
||||||
- SANDBOXED_COMPILES_HOST_DIR_COMPILES=${PWD}/compiles
|
- OUTPUT_HOST_DIR=${PWD}/output
|
||||||
- SANDBOXED_COMPILES_HOST_DIR_OUTPUT=${PWD}/output
|
|
||||||
user: root
|
user: root
|
||||||
volumes:
|
volumes:
|
||||||
- ${PWD}/compiles:/overleaf/services/clsi/compiles
|
- ${PWD}/compiles:/overleaf/services/clsi/compiles
|
||||||
|
|
BIN
doc/logo.png
BIN
doc/logo.png
Binary file not shown.
Before Width: | Height: | Size: 13 KiB After Width: | Height: | Size: 71 KiB |
|
@ -1,3 +1,4 @@
|
||||||
|
version: '2.2'
|
||||||
services:
|
services:
|
||||||
sharelatex:
|
sharelatex:
|
||||||
restart: always
|
restart: always
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
22.17.0
|
22.15.1
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
access-token-encryptor
|
access-token-encryptor
|
||||||
--dependencies=None
|
--dependencies=None
|
||||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
--docker-repos=gcr.io/overleaf-ops
|
||||||
--env-add=
|
--env-add=
|
||||||
--env-pass-through=
|
--env-pass-through=
|
||||||
--esmock-loader=False
|
--esmock-loader=False
|
||||||
--is-library=True
|
--is-library=True
|
||||||
--node-version=22.17.0
|
--node-version=22.15.1
|
||||||
--public-repo=False
|
--public-repo=False
|
||||||
--script-version=4.7.0
|
--script-version=4.7.0
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
22.17.0
|
22.15.1
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
fetch-utils
|
fetch-utils
|
||||||
--dependencies=None
|
--dependencies=None
|
||||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
--docker-repos=gcr.io/overleaf-ops
|
||||||
--env-add=
|
--env-add=
|
||||||
--env-pass-through=
|
--env-pass-through=
|
||||||
--esmock-loader=False
|
--esmock-loader=False
|
||||||
--is-library=True
|
--is-library=True
|
||||||
--node-version=22.17.0
|
--node-version=22.15.1
|
||||||
--public-repo=False
|
--public-repo=False
|
||||||
--script-version=4.7.0
|
--script-version=4.7.0
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
22.17.0
|
22.15.1
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
logger
|
logger
|
||||||
--dependencies=None
|
--dependencies=None
|
||||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
--docker-repos=gcr.io/overleaf-ops
|
||||||
--env-add=
|
--env-add=
|
||||||
--env-pass-through=
|
--env-pass-through=
|
||||||
--esmock-loader=False
|
--esmock-loader=False
|
||||||
--is-library=True
|
--is-library=True
|
||||||
--node-version=22.17.0
|
--node-version=22.15.1
|
||||||
--public-repo=False
|
--public-repo=False
|
||||||
--script-version=4.7.0
|
--script-version=4.7.0
|
||||||
|
|
|
@ -11,7 +11,7 @@ const LoggingManager = {
|
||||||
/**
|
/**
|
||||||
* @param {string} name - The name of the logger
|
* @param {string} name - The name of the logger
|
||||||
*/
|
*/
|
||||||
initialize(name, options = {}) {
|
initialize(name) {
|
||||||
this.isProduction =
|
this.isProduction =
|
||||||
(process.env.NODE_ENV || '').toLowerCase() === 'production'
|
(process.env.NODE_ENV || '').toLowerCase() === 'production'
|
||||||
const isTest = (process.env.NODE_ENV || '').toLowerCase() === 'test'
|
const isTest = (process.env.NODE_ENV || '').toLowerCase() === 'test'
|
||||||
|
@ -27,7 +27,7 @@ const LoggingManager = {
|
||||||
req: Serializers.req,
|
req: Serializers.req,
|
||||||
res: Serializers.res,
|
res: Serializers.res,
|
||||||
},
|
},
|
||||||
streams: options.streams ?? [this._getOutputStreamConfig()],
|
streams: [this._getOutputStreamConfig()],
|
||||||
})
|
})
|
||||||
this._setupRingBuffer()
|
this._setupRingBuffer()
|
||||||
this._setupLogLevelChecker()
|
this._setupLogLevelChecker()
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
22.17.0
|
22.15.1
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
metrics
|
metrics
|
||||||
--dependencies=None
|
--dependencies=None
|
||||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
--docker-repos=gcr.io/overleaf-ops
|
||||||
--env-add=
|
--env-add=
|
||||||
--env-pass-through=
|
--env-pass-through=
|
||||||
--esmock-loader=False
|
--esmock-loader=False
|
||||||
--is-library=True
|
--is-library=True
|
||||||
--node-version=22.17.0
|
--node-version=22.15.1
|
||||||
--public-repo=False
|
--public-repo=False
|
||||||
--script-version=4.7.0
|
--script-version=4.7.0
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
22.17.0
|
22.15.1
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
mongo-utils
|
mongo-utils
|
||||||
--dependencies=None
|
--dependencies=None
|
||||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
--docker-repos=gcr.io/overleaf-ops
|
||||||
--env-add=
|
--env-add=
|
||||||
--env-pass-through=
|
--env-pass-through=
|
||||||
--esmock-loader=False
|
--esmock-loader=False
|
||||||
--is-library=True
|
--is-library=True
|
||||||
--node-version=22.17.0
|
--node-version=22.15.1
|
||||||
--public-repo=False
|
--public-repo=False
|
||||||
--script-version=4.7.0
|
--script-version=4.7.0
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
22.17.0
|
22.15.1
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
o-error
|
o-error
|
||||||
--dependencies=None
|
--dependencies=None
|
||||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
--docker-repos=gcr.io/overleaf-ops
|
||||||
--env-add=
|
--env-add=
|
||||||
--env-pass-through=
|
--env-pass-through=
|
||||||
--esmock-loader=False
|
--esmock-loader=False
|
||||||
--is-library=True
|
--is-library=True
|
||||||
--node-version=22.17.0
|
--node-version=22.15.1
|
||||||
--public-repo=False
|
--public-repo=False
|
||||||
--script-version=4.7.0
|
--script-version=4.7.0
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
22.17.0
|
22.15.1
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
object-persistor
|
object-persistor
|
||||||
--dependencies=None
|
--dependencies=None
|
||||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
--docker-repos=gcr.io/overleaf-ops
|
||||||
--env-add=
|
--env-add=
|
||||||
--env-pass-through=
|
--env-pass-through=
|
||||||
--esmock-loader=False
|
--esmock-loader=False
|
||||||
--is-library=True
|
--is-library=True
|
||||||
--node-version=22.17.0
|
--node-version=22.15.1
|
||||||
--public-repo=False
|
--public-repo=False
|
||||||
--script-version=4.7.0
|
--script-version=4.7.0
|
||||||
|
|
|
@ -86,7 +86,7 @@ module.exports = class FSPersistor extends AbstractPersistor {
|
||||||
metric: 'fs.ingress', // ingress to us from disk
|
metric: 'fs.ingress', // ingress to us from disk
|
||||||
bucket: location,
|
bucket: location,
|
||||||
})
|
})
|
||||||
const fsPath = this._getFsPath(location, name, opts.useSubdirectories)
|
const fsPath = this._getFsPath(location, name)
|
||||||
|
|
||||||
try {
|
try {
|
||||||
opts.fd = await fsPromises.open(fsPath, 'r')
|
opts.fd = await fsPromises.open(fsPath, 'r')
|
||||||
|
@ -295,9 +295,9 @@ module.exports = class FSPersistor extends AbstractPersistor {
|
||||||
await fsPromises.rm(dirPath, { force: true, recursive: true })
|
await fsPromises.rm(dirPath, { force: true, recursive: true })
|
||||||
}
|
}
|
||||||
|
|
||||||
_getFsPath(location, key, useSubdirectories = false) {
|
_getFsPath(location, key) {
|
||||||
key = key.replace(/\/$/, '')
|
key = key.replace(/\/$/, '')
|
||||||
if (!this.useSubdirectories && !useSubdirectories) {
|
if (!this.useSubdirectories) {
|
||||||
key = key.replace(/\//g, '_')
|
key = key.replace(/\//g, '_')
|
||||||
}
|
}
|
||||||
return Path.join(location, key)
|
return Path.join(location, key)
|
||||||
|
|
|
@ -33,10 +33,6 @@ const AES256_KEY_LENGTH = 32
|
||||||
* @property {() => Promise<Array<RootKeyEncryptionKey>>} getRootKeyEncryptionKeys
|
* @property {() => Promise<Array<RootKeyEncryptionKey>>} getRootKeyEncryptionKeys
|
||||||
*/
|
*/
|
||||||
|
|
||||||
/**
|
|
||||||
* @typedef {import('./types').ListDirectoryResult} ListDirectoryResult
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Helper function to make TS happy when accessing error properties
|
* Helper function to make TS happy when accessing error properties
|
||||||
* AWSError is not an actual class, so we cannot use instanceof.
|
* AWSError is not an actual class, so we cannot use instanceof.
|
||||||
|
@ -395,9 +391,9 @@ class PerProjectEncryptedS3Persistor extends S3Persistor {
|
||||||
* A general "cache" for project keys is another alternative. For now, use a helper class.
|
* A general "cache" for project keys is another alternative. For now, use a helper class.
|
||||||
*/
|
*/
|
||||||
class CachedPerProjectEncryptedS3Persistor {
|
class CachedPerProjectEncryptedS3Persistor {
|
||||||
/** @type SSECOptions */
|
/** @type SSECOptions */
|
||||||
#projectKeyOptions
|
#projectKeyOptions
|
||||||
/** @type PerProjectEncryptedS3Persistor */
|
/** @type PerProjectEncryptedS3Persistor */
|
||||||
#parent
|
#parent
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -428,16 +424,6 @@ class CachedPerProjectEncryptedS3Persistor {
|
||||||
return await this.#parent.getObjectSize(bucketName, path)
|
return await this.#parent.getObjectSize(bucketName, path)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
*
|
|
||||||
* @param {string} bucketName
|
|
||||||
* @param {string} path
|
|
||||||
* @return {Promise<ListDirectoryResult>}
|
|
||||||
*/
|
|
||||||
async listDirectory(bucketName, path) {
|
|
||||||
return await this.#parent.listDirectory(bucketName, path)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {string} bucketName
|
* @param {string} bucketName
|
||||||
* @param {string} path
|
* @param {string} path
|
||||||
|
|
|
@ -20,18 +20,6 @@ const { URL } = require('node:url')
|
||||||
const { WriteError, ReadError, NotFoundError } = require('./Errors')
|
const { WriteError, ReadError, NotFoundError } = require('./Errors')
|
||||||
const zlib = require('node:zlib')
|
const zlib = require('node:zlib')
|
||||||
|
|
||||||
/**
|
|
||||||
* @typedef {import('aws-sdk/clients/s3').ListObjectsV2Output} ListObjectsV2Output
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @typedef {import('aws-sdk/clients/s3').Object} S3Object
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @typedef {import('./types').ListDirectoryResult} ListDirectoryResult
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Wrapper with private fields to avoid revealing them on console, JSON.stringify or similar.
|
* Wrapper with private fields to avoid revealing them on console, JSON.stringify or similar.
|
||||||
*/
|
*/
|
||||||
|
@ -278,12 +266,26 @@ class S3Persistor extends AbstractPersistor {
|
||||||
* @return {Promise<void>}
|
* @return {Promise<void>}
|
||||||
*/
|
*/
|
||||||
async deleteDirectory(bucketName, key, continuationToken) {
|
async deleteDirectory(bucketName, key, continuationToken) {
|
||||||
const { contents, response } = await this.listDirectory(
|
let response
|
||||||
bucketName,
|
const options = { Bucket: bucketName, Prefix: key }
|
||||||
key,
|
if (continuationToken) {
|
||||||
continuationToken
|
options.ContinuationToken = continuationToken
|
||||||
)
|
}
|
||||||
const objects = contents.map(item => ({ Key: item.Key || '' }))
|
|
||||||
|
try {
|
||||||
|
response = await this._getClientForBucket(bucketName)
|
||||||
|
.listObjectsV2(options)
|
||||||
|
.promise()
|
||||||
|
} catch (err) {
|
||||||
|
throw PersistorHelper.wrapError(
|
||||||
|
err,
|
||||||
|
'failed to list objects in S3',
|
||||||
|
{ bucketName, key },
|
||||||
|
ReadError
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const objects = response.Contents?.map(item => ({ Key: item.Key || '' }))
|
||||||
if (objects?.length) {
|
if (objects?.length) {
|
||||||
try {
|
try {
|
||||||
await this._getClientForBucket(bucketName)
|
await this._getClientForBucket(bucketName)
|
||||||
|
@ -314,36 +316,6 @@ class S3Persistor extends AbstractPersistor {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
*
|
|
||||||
* @param {string} bucketName
|
|
||||||
* @param {string} key
|
|
||||||
* @param {string} [continuationToken]
|
|
||||||
* @return {Promise<ListDirectoryResult>}
|
|
||||||
*/
|
|
||||||
async listDirectory(bucketName, key, continuationToken) {
|
|
||||||
let response
|
|
||||||
const options = { Bucket: bucketName, Prefix: key }
|
|
||||||
if (continuationToken) {
|
|
||||||
options.ContinuationToken = continuationToken
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
response = await this._getClientForBucket(bucketName)
|
|
||||||
.listObjectsV2(options)
|
|
||||||
.promise()
|
|
||||||
} catch (err) {
|
|
||||||
throw PersistorHelper.wrapError(
|
|
||||||
err,
|
|
||||||
'failed to list objects in S3',
|
|
||||||
{ bucketName, key },
|
|
||||||
ReadError
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
return { contents: response.Contents ?? [], response }
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {string} bucketName
|
* @param {string} bucketName
|
||||||
* @param {string} key
|
* @param {string} key
|
||||||
|
|
6
libraries/object-persistor/src/types.d.ts
vendored
6
libraries/object-persistor/src/types.d.ts
vendored
|
@ -1,6 +0,0 @@
|
||||||
import type { ListObjectsV2Output, Object } from 'aws-sdk/clients/s3'
|
|
||||||
|
|
||||||
export type ListDirectoryResult = {
|
|
||||||
contents: Array<Object>
|
|
||||||
response: ListObjectsV2Output
|
|
||||||
}
|
|
|
@ -1 +1 @@
|
||||||
22.17.0
|
22.15.1
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
overleaf-editor-core
|
overleaf-editor-core
|
||||||
--dependencies=None
|
--dependencies=None
|
||||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
--docker-repos=gcr.io/overleaf-ops
|
||||||
--env-add=
|
--env-add=
|
||||||
--env-pass-through=
|
--env-pass-through=
|
||||||
--esmock-loader=False
|
--esmock-loader=False
|
||||||
--is-library=True
|
--is-library=True
|
||||||
--node-version=22.17.0
|
--node-version=22.15.1
|
||||||
--public-repo=False
|
--public-repo=False
|
||||||
--script-version=4.7.0
|
--script-version=4.7.0
|
||||||
|
|
|
@ -13,7 +13,7 @@ const V2DocVersions = require('./v2_doc_versions')
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @import Author from "./author"
|
* @import Author from "./author"
|
||||||
* @import { BlobStore, RawChange, ReadonlyBlobStore } from "./types"
|
* @import { BlobStore, RawChange } from "./types"
|
||||||
*/
|
*/
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -219,7 +219,7 @@ class Change {
|
||||||
* If this Change contains any File objects, load them.
|
* If this Change contains any File objects, load them.
|
||||||
*
|
*
|
||||||
* @param {string} kind see {File#load}
|
* @param {string} kind see {File#load}
|
||||||
* @param {ReadonlyBlobStore} blobStore
|
* @param {BlobStore} blobStore
|
||||||
* @return {Promise<void>}
|
* @return {Promise<void>}
|
||||||
*/
|
*/
|
||||||
async loadFiles(kind, blobStore) {
|
async loadFiles(kind, blobStore) {
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
// @ts-check
|
// @ts-check
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @import { ClearTrackingPropsRawData, TrackingDirective } from '../types'
|
* @import { ClearTrackingPropsRawData } from '../types'
|
||||||
*/
|
*/
|
||||||
|
|
||||||
class ClearTrackingProps {
|
class ClearTrackingProps {
|
||||||
|
@ -11,27 +11,12 @@ class ClearTrackingProps {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {any} other
|
* @param {any} other
|
||||||
* @returns {other is ClearTrackingProps}
|
* @returns {boolean}
|
||||||
*/
|
*/
|
||||||
equals(other) {
|
equals(other) {
|
||||||
return other instanceof ClearTrackingProps
|
return other instanceof ClearTrackingProps
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {TrackingDirective} other
|
|
||||||
* @returns {other is ClearTrackingProps}
|
|
||||||
*/
|
|
||||||
canMergeWith(other) {
|
|
||||||
return other instanceof ClearTrackingProps
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {TrackingDirective} other
|
|
||||||
*/
|
|
||||||
mergeWith(other) {
|
|
||||||
return this
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @returns {ClearTrackingPropsRawData}
|
* @returns {ClearTrackingPropsRawData}
|
||||||
*/
|
*/
|
||||||
|
|
|
@ -11,7 +11,7 @@ const EditOperation = require('../operation/edit_operation')
|
||||||
const EditOperationBuilder = require('../operation/edit_operation_builder')
|
const EditOperationBuilder = require('../operation/edit_operation_builder')
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @import { BlobStore, ReadonlyBlobStore, RangesBlob, RawHashFileData, RawLazyStringFileData } from '../types'
|
* @import { BlobStore, ReadonlyBlobStore, RangesBlob, RawFileData, RawLazyStringFileData } from '../types'
|
||||||
*/
|
*/
|
||||||
|
|
||||||
class LazyStringFileData extends FileData {
|
class LazyStringFileData extends FileData {
|
||||||
|
@ -159,11 +159,11 @@ class LazyStringFileData extends FileData {
|
||||||
|
|
||||||
/** @inheritdoc
|
/** @inheritdoc
|
||||||
* @param {BlobStore} blobStore
|
* @param {BlobStore} blobStore
|
||||||
* @return {Promise<RawHashFileData>}
|
* @return {Promise<RawFileData>}
|
||||||
*/
|
*/
|
||||||
async store(blobStore) {
|
async store(blobStore) {
|
||||||
if (this.operations.length === 0) {
|
if (this.operations.length === 0) {
|
||||||
/** @type RawHashFileData */
|
/** @type RawFileData */
|
||||||
const raw = { hash: this.hash }
|
const raw = { hash: this.hash }
|
||||||
if (this.rangesHash) {
|
if (this.rangesHash) {
|
||||||
raw.rangesHash = this.rangesHash
|
raw.rangesHash = this.rangesHash
|
||||||
|
@ -171,11 +171,9 @@ class LazyStringFileData extends FileData {
|
||||||
return raw
|
return raw
|
||||||
}
|
}
|
||||||
const eager = await this.toEager(blobStore)
|
const eager = await this.toEager(blobStore)
|
||||||
const raw = await eager.store(blobStore)
|
|
||||||
this.hash = raw.hash
|
|
||||||
this.rangesHash = raw.rangesHash
|
|
||||||
this.operations.length = 0
|
this.operations.length = 0
|
||||||
return raw
|
/** @type RawFileData */
|
||||||
|
return await eager.store(blobStore)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -8,7 +8,7 @@ const CommentList = require('./comment_list')
|
||||||
const TrackedChangeList = require('./tracked_change_list')
|
const TrackedChangeList = require('./tracked_change_list')
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @import { StringFileRawData, RawHashFileData, BlobStore, CommentRawData } from "../types"
|
* @import { StringFileRawData, RawFileData, BlobStore, CommentRawData } from "../types"
|
||||||
* @import { TrackedChangeRawData, RangesBlob } from "../types"
|
* @import { TrackedChangeRawData, RangesBlob } from "../types"
|
||||||
* @import EditOperation from "../operation/edit_operation"
|
* @import EditOperation from "../operation/edit_operation"
|
||||||
*/
|
*/
|
||||||
|
@ -139,7 +139,7 @@ class StringFileData extends FileData {
|
||||||
/**
|
/**
|
||||||
* @inheritdoc
|
* @inheritdoc
|
||||||
* @param {BlobStore} blobStore
|
* @param {BlobStore} blobStore
|
||||||
* @return {Promise<RawHashFileData>}
|
* @return {Promise<RawFileData>}
|
||||||
*/
|
*/
|
||||||
async store(blobStore) {
|
async store(blobStore) {
|
||||||
const blob = await blobStore.putString(this.content)
|
const blob = await blobStore.putString(this.content)
|
||||||
|
|
|
@ -84,21 +84,6 @@ class TrackedChange {
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Return an equivalent tracked change whose extent is limited to the given
|
|
||||||
* range
|
|
||||||
*
|
|
||||||
* @param {Range} range
|
|
||||||
* @returns {TrackedChange | null} - the result or null if the intersection is empty
|
|
||||||
*/
|
|
||||||
intersectRange(range) {
|
|
||||||
const intersection = this.range.intersect(range)
|
|
||||||
if (intersection == null) {
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
return new TrackedChange(intersection, this.tracking)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = TrackedChange
|
module.exports = TrackedChange
|
||||||
|
|
|
@ -2,11 +2,9 @@
|
||||||
const Range = require('../range')
|
const Range = require('../range')
|
||||||
const TrackedChange = require('./tracked_change')
|
const TrackedChange = require('./tracked_change')
|
||||||
const TrackingProps = require('../file_data/tracking_props')
|
const TrackingProps = require('../file_data/tracking_props')
|
||||||
const { InsertOp, RemoveOp, RetainOp } = require('../operation/scan_op')
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @import { TrackingDirective, TrackedChangeRawData } from "../types"
|
* @import { TrackingDirective, TrackedChangeRawData } from "../types"
|
||||||
* @import TextOperation from "../operation/text_operation"
|
|
||||||
*/
|
*/
|
||||||
|
|
||||||
class TrackedChangeList {
|
class TrackedChangeList {
|
||||||
|
@ -60,22 +58,6 @@ class TrackedChangeList {
|
||||||
return this._trackedChanges.filter(change => range.contains(change.range))
|
return this._trackedChanges.filter(change => range.contains(change.range))
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns tracked changes that overlap with the given range
|
|
||||||
* @param {Range} range
|
|
||||||
* @returns {TrackedChange[]}
|
|
||||||
*/
|
|
||||||
intersectRange(range) {
|
|
||||||
const changes = []
|
|
||||||
for (const change of this._trackedChanges) {
|
|
||||||
const intersection = change.intersectRange(range)
|
|
||||||
if (intersection != null) {
|
|
||||||
changes.push(intersection)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return changes
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns the tracking props for a given range.
|
* Returns the tracking props for a given range.
|
||||||
* @param {Range} range
|
* @param {Range} range
|
||||||
|
@ -107,8 +89,6 @@ class TrackedChangeList {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Collapses consecutive (and compatible) ranges
|
* Collapses consecutive (and compatible) ranges
|
||||||
*
|
|
||||||
* @private
|
|
||||||
* @returns {void}
|
* @returns {void}
|
||||||
*/
|
*/
|
||||||
_mergeRanges() {
|
_mergeRanges() {
|
||||||
|
@ -137,28 +117,12 @@ class TrackedChangeList {
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Apply an insert operation
|
|
||||||
*
|
*
|
||||||
* @param {number} cursor
|
* @param {number} cursor
|
||||||
* @param {string} insertedText
|
* @param {string} insertedText
|
||||||
* @param {{tracking?: TrackingProps}} opts
|
* @param {{tracking?: TrackingProps}} opts
|
||||||
*/
|
*/
|
||||||
applyInsert(cursor, insertedText, opts = {}) {
|
applyInsert(cursor, insertedText, opts = {}) {
|
||||||
this._applyInsert(cursor, insertedText, opts)
|
|
||||||
this._mergeRanges()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Apply an insert operation
|
|
||||||
*
|
|
||||||
* This method will not merge ranges at the end
|
|
||||||
*
|
|
||||||
* @private
|
|
||||||
* @param {number} cursor
|
|
||||||
* @param {string} insertedText
|
|
||||||
* @param {{tracking?: TrackingProps}} [opts]
|
|
||||||
*/
|
|
||||||
_applyInsert(cursor, insertedText, opts = {}) {
|
|
||||||
const newTrackedChanges = []
|
const newTrackedChanges = []
|
||||||
for (const trackedChange of this._trackedChanges) {
|
for (const trackedChange of this._trackedChanges) {
|
||||||
if (
|
if (
|
||||||
|
@ -207,29 +171,15 @@ class TrackedChangeList {
|
||||||
newTrackedChanges.push(newTrackedChange)
|
newTrackedChanges.push(newTrackedChange)
|
||||||
}
|
}
|
||||||
this._trackedChanges = newTrackedChanges
|
this._trackedChanges = newTrackedChanges
|
||||||
|
this._mergeRanges()
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Apply a delete operation to the list of tracked changes
|
|
||||||
*
|
*
|
||||||
* @param {number} cursor
|
* @param {number} cursor
|
||||||
* @param {number} length
|
* @param {number} length
|
||||||
*/
|
*/
|
||||||
applyDelete(cursor, length) {
|
applyDelete(cursor, length) {
|
||||||
this._applyDelete(cursor, length)
|
|
||||||
this._mergeRanges()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Apply a delete operation to the list of tracked changes
|
|
||||||
*
|
|
||||||
* This method will not merge ranges at the end
|
|
||||||
*
|
|
||||||
* @private
|
|
||||||
* @param {number} cursor
|
|
||||||
* @param {number} length
|
|
||||||
*/
|
|
||||||
_applyDelete(cursor, length) {
|
|
||||||
const newTrackedChanges = []
|
const newTrackedChanges = []
|
||||||
for (const trackedChange of this._trackedChanges) {
|
for (const trackedChange of this._trackedChanges) {
|
||||||
const deletedRange = new Range(cursor, length)
|
const deletedRange = new Range(cursor, length)
|
||||||
|
@ -255,31 +205,15 @@ class TrackedChangeList {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
this._trackedChanges = newTrackedChanges
|
this._trackedChanges = newTrackedChanges
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Apply a retain operation to the list of tracked changes
|
|
||||||
*
|
|
||||||
* @param {number} cursor
|
|
||||||
* @param {number} length
|
|
||||||
* @param {{tracking?: TrackingDirective}} [opts]
|
|
||||||
*/
|
|
||||||
applyRetain(cursor, length, opts = {}) {
|
|
||||||
this._applyRetain(cursor, length, opts)
|
|
||||||
this._mergeRanges()
|
this._mergeRanges()
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Apply a retain operation to the list of tracked changes
|
|
||||||
*
|
|
||||||
* This method will not merge ranges at the end
|
|
||||||
*
|
|
||||||
* @private
|
|
||||||
* @param {number} cursor
|
* @param {number} cursor
|
||||||
* @param {number} length
|
* @param {number} length
|
||||||
* @param {{tracking?: TrackingDirective}} opts
|
* @param {{tracking?: TrackingDirective}} opts
|
||||||
*/
|
*/
|
||||||
_applyRetain(cursor, length, opts = {}) {
|
applyRetain(cursor, length, opts = {}) {
|
||||||
// If there's no tracking info, leave everything as-is
|
// If there's no tracking info, leave everything as-is
|
||||||
if (!opts.tracking) {
|
if (!opts.tracking) {
|
||||||
return
|
return
|
||||||
|
@ -335,31 +269,6 @@ class TrackedChangeList {
|
||||||
newTrackedChanges.push(newTrackedChange)
|
newTrackedChanges.push(newTrackedChange)
|
||||||
}
|
}
|
||||||
this._trackedChanges = newTrackedChanges
|
this._trackedChanges = newTrackedChanges
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Apply a text operation to the list of tracked changes
|
|
||||||
*
|
|
||||||
* Ranges are merged only once at the end, for performance and to avoid
|
|
||||||
* problematic edge cases where intermediate ranges get incorrectly merged.
|
|
||||||
*
|
|
||||||
* @param {TextOperation} operation
|
|
||||||
*/
|
|
||||||
applyTextOperation(operation) {
|
|
||||||
// this cursor tracks the destination document that gets modified as
|
|
||||||
// operations are applied to it.
|
|
||||||
let cursor = 0
|
|
||||||
for (const op of operation.ops) {
|
|
||||||
if (op instanceof InsertOp) {
|
|
||||||
this._applyInsert(cursor, op.insertion, { tracking: op.tracking })
|
|
||||||
cursor += op.insertion.length
|
|
||||||
} else if (op instanceof RemoveOp) {
|
|
||||||
this._applyDelete(cursor, op.length)
|
|
||||||
} else if (op instanceof RetainOp) {
|
|
||||||
this._applyRetain(cursor, op.length, { tracking: op.tracking })
|
|
||||||
cursor += op.length
|
|
||||||
}
|
|
||||||
}
|
|
||||||
this._mergeRanges()
|
this._mergeRanges()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -62,35 +62,6 @@ class TrackingProps {
|
||||||
this.ts.getTime() === other.ts.getTime()
|
this.ts.getTime() === other.ts.getTime()
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Are these tracking props compatible with the other tracking props for merging
|
|
||||||
* ranges?
|
|
||||||
*
|
|
||||||
* @param {TrackingDirective} other
|
|
||||||
* @returns {other is TrackingProps}
|
|
||||||
*/
|
|
||||||
canMergeWith(other) {
|
|
||||||
if (!(other instanceof TrackingProps)) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
return this.type === other.type && this.userId === other.userId
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Merge two tracking props
|
|
||||||
*
|
|
||||||
* Assumes that `canMerge(other)` returns true
|
|
||||||
*
|
|
||||||
* @param {TrackingDirective} other
|
|
||||||
*/
|
|
||||||
mergeWith(other) {
|
|
||||||
if (!this.canMergeWith(other)) {
|
|
||||||
throw new Error('Cannot merge with incompatible tracking props')
|
|
||||||
}
|
|
||||||
const ts = this.ts <= other.ts ? this.ts : other.ts
|
|
||||||
return new TrackingProps(this.type, this.userId, ts)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = TrackingProps
|
module.exports = TrackingProps
|
||||||
|
|
|
@ -7,7 +7,7 @@ const Change = require('./change')
|
||||||
const Snapshot = require('./snapshot')
|
const Snapshot = require('./snapshot')
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @import { BlobStore, ReadonlyBlobStore } from "./types"
|
* @import { BlobStore } from "./types"
|
||||||
*/
|
*/
|
||||||
|
|
||||||
class History {
|
class History {
|
||||||
|
@ -85,7 +85,7 @@ class History {
|
||||||
* If this History contains any File objects, load them.
|
* If this History contains any File objects, load them.
|
||||||
*
|
*
|
||||||
* @param {string} kind see {File#load}
|
* @param {string} kind see {File#load}
|
||||||
* @param {ReadonlyBlobStore} blobStore
|
* @param {BlobStore} blobStore
|
||||||
* @return {Promise<void>}
|
* @return {Promise<void>}
|
||||||
*/
|
*/
|
||||||
async loadFiles(kind, blobStore) {
|
async loadFiles(kind, blobStore) {
|
||||||
|
|
|
@ -13,7 +13,7 @@ let EditFileOperation = null
|
||||||
let SetFileMetadataOperation = null
|
let SetFileMetadataOperation = null
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @import { ReadonlyBlobStore } from "../types"
|
* @import { BlobStore } from "../types"
|
||||||
* @import Snapshot from "../snapshot"
|
* @import Snapshot from "../snapshot"
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
@ -80,7 +80,7 @@ class Operation {
|
||||||
* If this operation references any files, load the files.
|
* If this operation references any files, load the files.
|
||||||
*
|
*
|
||||||
* @param {string} kind see {File#load}
|
* @param {string} kind see {File#load}
|
||||||
* @param {ReadOnlyBlobStore} blobStore
|
* @param {BlobStore} blobStore
|
||||||
* @return {Promise<void>}
|
* @return {Promise<void>}
|
||||||
*/
|
*/
|
||||||
async loadFiles(kind, blobStore) {}
|
async loadFiles(kind, blobStore) {}
|
||||||
|
|
|
@ -175,7 +175,7 @@ class InsertOp extends ScanOp {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
if (this.tracking) {
|
if (this.tracking) {
|
||||||
if (!other.tracking || !this.tracking.canMergeWith(other.tracking)) {
|
if (!this.tracking.equals(other.tracking)) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
} else if (other.tracking) {
|
} else if (other.tracking) {
|
||||||
|
@ -198,10 +198,7 @@ class InsertOp extends ScanOp {
|
||||||
throw new Error('Cannot merge with incompatible operation')
|
throw new Error('Cannot merge with incompatible operation')
|
||||||
}
|
}
|
||||||
this.insertion += other.insertion
|
this.insertion += other.insertion
|
||||||
if (this.tracking != null && other.tracking != null) {
|
// We already have the same tracking info and commentIds
|
||||||
this.tracking = this.tracking.mergeWith(other.tracking)
|
|
||||||
}
|
|
||||||
// We already have the same commentIds
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -309,13 +306,9 @@ class RetainOp extends ScanOp {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
if (this.tracking) {
|
if (this.tracking) {
|
||||||
if (!other.tracking || !this.tracking.canMergeWith(other.tracking)) {
|
return this.tracking.equals(other.tracking)
|
||||||
return false
|
|
||||||
}
|
|
||||||
} else if (other.tracking) {
|
|
||||||
return false
|
|
||||||
}
|
}
|
||||||
return true
|
return !other.tracking
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -326,9 +319,6 @@ class RetainOp extends ScanOp {
|
||||||
throw new Error('Cannot merge with incompatible operation')
|
throw new Error('Cannot merge with incompatible operation')
|
||||||
}
|
}
|
||||||
this.length += other.length
|
this.length += other.length
|
||||||
if (this.tracking != null && other.tracking != null) {
|
|
||||||
this.tracking = this.tracking.mergeWith(other.tracking)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
|
@ -314,18 +314,25 @@ class TextOperation extends EditOperation {
|
||||||
str
|
str
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
file.trackedChanges.applyRetain(result.length, op.length, {
|
||||||
|
tracking: op.tracking,
|
||||||
|
})
|
||||||
result += str.slice(inputCursor, inputCursor + op.length)
|
result += str.slice(inputCursor, inputCursor + op.length)
|
||||||
inputCursor += op.length
|
inputCursor += op.length
|
||||||
} else if (op instanceof InsertOp) {
|
} else if (op instanceof InsertOp) {
|
||||||
if (containsNonBmpChars(op.insertion)) {
|
if (containsNonBmpChars(op.insertion)) {
|
||||||
throw new InvalidInsertionError(str, op.toJSON())
|
throw new InvalidInsertionError(str, op.toJSON())
|
||||||
}
|
}
|
||||||
|
file.trackedChanges.applyInsert(result.length, op.insertion, {
|
||||||
|
tracking: op.tracking,
|
||||||
|
})
|
||||||
file.comments.applyInsert(
|
file.comments.applyInsert(
|
||||||
new Range(result.length, op.insertion.length),
|
new Range(result.length, op.insertion.length),
|
||||||
{ commentIds: op.commentIds }
|
{ commentIds: op.commentIds }
|
||||||
)
|
)
|
||||||
result += op.insertion
|
result += op.insertion
|
||||||
} else if (op instanceof RemoveOp) {
|
} else if (op instanceof RemoveOp) {
|
||||||
|
file.trackedChanges.applyDelete(result.length, op.length)
|
||||||
file.comments.applyDelete(new Range(result.length, op.length))
|
file.comments.applyDelete(new Range(result.length, op.length))
|
||||||
inputCursor += op.length
|
inputCursor += op.length
|
||||||
} else {
|
} else {
|
||||||
|
@ -345,8 +352,6 @@ class TextOperation extends EditOperation {
|
||||||
throw new TextOperation.TooLongError(operation, result.length)
|
throw new TextOperation.TooLongError(operation, result.length)
|
||||||
}
|
}
|
||||||
|
|
||||||
file.trackedChanges.applyTextOperation(this)
|
|
||||||
|
|
||||||
file.content = result
|
file.content = result
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -395,36 +400,44 @@ class TextOperation extends EditOperation {
|
||||||
for (let i = 0, l = ops.length; i < l; i++) {
|
for (let i = 0, l = ops.length; i < l; i++) {
|
||||||
const op = ops[i]
|
const op = ops[i]
|
||||||
if (op instanceof RetainOp) {
|
if (op instanceof RetainOp) {
|
||||||
if (op.tracking) {
|
// Where we need to end up after the retains
|
||||||
// Where we need to end up after the retains
|
const target = strIndex + op.length
|
||||||
const target = strIndex + op.length
|
// A previous retain could have overriden some tracking info. Now we
|
||||||
// A previous retain could have overriden some tracking info. Now we
|
// need to restore it.
|
||||||
// need to restore it.
|
const previousRanges = previousState.trackedChanges.inRange(
|
||||||
const previousChanges = previousState.trackedChanges.intersectRange(
|
new Range(strIndex, op.length)
|
||||||
new Range(strIndex, op.length)
|
)
|
||||||
)
|
|
||||||
|
|
||||||
for (const change of previousChanges) {
|
let removeTrackingInfoIfNeeded
|
||||||
if (strIndex < change.range.start) {
|
if (op.tracking) {
|
||||||
inverse.retain(change.range.start - strIndex, {
|
removeTrackingInfoIfNeeded = new ClearTrackingProps()
|
||||||
tracking: new ClearTrackingProps(),
|
}
|
||||||
})
|
|
||||||
strIndex = change.range.start
|
for (const trackedChange of previousRanges) {
|
||||||
}
|
if (strIndex < trackedChange.range.start) {
|
||||||
inverse.retain(change.range.length, {
|
inverse.retain(trackedChange.range.start - strIndex, {
|
||||||
tracking: change.tracking,
|
tracking: removeTrackingInfoIfNeeded,
|
||||||
})
|
})
|
||||||
strIndex += change.range.length
|
strIndex = trackedChange.range.start
|
||||||
}
|
}
|
||||||
if (strIndex < target) {
|
if (trackedChange.range.end < strIndex + op.length) {
|
||||||
inverse.retain(target - strIndex, {
|
inverse.retain(trackedChange.range.length, {
|
||||||
tracking: new ClearTrackingProps(),
|
tracking: trackedChange.tracking,
|
||||||
})
|
})
|
||||||
strIndex = target
|
strIndex = trackedChange.range.end
|
||||||
}
|
}
|
||||||
} else {
|
if (trackedChange.range.end !== strIndex) {
|
||||||
inverse.retain(op.length)
|
// No need to split the range at the end
|
||||||
strIndex += op.length
|
const [left] = trackedChange.range.splitAt(strIndex)
|
||||||
|
inverse.retain(left.length, { tracking: trackedChange.tracking })
|
||||||
|
strIndex = left.end
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (strIndex < target) {
|
||||||
|
inverse.retain(target - strIndex, {
|
||||||
|
tracking: removeTrackingInfoIfNeeded,
|
||||||
|
})
|
||||||
|
strIndex = target
|
||||||
}
|
}
|
||||||
} else if (op instanceof InsertOp) {
|
} else if (op instanceof InsertOp) {
|
||||||
inverse.remove(op.insertion.length)
|
inverse.remove(op.insertion.length)
|
||||||
|
|
|
@ -86,32 +86,10 @@ class Range {
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Does this range overlap another range?
|
* @param {Range} range
|
||||||
*
|
|
||||||
* Overlapping means that the two ranges have at least one character in common
|
|
||||||
*
|
|
||||||
* @param {Range} other - the other range
|
|
||||||
*/
|
*/
|
||||||
overlaps(other) {
|
overlaps(range) {
|
||||||
return this.start < other.end && this.end > other.start
|
return this.start < range.end && this.end > range.start
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Does this range overlap the start of another range?
|
|
||||||
*
|
|
||||||
* @param {Range} other - the other range
|
|
||||||
*/
|
|
||||||
overlapsStart(other) {
|
|
||||||
return this.start <= other.start && this.end > other.start
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Does this range overlap the end of another range?
|
|
||||||
*
|
|
||||||
* @param {Range} other - the other range
|
|
||||||
*/
|
|
||||||
overlapsEnd(other) {
|
|
||||||
return this.start < other.end && this.end >= other.end
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -249,26 +227,6 @@ class Range {
|
||||||
)
|
)
|
||||||
return [rangeUpToCursor, rangeAfterCursor]
|
return [rangeUpToCursor, rangeAfterCursor]
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the intersection of this range with another range
|
|
||||||
*
|
|
||||||
* @param {Range} other - the other range
|
|
||||||
* @return {Range | null} the intersection or null if the intersection is empty
|
|
||||||
*/
|
|
||||||
intersect(other) {
|
|
||||||
if (this.contains(other)) {
|
|
||||||
return other
|
|
||||||
} else if (other.contains(this)) {
|
|
||||||
return this
|
|
||||||
} else if (other.overlapsStart(this)) {
|
|
||||||
return new Range(this.pos, other.end - this.start)
|
|
||||||
} else if (other.overlapsEnd(this)) {
|
|
||||||
return new Range(other.pos, this.end - other.start)
|
|
||||||
} else {
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = Range
|
module.exports = Range
|
||||||
|
|
|
@ -193,13 +193,4 @@ describe('LazyStringFileData', function () {
|
||||||
expect(fileData.getStringLength()).to.equal(longString.length)
|
expect(fileData.getStringLength()).to.equal(longString.length)
|
||||||
expect(fileData.getOperations()).to.have.length(1)
|
expect(fileData.getOperations()).to.have.length(1)
|
||||||
})
|
})
|
||||||
|
|
||||||
it('truncates its operations after being stored', async function () {
|
|
||||||
const testHash = File.EMPTY_FILE_HASH
|
|
||||||
const fileData = new LazyStringFileData(testHash, undefined, 0)
|
|
||||||
fileData.edit(new TextOperation().insert('abc'))
|
|
||||||
const stored = await fileData.store(this.blobStore)
|
|
||||||
expect(fileData.hash).to.equal(stored.hash)
|
|
||||||
expect(fileData.operations).to.deep.equal([])
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
// @ts-check
|
||||||
'use strict'
|
'use strict'
|
||||||
|
|
||||||
const { expect } = require('chai')
|
const { expect } = require('chai')
|
||||||
|
@ -448,44 +449,4 @@ describe('Range', function () {
|
||||||
expect(() => range.insertAt(16, 3)).to.throw()
|
expect(() => range.insertAt(16, 3)).to.throw()
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('intersect', function () {
|
|
||||||
it('should handle partially overlapping ranges', function () {
|
|
||||||
const range1 = new Range(5, 10)
|
|
||||||
const range2 = new Range(3, 6)
|
|
||||||
const intersection1 = range1.intersect(range2)
|
|
||||||
expect(intersection1.pos).to.equal(5)
|
|
||||||
expect(intersection1.length).to.equal(4)
|
|
||||||
const intersection2 = range2.intersect(range1)
|
|
||||||
expect(intersection2.pos).to.equal(5)
|
|
||||||
expect(intersection2.length).to.equal(4)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should intersect with itself', function () {
|
|
||||||
const range = new Range(5, 10)
|
|
||||||
const intersection = range.intersect(range)
|
|
||||||
expect(intersection.pos).to.equal(5)
|
|
||||||
expect(intersection.length).to.equal(10)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should handle nested ranges', function () {
|
|
||||||
const range1 = new Range(5, 10)
|
|
||||||
const range2 = new Range(7, 2)
|
|
||||||
const intersection1 = range1.intersect(range2)
|
|
||||||
expect(intersection1.pos).to.equal(7)
|
|
||||||
expect(intersection1.length).to.equal(2)
|
|
||||||
const intersection2 = range2.intersect(range1)
|
|
||||||
expect(intersection2.pos).to.equal(7)
|
|
||||||
expect(intersection2.length).to.equal(2)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should handle disconnected ranges', function () {
|
|
||||||
const range1 = new Range(5, 10)
|
|
||||||
const range2 = new Range(20, 30)
|
|
||||||
const intersection1 = range1.intersect(range2)
|
|
||||||
expect(intersection1).to.be.null
|
|
||||||
const intersection2 = range2.intersect(range1)
|
|
||||||
expect(intersection2).to.be.null
|
|
||||||
})
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
|
|
|
@ -107,7 +107,7 @@ describe('RetainOp', function () {
|
||||||
expect(op1.equals(new RetainOp(3))).to.be.true
|
expect(op1.equals(new RetainOp(3))).to.be.true
|
||||||
})
|
})
|
||||||
|
|
||||||
it('cannot merge with another RetainOp if the tracking user is different', function () {
|
it('cannot merge with another RetainOp if tracking info is different', function () {
|
||||||
const op1 = new RetainOp(
|
const op1 = new RetainOp(
|
||||||
4,
|
4,
|
||||||
new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
|
new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
|
||||||
|
@ -120,14 +120,14 @@ describe('RetainOp', function () {
|
||||||
expect(() => op1.mergeWith(op2)).to.throw(Error)
|
expect(() => op1.mergeWith(op2)).to.throw(Error)
|
||||||
})
|
})
|
||||||
|
|
||||||
it('can merge with another RetainOp if the tracking user is the same', function () {
|
it('can merge with another RetainOp if tracking info is the same', function () {
|
||||||
const op1 = new RetainOp(
|
const op1 = new RetainOp(
|
||||||
4,
|
4,
|
||||||
new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
|
new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
|
||||||
)
|
)
|
||||||
const op2 = new RetainOp(
|
const op2 = new RetainOp(
|
||||||
4,
|
4,
|
||||||
new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:01.000Z'))
|
new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
|
||||||
)
|
)
|
||||||
op1.mergeWith(op2)
|
op1.mergeWith(op2)
|
||||||
expect(
|
expect(
|
||||||
|
@ -310,7 +310,7 @@ describe('InsertOp', function () {
|
||||||
expect(() => op1.mergeWith(op2)).to.throw(Error)
|
expect(() => op1.mergeWith(op2)).to.throw(Error)
|
||||||
})
|
})
|
||||||
|
|
||||||
it('cannot merge with another InsertOp if tracking user is different', function () {
|
it('cannot merge with another InsertOp if tracking info is different', function () {
|
||||||
const op1 = new InsertOp(
|
const op1 = new InsertOp(
|
||||||
'a',
|
'a',
|
||||||
new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
|
new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
|
||||||
|
@ -323,7 +323,7 @@ describe('InsertOp', function () {
|
||||||
expect(() => op1.mergeWith(op2)).to.throw(Error)
|
expect(() => op1.mergeWith(op2)).to.throw(Error)
|
||||||
})
|
})
|
||||||
|
|
||||||
it('can merge with another InsertOp if tracking user and comment info is the same', function () {
|
it('can merge with another InsertOp if tracking and comment info is the same', function () {
|
||||||
const op1 = new InsertOp(
|
const op1 = new InsertOp(
|
||||||
'a',
|
'a',
|
||||||
new TrackingProps(
|
new TrackingProps(
|
||||||
|
@ -338,7 +338,7 @@ describe('InsertOp', function () {
|
||||||
new TrackingProps(
|
new TrackingProps(
|
||||||
'insert',
|
'insert',
|
||||||
'user1',
|
'user1',
|
||||||
new Date('2024-01-01T00:00:01.000Z')
|
new Date('2024-01-01T00:00:00.000Z')
|
||||||
),
|
),
|
||||||
['1', '2']
|
['1', '2']
|
||||||
)
|
)
|
||||||
|
|
|
@ -322,47 +322,6 @@ describe('TextOperation', function () {
|
||||||
new TextOperation().retain(4).remove(4).retain(3)
|
new TextOperation().retain(4).remove(4).retain(3)
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
|
||||||
it('undoing a tracked delete restores the tracked changes', function () {
|
|
||||||
expectInverseToLeadToInitialState(
|
|
||||||
new StringFileData(
|
|
||||||
'the quick brown fox jumps over the lazy dog',
|
|
||||||
undefined,
|
|
||||||
[
|
|
||||||
{
|
|
||||||
range: { pos: 5, length: 5 },
|
|
||||||
tracking: {
|
|
||||||
ts: '2023-01-01T00:00:00.000Z',
|
|
||||||
type: 'insert',
|
|
||||||
userId: 'user1',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
range: { pos: 12, length: 3 },
|
|
||||||
tracking: {
|
|
||||||
ts: '2023-01-01T00:00:00.000Z',
|
|
||||||
type: 'delete',
|
|
||||||
userId: 'user1',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
range: { pos: 18, length: 5 },
|
|
||||||
tracking: {
|
|
||||||
ts: '2023-01-01T00:00:00.000Z',
|
|
||||||
type: 'insert',
|
|
||||||
userId: 'user1',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
]
|
|
||||||
),
|
|
||||||
new TextOperation()
|
|
||||||
.retain(7)
|
|
||||||
.retain(13, {
|
|
||||||
tracking: new TrackingProps('delete', 'user1', new Date()),
|
|
||||||
})
|
|
||||||
.retain(23)
|
|
||||||
)
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('compose', function () {
|
describe('compose', function () {
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
22.17.0
|
22.15.1
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
promise-utils
|
promise-utils
|
||||||
--dependencies=None
|
--dependencies=None
|
||||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
--docker-repos=gcr.io/overleaf-ops
|
||||||
--env-add=
|
--env-add=
|
||||||
--env-pass-through=
|
--env-pass-through=
|
||||||
--esmock-loader=False
|
--esmock-loader=False
|
||||||
--is-library=True
|
--is-library=True
|
||||||
--node-version=22.17.0
|
--node-version=22.15.1
|
||||||
--public-repo=False
|
--public-repo=False
|
||||||
--script-version=4.7.0
|
--script-version=4.7.0
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
22.17.0
|
22.15.1
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
ranges-tracker
|
ranges-tracker
|
||||||
--dependencies=None
|
--dependencies=None
|
||||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
--docker-repos=gcr.io/overleaf-ops
|
||||||
--env-add=
|
--env-add=
|
||||||
--env-pass-through=
|
--env-pass-through=
|
||||||
--esmock-loader=False
|
--esmock-loader=False
|
||||||
--is-library=True
|
--is-library=True
|
||||||
--node-version=22.17.0
|
--node-version=22.15.1
|
||||||
--public-repo=False
|
--public-repo=False
|
||||||
--script-version=4.7.0
|
--script-version=4.7.0
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
22.17.0
|
22.15.1
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
redis-wrapper
|
redis-wrapper
|
||||||
--dependencies=None
|
--dependencies=None
|
||||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
--docker-repos=gcr.io/overleaf-ops
|
||||||
--env-add=
|
--env-add=
|
||||||
--env-pass-through=
|
--env-pass-through=
|
||||||
--esmock-loader=False
|
--esmock-loader=False
|
||||||
--is-library=True
|
--is-library=True
|
||||||
--node-version=22.17.0
|
--node-version=22.15.1
|
||||||
--public-repo=False
|
--public-repo=False
|
||||||
--script-version=4.7.0
|
--script-version=4.7.0
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
22.17.0
|
22.15.1
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
settings
|
settings
|
||||||
--dependencies=None
|
--dependencies=None
|
||||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
--docker-repos=gcr.io/overleaf-ops
|
||||||
--env-add=
|
--env-add=
|
||||||
--env-pass-through=
|
--env-pass-through=
|
||||||
--esmock-loader=False
|
--esmock-loader=False
|
||||||
--is-library=True
|
--is-library=True
|
||||||
--node-version=22.17.0
|
--node-version=22.15.1
|
||||||
--public-repo=False
|
--public-repo=False
|
||||||
--script-version=4.7.0
|
--script-version=4.7.0
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
22.17.0
|
22.15.1
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
stream-utils
|
stream-utils
|
||||||
--dependencies=None
|
--dependencies=None
|
||||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
--docker-repos=gcr.io/overleaf-ops
|
||||||
--env-add=
|
--env-add=
|
||||||
--env-pass-through=
|
--env-pass-through=
|
||||||
--esmock-loader=False
|
--esmock-loader=False
|
||||||
--is-library=True
|
--is-library=True
|
||||||
--node-version=22.17.0
|
--node-version=22.15.1
|
||||||
--public-repo=False
|
--public-repo=False
|
||||||
--script-version=4.7.0
|
--script-version=4.7.0
|
||||||
|
|
|
@ -145,24 +145,6 @@ class LoggerStream extends Transform {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
class MeteredStream extends Transform {
|
|
||||||
#Metrics
|
|
||||||
#metric
|
|
||||||
#labels
|
|
||||||
|
|
||||||
constructor(Metrics, metric, labels) {
|
|
||||||
super()
|
|
||||||
this.#Metrics = Metrics
|
|
||||||
this.#metric = metric
|
|
||||||
this.#labels = labels
|
|
||||||
}
|
|
||||||
|
|
||||||
_transform(chunk, encoding, callback) {
|
|
||||||
this.#Metrics.count(this.#metric, chunk.byteLength, 1, this.#labels)
|
|
||||||
callback(null, chunk)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Export our classes
|
// Export our classes
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
|
@ -171,7 +153,6 @@ module.exports = {
|
||||||
LoggerStream,
|
LoggerStream,
|
||||||
LimitedStream,
|
LimitedStream,
|
||||||
TimeoutStream,
|
TimeoutStream,
|
||||||
MeteredStream,
|
|
||||||
SizeExceededError,
|
SizeExceededError,
|
||||||
AbortError,
|
AbortError,
|
||||||
}
|
}
|
||||||
|
|
5537
package-lock.json
generated
5537
package-lock.json
generated
File diff suppressed because it is too large
Load diff
45
package.json
45
package.json
|
@@ -8,8 +8,8 @@
     "@types/chai": "^4.3.0",
     "@types/chai-as-promised": "^7.1.8",
     "@types/mocha": "^10.0.6",
-    "@typescript-eslint/eslint-plugin": "^8.30.1",
-    "@typescript-eslint/parser": "^8.30.1",
+    "@typescript-eslint/eslint-plugin": "^8.0.0",
+    "@typescript-eslint/parser": "^8.0.0",
     "eslint": "^8.15.0",
     "eslint-config-prettier": "^8.5.0",
     "eslint-config-standard": "^17.0.0",

@@ -18,37 +18,28 @@
     "eslint-plugin-cypress": "^2.15.1",
     "eslint-plugin-import": "^2.26.0",
     "eslint-plugin-mocha": "^10.1.0",
-    "eslint-plugin-n": "^15.7.0",
+    "eslint-plugin-node": "^11.1.0",
     "eslint-plugin-prettier": "^4.0.0",
     "eslint-plugin-promise": "^6.0.0",
     "eslint-plugin-unicorn": "^56.0.0",
-    "prettier": "3.6.2",
-    "typescript": "^5.8.3"
+    "prettier": "3.3.3",
+    "typescript": "^5.5.4"
-  },
-  "engines": {
-    "npm": "11.4.2"
   },
   "overrides": {
-    "swagger-tools@0.10.4": {
-      "path-to-regexp": "3.3.0",
+    "cross-env": {
+      "cross-spawn": "^7.0.6"
+    },
+    "fetch-mock": {
+      "path-to-regexp": "3.3.0"
+    },
+    "google-gax": {
+      "protobufjs": "^7.2.5"
+    },
+    "swagger-tools": {
       "body-parser": "1.20.3",
-      "multer": "2.0.2"
-    },
-    "request@2.88.2": {
-      "tough-cookie": "5.1.2",
-      "form-data": "2.5.5"
-    },
-    "superagent@7.1.6": {
-      "form-data": "4.0.4"
-    },
-    "superagent@3.8.3": {
-      "form-data": "2.5.5"
-    },
-    "retry-request@7.0.2": {
-      "form-data": "2.5.5"
-    },
-    "@types/request@2.48.12": {
-      "form-data": "2.5.5"
+      "multer": "2.0.0",
+      "path-to-regexp": "3.3.0",
+      "qs": "6.13.0"
     }
   },
   "scripts": {
@@ -1,13 +0,0 @@
-diff --git a/node_modules/multer/lib/make-middleware.js b/node_modules/multer/lib/make-middleware.js
-index 260dcb4..895b4b2 100644
---- a/node_modules/multer/lib/make-middleware.js
-+++ b/node_modules/multer/lib/make-middleware.js
-@@ -113,7 +113,7 @@ function makeMiddleware (setup) {
-     if (fieldname == null) return abortWithCode('MISSING_FIELD_NAME')
-
-     // don't attach to the files object, if there is no file
--    if (!filename) return fileStream.resume()
-+    if (!filename) filename = 'undefined'
-
-     // Work around bug in Busboy (https://github.com/mscdex/busboy/issues/6)
-     if (limits && Object.prototype.hasOwnProperty.call(limits, 'fieldNameSize')) {
@@ -115,3 +115,9 @@ ENV LOG_LEVEL="info"
 EXPOSE 80

 ENTRYPOINT ["/sbin/my_init"]
+
+# Store the revision
+# ------------------
+# This should be the last step to optimize docker image caching.
+ARG MONOREPO_REVISION
+RUN echo "monorepo-server-ce,$MONOREPO_REVISION" > /var/www/revisions.txt
@@ -34,7 +34,7 @@ build-community:
 	--build-arg BUILDKIT_INLINE_CACHE=1 \
 	--progress=plain \
 	--build-arg OVERLEAF_BASE_TAG \
-	--label "com.overleaf.ce.revision=$(MONOREPO_REVISION)" \
+	--build-arg MONOREPO_REVISION \
 	--cache-from $(OVERLEAF_LATEST) \
 	--cache-from $(OVERLEAF_BRANCH) \
 	--file Dockerfile \
@@ -5,4 +5,4 @@ set -euo pipefail
 source /etc/container_environment.sh
 source /etc/overleaf/env.sh
 cd /overleaf/services/project-history
-exec /sbin/setuser www-data node scripts/flush_all.js 100000
+node scripts/flush_all.js 100000

@@ -5,4 +5,4 @@ set -euo pipefail
 source /etc/container_environment.sh
 source /etc/overleaf/env.sh
 cd /overleaf/services/project-history
-exec /sbin/setuser www-data node scripts/force_resync.js 1000 force
+node scripts/force_resync.js 1000 force
@@ -11,22 +11,22 @@ cd /overleaf/services/web
 case "$TASK" in
 user:create-admin)
   echo "The grunt command is deprecated, run the create-user script using node instead"
-  exec /sbin/setuser www-data node modules/server-ce-scripts/scripts/create-user.mjs --admin "$@"
+  node modules/server-ce-scripts/scripts/create-user.mjs --admin "$@"
   ;;

 user:delete)
   echo "The grunt command is deprecated, run the delete-user script using node instead"
-  exec /sbin/setuser www-data node modules/server-ce-scripts/scripts/delete-user.mjs "$@"
+  node modules/server-ce-scripts/scripts/delete-user.mjs "$@"
   ;;

 check:mongo)
   echo "The grunt command is deprecated, run the check-mongodb script using node instead"
-  exec /sbin/setuser www-data node modules/server-ce-scripts/scripts/check-mongodb.mjs
+  node modules/server-ce-scripts/scripts/check-mongodb.mjs
   ;;

 check:redis)
   echo "The grunt command is deprecated, run the check-redis script using node instead"
-  exec /sbin/setuser www-data node modules/server-ce-scripts/scripts/check-redis.mjs
+  node modules/server-ce-scripts/scripts/check-redis.mjs
   ;;

 *)
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-for PR in "$@"; do
-  gh pr diff "$PR" --patch \
-    | node -e 'const blob = require("fs").readFileSync("/dev/stdin", "utf-8"); console.log(blob.replace(/From [\s\S]+?\d+ files? changed,.+/g, ""))' \
-    > "pr_$PR.patch"
-done
@@ -50,14 +50,12 @@
     "history": {
       "host": "OVERLEAF_REDIS_HOST",
       "password": "OVERLEAF_REDIS_PASS",
-      "port": "OVERLEAF_REDIS_PORT",
-      "tls": "OVERLEAF_REDIS_TLS"
+      "port": "OVERLEAF_REDIS_PORT"
     },
     "lock": {
       "host": "OVERLEAF_REDIS_HOST",
       "password": "OVERLEAF_REDIS_PASS",
-      "port": "OVERLEAF_REDIS_PORT",
-      "tls": "OVERLEAF_REDIS_TLS"
+      "port": "OVERLEAF_REDIS_PORT"
     }
   }
 }
@@ -184,10 +184,7 @@ const settings = {
   siteUrl: (siteUrl = process.env.OVERLEAF_SITE_URL || 'http://localhost'),

   // Status page URL as displayed on the maintenance/500 pages.
-  statusPageUrl: process.env.OVERLEAF_STATUS_PAGE_URL ?
-    // Add https:// protocol prefix if not set (Allow plain-text http:// for Server Pro/CE).
-    (process.env.OVERLEAF_STATUS_PAGE_URL.startsWith('http://') || process.env.OVERLEAF_STATUS_PAGE_URL.startsWith('https://')) ? process.env.OVERLEAF_STATUS_PAGE_URL : `https://${process.env.OVERLEAF_STATUS_PAGE_URL}`
-    : undefined,
+  statusPageUrl: process.env.OVERLEAF_STATUS_PAGE_URL,

   // The name this is used to describe your Overleaf Community Edition Installation
   appName: process.env.OVERLEAF_APP_NAME || 'Overleaf Community Edition',

@@ -441,8 +438,6 @@ switch (process.env.OVERLEAF_FILESTORE_BACKEND) {
       user_files: process.env.OVERLEAF_FILESTORE_USER_FILES_BUCKET_NAME,
       template_files:
         process.env.OVERLEAF_FILESTORE_TEMPLATE_FILES_BUCKET_NAME,
-      project_blobs: process.env.OVERLEAF_HISTORY_PROJECT_BLOBS_BUCKET,
-      global_blobs: process.env.OVERLEAF_HISTORY_BLOBS_BUCKET,
     },
     s3: {
       key:

@@ -465,15 +460,6 @@ switch (process.env.OVERLEAF_FILESTORE_BACKEND) {
     stores: {
       user_files: Path.join(DATA_DIR, 'user_files'),
       template_files: Path.join(DATA_DIR, 'template_files'),
-
-      // NOTE: The below paths are hard-coded in server-ce/config/production.json, so hard code them here as well.
-      // We can use DATA_DIR after switching history-v1 from 'config' to '@overleaf/settings'.
-      project_blobs:
-        process.env.OVERLEAF_HISTORY_PROJECT_BLOBS_BUCKET ||
-        '/var/lib/overleaf/data/history/overleaf-project-blobs',
-      global_blobs:
-        process.env.OVERLEAF_HISTORY_BLOBS_BUCKET ||
-        '/var/lib/overleaf/data/history/overleaf-global-blobs',
     },
   }
 }
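The first settings.js hunk above drops a protocol-normalization expression for OVERLEAF_STATUS_PAGE_URL. Pulled out into a standalone helper, the same logic carried by the removed lines reads as follows; this is a sketch restating the expression from the diff, not code that exists in the file under either branch.

```
function normalizeStatusPageUrl(url) {
  if (!url) return undefined
  // Add an https:// prefix when no protocol is given; plain http:// is
  // still allowed for Server Pro / Community Edition deployments.
  return url.startsWith('http://') || url.startsWith('https://')
    ? url
    : `https://${url}`
}

// normalizeStatusPageUrl('status.example.com')  -> 'https://status.example.com'
// normalizeStatusPageUrl('http://status.local') -> 'http://status.local'
```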
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash

-set -eu
+set -eux

 echo "-------------------------"
 echo "Deactivating old projects"

@@ -1,6 +1,6 @@
 #!/usr/bin/env bash

-set -eu
+set -eux

 echo "-------------------------"
 echo "Expiring deleted projects"

@@ -1,6 +1,6 @@
 #!/usr/bin/env bash

-set -eu
+set -eux

 echo "----------------------"
 echo "Expiring deleted users"

@@ -1,6 +1,6 @@
 #!/usr/bin/env bash

-set -eu
+set -eux

 echo "---------------------------------"
 echo "Flush all project-history changes"

@@ -9,6 +9,6 @@ date

 source /etc/container_environment.sh
 source /etc/overleaf/env.sh
-cd /overleaf/services/project-history && /sbin/setuser www-data node scripts/flush_all.js
+cd /overleaf/services/project-history && node scripts/flush_all.js

 echo "Done flushing all project-history changes"

@@ -1,6 +1,6 @@
 #!/usr/bin/env bash

-set -eu
+set -eux

 echo "--------------------------"
 echo "Flush project-history queue"

@@ -1,6 +1,6 @@
 #!/usr/bin/env bash

-set -eu
+set -eux

 echo "-----------------------------------"
 echo "Retry project-history errors (hard)"

@@ -1,6 +1,6 @@
 #!/usr/bin/env bash

-set -eu
+set -eux

 echo "-----------------------------------"
 echo "Retry project-history errors (soft)"
@@ -24,7 +24,7 @@ switch (process.argv.pop()) {
     // run webpack
     console.log('npm run webpack:production')
     // uninstall webpack and frontend dependencies
-    console.log('npm prune --omit=dev')
+    console.log('npm install --omit=dev')

     // Wait for pug precompile to finish
     console.log('wait "$pug_precompile"')
@@ -1,28 +0,0 @@
-FROM sharelatex/sharelatex:5.5.0
-
-
-# fix tls configuration in redis for history-v1
-COPY pr_25168.patch .
-RUN patch -p1 < pr_25168.patch && rm pr_25168.patch
-
-# improve logging in history system
-COPY pr_26086.patch .
-RUN patch -p1 < pr_26086.patch && rm pr_26086.patch
-
-# fix create-user.mjs script
-COPY pr_26152.patch .
-RUN patch -p1 < pr_26152.patch && rm pr_26152.patch
-
-# check mongo featureCompatibilityVersion
-COPY pr_26091.patch .
-RUN patch -p1 < pr_26091.patch && rm pr_26091.patch
-
-# update multer and tar-fs
-RUN sed -i 's/"multer": "2.0.0"/"multer": "2.0.1"/g' package.json
-RUN sed -i 's/"dockerode": "^4.0.5"/"dockerode": "^4.0.7"/g' services/clsi/package.json
-RUN sed -i 's/"tar-fs": "^3.0.4"/"tar-fs": "^3.0.9"/g' services/clsi/package.json
-RUN sed -i 's/199c5ff05bd375c508f4074498237baead7f5148/4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23/g' services/web/package.json
-COPY package-lock.json.diff .
-RUN patch package-lock.json < package-lock.json.diff
-RUN npm install --omit=dev
-RUN npm install @paralleldrive/cuid2@2.2.2 -w services/history-v1
(File diff suppressed because it is too large)
@@ -1,19 +0,0 @@
---- a/services/history-v1/config/custom-environment-variables.json
-+++ b/services/history-v1/config/custom-environment-variables.json
-@@ -50,12 +50,14 @@
-     "history": {
-       "host": "OVERLEAF_REDIS_HOST",
-       "password": "OVERLEAF_REDIS_PASS",
--      "port": "OVERLEAF_REDIS_PORT"
-+      "port": "OVERLEAF_REDIS_PORT",
-+      "tls": "OVERLEAF_REDIS_TLS"
-     },
-     "lock": {
-       "host": "OVERLEAF_REDIS_HOST",
-       "password": "OVERLEAF_REDIS_PASS",
--      "port": "OVERLEAF_REDIS_PORT"
-+      "port": "OVERLEAF_REDIS_PORT",
-+      "tls": "OVERLEAF_REDIS_TLS"
-     }
-   }
- }
@ -1,200 +0,0 @@
|
||||||
--- a/services/history-v1/api/controllers/project_import.js
|
|
||||||
+++ b/services/history-v1/api/controllers/project_import.js
|
|
||||||
@@ -35,6 +35,7 @@ async function importSnapshot(req, res) {
|
|
||||||
try {
|
|
||||||
snapshot = Snapshot.fromRaw(rawSnapshot)
|
|
||||||
} catch (err) {
|
|
||||||
+ logger.warn({ err, projectId }, 'failed to import snapshot')
|
|
||||||
return render.unprocessableEntity(res)
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -43,6 +44,7 @@ async function importSnapshot(req, res) {
|
|
||||||
historyId = await chunkStore.initializeProject(projectId, snapshot)
|
|
||||||
} catch (err) {
|
|
||||||
if (err instanceof chunkStore.AlreadyInitialized) {
|
|
||||||
+ logger.warn({ err, projectId }, 'already initialized')
|
|
||||||
return render.conflict(res)
|
|
||||||
} else {
|
|
||||||
throw err
|
|
||||||
--- a/services/history-v1/api/controllers/projects.js
|
|
||||||
+++ b/services/history-v1/api/controllers/projects.js
|
|
||||||
@@ -34,6 +34,7 @@ async function initializeProject(req, res, next) {
|
|
||||||
res.status(HTTPStatus.OK).json({ projectId })
|
|
||||||
} catch (err) {
|
|
||||||
if (err instanceof chunkStore.AlreadyInitialized) {
|
|
||||||
+ logger.warn({ err, projectId }, 'failed to initialize')
|
|
||||||
render.conflict(res)
|
|
||||||
} else {
|
|
||||||
throw err
|
|
||||||
@@ -242,11 +243,15 @@ async function createProjectBlob(req, res, next) {
|
|
||||||
const sizeLimit = new StreamSizeLimit(maxUploadSize)
|
|
||||||
await pipeline(req, sizeLimit, fs.createWriteStream(tmpPath))
|
|
||||||
if (sizeLimit.sizeLimitExceeded) {
|
|
||||||
+ logger.warn(
|
|
||||||
+ { projectId, expectedHash, maxUploadSize },
|
|
||||||
+ 'blob exceeds size threshold'
|
|
||||||
+ )
|
|
||||||
return render.requestEntityTooLarge(res)
|
|
||||||
}
|
|
||||||
const hash = await blobHash.fromFile(tmpPath)
|
|
||||||
if (hash !== expectedHash) {
|
|
||||||
- logger.debug({ hash, expectedHash }, 'Hash mismatch')
|
|
||||||
+ logger.warn({ projectId, hash, expectedHash }, 'Hash mismatch')
|
|
||||||
return render.conflict(res, 'File hash mismatch')
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -343,6 +348,10 @@ async function copyProjectBlob(req, res, next) {
|
|
||||||
targetBlobStore.getBlob(blobHash),
|
|
||||||
])
|
|
||||||
if (!sourceBlob) {
|
|
||||||
+ logger.warn(
|
|
||||||
+ { sourceProjectId, targetProjectId, blobHash },
|
|
||||||
+ 'missing source blob when copying across projects'
|
|
||||||
+ )
|
|
||||||
return render.notFound(res)
|
|
||||||
}
|
|
||||||
// Exit early if the blob exists in the target project.
|
|
||||||
--- a/services/history-v1/app.js
|
|
||||||
+++ b/services/history-v1/app.js
|
|
||||||
@@ -100,11 +100,13 @@ function setupErrorHandling() {
|
|
||||||
})
|
|
||||||
}
|
|
||||||
if (err.code === 'ENUM_MISMATCH') {
|
|
||||||
+ logger.warn({ err, projectId }, err.message)
|
|
||||||
return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({
|
|
||||||
message: 'invalid enum value: ' + err.paramName,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
if (err.code === 'REQUIRED') {
|
|
||||||
+ logger.warn({ err, projectId }, err.message)
|
|
||||||
return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({
|
|
||||||
message: err.message,
|
|
||||||
})
|
|
||||||
--- a/services/project-history/app/js/HistoryStoreManager.js
|
|
||||||
+++ b/services/project-history/app/js/HistoryStoreManager.js
|
|
||||||
@@ -35,7 +35,10 @@ class StringStream extends stream.Readable {
|
|
||||||
_mocks.getMostRecentChunk = (projectId, historyId, callback) => {
|
|
||||||
const path = `projects/${historyId}/latest/history`
|
|
||||||
logger.debug({ projectId, historyId }, 'getting chunk from history service')
|
|
||||||
- _requestChunk({ path, json: true }, callback)
|
|
||||||
+ _requestChunk({ path, json: true }, (err, chunk) => {
|
|
||||||
+ if (err) return callback(OError.tag(err))
|
|
||||||
+ callback(null, chunk)
|
|
||||||
+ })
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
@@ -54,7 +57,10 @@ export function getChunkAtVersion(projectId, historyId, version, callback) {
|
|
||||||
{ projectId, historyId, version },
|
|
||||||
'getting chunk from history service for version'
|
|
||||||
)
|
|
||||||
- _requestChunk({ path, json: true }, callback)
|
|
||||||
+ _requestChunk({ path, json: true }, (err, chunk) => {
|
|
||||||
+ if (err) return callback(OError.tag(err))
|
|
||||||
+ callback(null, chunk)
|
|
||||||
+ })
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getMostRecentVersion(projectId, historyId, callback) {
|
|
||||||
@@ -68,8 +74,10 @@ export function getMostRecentVersion(projectId, historyId, callback) {
|
|
||||||
_.sortBy(chunk.chunk.history.changes || [], x => x.timestamp)
|
|
||||||
)
|
|
||||||
// find the latest project and doc versions in the chunk
|
|
||||||
- _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) =>
|
|
||||||
+ _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) => {
|
|
||||||
+ if (err1) err1 = OError.tag(err1)
|
|
||||||
_getLatestV2DocVersions(projectId, chunk, (err2, v2DocVersions) => {
|
|
||||||
+ if (err2) err2 = OError.tag(err2)
|
|
||||||
// return the project and doc versions
|
|
||||||
const projectStructureAndDocVersions = {
|
|
||||||
project: projectVersion,
|
|
||||||
@@ -83,7 +91,7 @@ export function getMostRecentVersion(projectId, historyId, callback) {
|
|
||||||
chunk
|
|
||||||
)
|
|
||||||
})
|
|
||||||
- )
|
|
||||||
+ })
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -211,7 +219,10 @@ export function getProjectBlob(historyId, blobHash, callback) {
|
|
||||||
logger.debug({ historyId, blobHash }, 'getting blob from history service')
|
|
||||||
_requestHistoryService(
|
|
||||||
{ path: `projects/${historyId}/blobs/${blobHash}` },
|
|
||||||
- callback
|
|
||||||
+ (err, blob) => {
|
|
||||||
+ if (err) return callback(OError.tag(err))
|
|
||||||
+ callback(null, blob)
|
|
||||||
+ }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -277,7 +288,10 @@ function createBlobFromString(historyId, data, fileId, callback) {
|
|
||||||
(fsPath, cb) => {
|
|
||||||
_createBlob(historyId, fsPath, cb)
|
|
||||||
},
|
|
||||||
- callback
|
|
||||||
+ (err, hash) => {
|
|
||||||
+ if (err) return callback(OError.tag(err))
|
|
||||||
+ callback(null, hash)
|
|
||||||
+ }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -330,7 +344,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
|
|
||||||
try {
|
|
||||||
ranges = HistoryBlobTranslator.createRangeBlobDataFromUpdate(update)
|
|
||||||
} catch (error) {
|
|
||||||
- return callback(error)
|
|
||||||
+ return callback(OError.tag(error))
|
|
||||||
}
|
|
||||||
createBlobFromString(
|
|
||||||
historyId,
|
|
||||||
@@ -338,7 +352,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
|
|
||||||
`project-${projectId}-doc-${update.doc}`,
|
|
||||||
(err, fileHash) => {
|
|
||||||
if (err) {
|
|
||||||
- return callback(err)
|
|
||||||
+ return callback(OError.tag(err))
|
|
||||||
}
|
|
||||||
if (ranges) {
|
|
||||||
createBlobFromString(
|
|
||||||
@@ -347,7 +361,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
|
|
||||||
`project-${projectId}-doc-${update.doc}-ranges`,
|
|
||||||
(err, rangesHash) => {
|
|
||||||
if (err) {
|
|
||||||
- return callback(err)
|
|
||||||
+ return callback(OError.tag(err))
|
|
||||||
}
|
|
||||||
logger.debug(
|
|
||||||
{ fileHash, rangesHash },
|
|
||||||
@@ -415,7 +429,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
|
|
||||||
},
|
|
||||||
(err, fileHash) => {
|
|
||||||
if (err) {
|
|
||||||
- return callback(err)
|
|
||||||
+ return callback(OError.tag(err))
|
|
||||||
}
|
|
||||||
if (update.hash && update.hash !== fileHash) {
|
|
||||||
logger.warn(
|
|
||||||
@@ -447,7 +461,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
|
|
||||||
},
|
|
||||||
(err, fileHash) => {
|
|
||||||
if (err) {
|
|
||||||
- return callback(err)
|
|
||||||
+ return callback(OError.tag(err))
|
|
||||||
}
|
|
||||||
logger.debug({ fileHash }, 'created empty blob for file')
|
|
||||||
callback(null, { file: fileHash })
|
|
||||||
@@ -520,7 +534,10 @@ export function initializeProject(historyId, callback) {
|
|
||||||
export function deleteProject(projectId, callback) {
|
|
||||||
_requestHistoryService(
|
|
||||||
{ method: 'DELETE', path: `projects/${projectId}` },
|
|
||||||
- callback
|
|
||||||
+ err => {
|
|
||||||
+ if (err) return callback(OError.tag(err))
|
|
||||||
+ callback(null)
|
|
||||||
+ }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
|
@@ -1,60 +0,0 @@
---- a/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
-+++ b/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
-@@ -7,6 +7,7 @@ import {
- const { ObjectId } = mongodb
-
- const MIN_MONGO_VERSION = [6, 0]
-+const MIN_MONGO_FEATURE_COMPATIBILITY_VERSION = [6, 0]
-
- async function main() {
-   let mongoClient
-@@ -18,6 +19,7 @@ async function main() {
-   }
-
-   await checkMongoVersion(mongoClient)
-+  await checkFeatureCompatibilityVersion(mongoClient)
-
-   try {
-     await testTransactions(mongoClient)
-@@ -53,6 +55,41 @@ async function checkMongoVersion(mongoClient) {
-   }
- }
-
-+async function checkFeatureCompatibilityVersion(mongoClient) {
-+  const {
-+    featureCompatibilityVersion: { version },
-+  } = await mongoClient
-+    .db()
-+    .admin()
-+    .command({ getParameter: 1, featureCompatibilityVersion: 1 })
-+  const [major, minor] = version.split('.').map(v => parseInt(v))
-+  const [minMajor, minMinor] = MIN_MONGO_FEATURE_COMPATIBILITY_VERSION
-+
-+  if (major < minMajor || (major === minMajor && minor < minMinor)) {
-+    const minVersion = MIN_MONGO_FEATURE_COMPATIBILITY_VERSION.join('.')
-+    console.error(`
-+The MongoDB server has featureCompatibilityVersion=${version}, but Overleaf requires at least version ${minVersion}.
-+
-+Open a mongo shell:
-+- Overleaf Toolkit deployments: $ bin/mongo
-+- Legacy docker-compose.yml deployments: $ docker exec -it mongo mongosh localhost/sharelatex
-+
-+In the mongo shell:
-+> db.adminCommand( { setFeatureCompatibilityVersion: "${minMajor}.${minMinor}" } )
-+
-+Verify the new value:
-+> db.adminCommand( { getParameter: 1, featureCompatibilityVersion: 1 } )
-+  ...
-+  {
-+    featureCompatibilityVersion: { version: ${minMajor}.${minMinor}' },
-+...
-+
-+Aborting.
-+`)
-+    process.exit(1)
-+  }
-+}
-+
- main()
-   .then(() => {
-     console.error('Mongodb is up.')
@@ -1,16 +0,0 @@
---- a/services/web/modules/server-ce-scripts/scripts/create-user.mjs
-+++ b/services/web/modules/server-ce-scripts/scripts/create-user.mjs
-@@ -48,3 +48,13 @@ Please visit the following URL to set a password for ${email} and log in:
-   )
- })
-}
-+
-+if (filename === process.argv[1]) {
-+  try {
-+    await main()
-+    process.exit(0)
-+  } catch (error) {
-+    console.error({ error })
-+    process.exit(1)
-+  }
-+}
@@ -1,27 +0,0 @@
-FROM sharelatex/sharelatex:5.5.1
-
-# https://github.com/overleaf/internal/pull/25944
-# Removed changes to services/web/frontend/js/features/ide-redesign/components/rail.tsx due to incompatibility with 5.5.1
-COPY pr_25944.patch .
-RUN patch -p1 < pr_25944.patch && rm pr_25944.patch
-
-# https://github.com/overleaf/internal/pull/26637
-# Removed changes to server-ce/test/create-and-compile-project.spec.ts and server-ce/test/helpers/compile.ts due to incompatibility with 5.5.1
-COPY pr_26637.patch .
-RUN patch -p1 < pr_26637.patch && rm pr_26637.patch
-
-# https://github.com/overleaf/internal/pull/26783
-COPY pr_26783.patch .
-RUN patch -p1 < pr_26783.patch && rm pr_26783.patch
-
-# https://github.com/overleaf/internal/pull/26697
-COPY pr_26697.patch .
-RUN patch -p1 < pr_26697.patch && rm pr_26697.patch
-
-# Apply security updates to base image
-RUN apt update && apt install -y linux-libc-dev \
-  && unattended-upgrade --verbose --no-minimal-upgrade-steps \
-  && rm -rf /var/lib/apt/lists/*
-
-# Recompile frontend assets
-RUN node genScript compile | bash
@ -1,219 +0,0 @@
|
||||||
diff --git a/services/web/frontend/js/features/review-panel-new/context/review-panel-providers.tsx b/services/web/frontend/js/features/review-panel-new/context/review-panel-providers.tsx
|
|
||||||
index 20e157dfee9..ad943772d0d 100644
|
|
||||||
--- a/services/web/frontend/js/features/review-panel-new/context/review-panel-providers.tsx
|
|
||||||
+++ b/services/web/frontend/js/features/review-panel-new/context/review-panel-providers.tsx
|
|
||||||
@@ -4,10 +4,16 @@ import { ChangesUsersProvider } from './changes-users-context'
|
|
||||||
import { TrackChangesStateProvider } from './track-changes-state-context'
|
|
||||||
import { ThreadsProvider } from './threads-context'
|
|
||||||
import { ReviewPanelViewProvider } from './review-panel-view-context'
|
|
||||||
+import { useProjectContext } from '@/shared/context/project-context'
|
|
||||||
|
|
||||||
export const ReviewPanelProviders: FC<React.PropsWithChildren> = ({
|
|
||||||
children,
|
|
||||||
}) => {
|
|
||||||
+ const { features } = useProjectContext()
|
|
||||||
+ if (!features.trackChangesVisible) {
|
|
||||||
+ return children
|
|
||||||
+ }
|
|
||||||
+
|
|
||||||
return (
|
|
||||||
<ReviewPanelViewProvider>
|
|
||||||
<ChangesUsersProvider>
|
|
||||||
diff --git a/services/web/frontend/js/features/share-project-modal/components/add-collaborators.tsx b/services/web/frontend/js/features/share-project-modal/components/add-collaborators.tsx
|
|
||||||
index 8606fb11fad..e80fb037116 100644
|
|
||||||
--- a/services/web/frontend/js/features/share-project-modal/components/add-collaborators.tsx
|
|
||||||
+++ b/services/web/frontend/js/features/share-project-modal/components/add-collaborators.tsx
|
|
||||||
@@ -176,24 +176,34 @@ export default function AddCollaborators({ readOnly }: { readOnly?: boolean }) {
|
|
||||||
])
|
|
||||||
|
|
||||||
const privilegeOptions = useMemo(() => {
|
|
||||||
- return [
|
|
||||||
+ const options: {
|
|
||||||
+ key: string
|
|
||||||
+ label: string
|
|
||||||
+ description?: string | null
|
|
||||||
+ }[] = [
|
|
||||||
{
|
|
||||||
key: 'readAndWrite',
|
|
||||||
label: t('editor'),
|
|
||||||
},
|
|
||||||
- {
|
|
||||||
+ ]
|
|
||||||
+
|
|
||||||
+ if (features.trackChangesVisible) {
|
|
||||||
+ options.push({
|
|
||||||
key: 'review',
|
|
||||||
label: t('reviewer'),
|
|
||||||
description: !features.trackChanges
|
|
||||||
? t('comment_only_upgrade_for_track_changes')
|
|
||||||
: null,
|
|
||||||
- },
|
|
||||||
- {
|
|
||||||
- key: 'readOnly',
|
|
||||||
- label: t('viewer'),
|
|
||||||
- },
|
|
||||||
- ]
|
|
||||||
- }, [features.trackChanges, t])
|
|
||||||
+ })
|
|
||||||
+ }
|
|
||||||
+
|
|
||||||
+ options.push({
|
|
||||||
+ key: 'readOnly',
|
|
||||||
+ label: t('viewer'),
|
|
||||||
+ })
|
|
||||||
+
|
|
||||||
+ return options
|
|
||||||
+ }, [features.trackChanges, features.trackChangesVisible, t])
|
|
||||||
|
|
||||||
return (
|
|
||||||
<OLForm className="add-collabs">
|
|
||||||
diff --git a/services/web/frontend/js/features/share-project-modal/components/edit-member.tsx b/services/web/frontend/js/features/share-project-modal/components/edit-member.tsx
|
|
||||||
index 6d806968b12..9f24cddc4ad 100644
|
|
||||||
--- a/services/web/frontend/js/features/share-project-modal/components/edit-member.tsx
|
|
||||||
+++ b/services/web/frontend/js/features/share-project-modal/components/edit-member.tsx
|
|
||||||
@@ -244,14 +244,22 @@ function SelectPrivilege({
|
|
||||||
const { features } = useProjectContext()
|
|
||||||
|
|
||||||
const privileges = useMemo(
|
|
||||||
- (): Privilege[] => [
|
|
||||||
- { key: 'owner', label: t('make_owner') },
|
|
||||||
- { key: 'readAndWrite', label: t('editor') },
|
|
||||||
- { key: 'review', label: t('reviewer') },
|
|
||||||
- { key: 'readOnly', label: t('viewer') },
|
|
||||||
- { key: 'removeAccess', label: t('remove_access') },
|
|
||||||
- ],
|
|
||||||
- [t]
|
|
||||||
+ (): Privilege[] =>
|
|
||||||
+ features.trackChangesVisible
|
|
||||||
+ ? [
|
|
||||||
+ { key: 'owner', label: t('make_owner') },
|
|
||||||
+ { key: 'readAndWrite', label: t('editor') },
|
|
||||||
+ { key: 'review', label: t('reviewer') },
|
|
||||||
+ { key: 'readOnly', label: t('viewer') },
|
|
||||||
+ { key: 'removeAccess', label: t('remove_access') },
|
|
||||||
+ ]
|
|
||||||
+ : [
|
|
||||||
+ { key: 'owner', label: t('make_owner') },
|
|
||||||
+ { key: 'readAndWrite', label: t('editor') },
|
|
||||||
+ { key: 'readOnly', label: t('viewer') },
|
|
||||||
+ { key: 'removeAccess', label: t('remove_access') },
|
|
||||||
+ ],
|
|
||||||
+ [features.trackChangesVisible, t]
|
|
||||||
)
|
|
||||||
|
|
||||||
const downgradedPseudoPrivilege: Privilege = {
|
|
||||||
diff --git a/services/web/frontend/js/features/source-editor/components/codemirror-editor.tsx b/services/web/frontend/js/features/source-editor/components/codemirror-editor.tsx
|
|
||||||
index c1808cbb301..4bdfe2682c8 100644
|
|
||||||
--- a/services/web/frontend/js/features/source-editor/components/codemirror-editor.tsx
|
|
||||||
+++ b/services/web/frontend/js/features/source-editor/components/codemirror-editor.tsx
|
|
||||||
@@ -18,6 +18,7 @@ import {
|
|
||||||
} from './codemirror-context'
|
|
||||||
import MathPreviewTooltip from './math-preview-tooltip'
|
|
||||||
import { useToolbarMenuBarEditorCommands } from '@/features/ide-redesign/hooks/use-toolbar-menu-editor-commands'
|
|
||||||
+import { useProjectContext } from '@/shared/context/project-context'
|
|
||||||
|
|
||||||
// TODO: remove this when definitely no longer used
|
|
||||||
export * from './codemirror-context'
|
|
||||||
@@ -67,6 +68,7 @@ function CodeMirrorEditor() {
|
|
||||||
|
|
||||||
function CodeMirrorEditorComponents() {
|
|
||||||
useToolbarMenuBarEditorCommands()
|
|
||||||
+ const { features } = useProjectContext()
|
|
||||||
|
|
||||||
return (
|
|
||||||
<ReviewPanelProviders>
|
|
||||||
@@ -83,8 +85,8 @@ function CodeMirrorEditorComponents() {
|
|
||||||
<CodeMirrorCommandTooltip />
|
|
||||||
|
|
||||||
<MathPreviewTooltip />
|
|
||||||
- <ReviewTooltipMenu />
|
|
||||||
- <ReviewPanelNew />
|
|
||||||
+ {features.trackChangesVisible && <ReviewTooltipMenu />}
|
|
||||||
+ {features.trackChangesVisible && <ReviewPanelNew />}
|
|
||||||
|
|
||||||
{sourceEditorComponents.map(
|
|
||||||
({ import: { default: Component }, path }) => (
|
|
||||||
diff --git a/services/web/frontend/js/features/source-editor/components/toolbar/toolbar-items.tsx b/services/web/frontend/js/features/source-editor/components/toolbar/toolbar-items.tsx
|
|
||||||
index e70663683fc..c5d9f3d3e47 100644
|
|
||||||
--- a/services/web/frontend/js/features/source-editor/components/toolbar/toolbar-items.tsx
|
|
||||||
+++ b/services/web/frontend/js/features/source-editor/components/toolbar/toolbar-items.tsx
|
|
||||||
@@ -14,6 +14,7 @@ import { LegacyTableDropdown } from './table-inserter-dropdown-legacy'
|
|
||||||
import { withinFormattingCommand } from '@/features/source-editor/utils/tree-operations/formatting'
|
|
||||||
import { isSplitTestEnabled } from '@/utils/splitTestUtils'
|
|
||||||
import { isMac } from '@/shared/utils/os'
|
|
||||||
+import { useProjectContext } from '@/shared/context/project-context'
|
|
||||||
|
|
||||||
export const ToolbarItems: FC<{
|
|
||||||
state: EditorState
|
|
||||||
@@ -31,6 +32,7 @@ export const ToolbarItems: FC<{
|
|
||||||
const { t } = useTranslation()
|
|
||||||
const { toggleSymbolPalette, showSymbolPalette, writefullInstance } =
|
|
||||||
useEditorContext()
|
|
||||||
+ const { features } = useProjectContext()
|
|
||||||
const isActive = withinFormattingCommand(state)
|
|
||||||
|
|
||||||
const symbolPaletteAvailable = getMeta('ol-symbolPaletteAvailable')
|
|
||||||
@@ -127,13 +129,15 @@ export const ToolbarItems: FC<{
|
|
||||||
command={commands.wrapInHref}
|
|
||||||
icon="add_link"
|
|
||||||
/>
|
|
||||||
- <ToolbarButton
|
|
||||||
- id="toolbar-add-comment"
|
|
||||||
- label={t('add_comment')}
|
|
||||||
- disabled={state.selection.main.empty}
|
|
||||||
- command={commands.addComment}
|
|
||||||
- icon="add_comment"
|
|
||||||
- />
|
|
||||||
+ {features.trackChangesVisible && (
|
|
||||||
+ <ToolbarButton
|
|
||||||
+ id="toolbar-add-comment"
|
|
||||||
+ label={t('add_comment')}
|
|
||||||
+ disabled={state.selection.main.empty}
|
|
||||||
+ command={commands.addComment}
|
|
||||||
+ icon="add_comment"
|
|
||||||
+ />
|
|
||||||
+ )}
|
|
||||||
<ToolbarButton
|
|
||||||
id="toolbar-ref"
|
|
||||||
label={t('toolbar_insert_cross_reference')}
|
|
||||||
diff --git a/services/web/test/frontend/features/review-panel/review-panel.spec.tsx b/services/web/test/frontend/features/review-panel/review-panel.spec.tsx
|
|
||||||
index d6677878108..58ac3e443da 100644
|
|
||||||
--- a/services/web/test/frontend/features/review-panel/review-panel.spec.tsx
|
|
||||||
+++ b/services/web/test/frontend/features/review-panel/review-panel.spec.tsx
|
|
||||||
@@ -181,6 +181,7 @@ describe('<ReviewPanel />', function () {
|
|
||||||
removeChangeIds,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
+ projectFeatures: { trackChangesVisible: true },
|
|
||||||
})
|
|
||||||
|
|
||||||
cy.wrap(scope).as('scope')
|
|
||||||
@@ -626,7 +627,7 @@ describe('<ReviewPanel /> for free users', function () {
|
|
||||||
function mountEditor(ownerId = USER_ID) {
|
|
||||||
const scope = mockScope(undefined, {
|
|
||||||
permissions: { write: true, trackedWrite: false, comment: true },
|
|
||||||
- projectFeatures: { trackChanges: false },
|
|
||||||
+ projectFeatures: { trackChanges: false, trackChangesVisible: true },
|
|
||||||
projectOwner: {
|
|
||||||
_id: ownerId,
|
|
||||||
},
|
|
||||||
diff --git a/services/web/test/frontend/features/share-project-modal/components/share-project-modal.test.jsx b/services/web/test/frontend/features/share-project-modal/components/share-project-modal.test.jsx
|
|
||||||
index b86207fb0f7..dfce8134d1c 100644
|
|
||||||
--- a/services/web/test/frontend/features/share-project-modal/components/share-project-modal.test.jsx
|
|
||||||
+++ b/services/web/test/frontend/features/share-project-modal/components/share-project-modal.test.jsx
|
|
||||||
@@ -694,6 +694,7 @@ describe('<ShareProjectModal/>', function () {
|
|
||||||
features: {
|
|
||||||
collaborators: 0,
|
|
||||||
compileGroup: 'standard',
|
|
||||||
+ trackChangesVisible: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
@@ -723,6 +724,7 @@ describe('<ShareProjectModal/>', function () {
|
|
||||||
...project,
|
|
||||||
features: {
|
|
||||||
collaborators: 1,
|
|
||||||
+ trackChangesVisible: true,
|
|
||||||
},
|
|
||||||
members: [
|
|
||||||
{
|
|
|
@@ -1,86 +0,0 @@
-diff --git a/services/clsi/app/js/LocalCommandRunner.js b/services/clsi/app/js/LocalCommandRunner.js
-index ce274733585..aa62825443c 100644
---- a/services/clsi/app/js/LocalCommandRunner.js
-+++ b/services/clsi/app/js/LocalCommandRunner.js
-@@ -54,6 +54,7 @@ module.exports = CommandRunner = {
-       cwd: directory,
-       env,
-       stdio: ['pipe', 'pipe', 'ignore'],
-+      detached: true,
-     })
-
-     let stdout = ''
-diff --git a/services/clsi/test/acceptance/js/StopCompile.js b/services/clsi/test/acceptance/js/StopCompile.js
-new file mode 100644
-index 00000000000..103a70f37d7
---- /dev/null
-+++ b/services/clsi/test/acceptance/js/StopCompile.js
-@@ -0,0 +1,47 @@
-+const Client = require('./helpers/Client')
-+const ClsiApp = require('./helpers/ClsiApp')
-+const { expect } = require('chai')
-+
-+describe('Stop compile', function () {
-+  before(function (done) {
-+    this.request = {
-+      options: {
-+        timeout: 100,
-+      }, // seconds
-+      resources: [
-+        {
-+          path: 'main.tex',
-+          content: `\
-+\\documentclass{article}
-+\\begin{document}
-+\\def\\x{Hello!\\par\\x}
-+\\x
-+\\end{document}\
-+`,
-+        },
-+      ],
-+    }
-+    this.project_id = Client.randomId()
-+    ClsiApp.ensureRunning(() => {
-+      // start the compile in the background
-+      Client.compile(this.project_id, this.request, (error, res, body) => {
-+        this.compileResult = { error, res, body }
-+      })
-+      // wait for 1 second before stopping the compile
-+      setTimeout(() => {
-+        Client.stopCompile(this.project_id, (error, res, body) => {
-+          this.stopResult = { error, res, body }
-+          setTimeout(done, 1000) // allow time for the compile request to terminate
-+        })
-+      }, 1000)
-+    })
-+  })
-+
-+  it('should force a compile response with an error status', function () {
-+    expect(this.stopResult.error).to.be.null
-+    expect(this.stopResult.res.statusCode).to.equal(204)
-+    expect(this.compileResult.res.statusCode).to.equal(200)
-+    expect(this.compileResult.body.compile.status).to.equal('terminated')
-+    expect(this.compileResult.body.compile.error).to.equal('terminated')
-+  })
-+})
-diff --git a/services/clsi/test/acceptance/js/helpers/Client.js b/services/clsi/test/acceptance/js/helpers/Client.js
-index a0bdce734f3..49bf7390c6f 100644
---- a/services/clsi/test/acceptance/js/helpers/Client.js
-+++ b/services/clsi/test/acceptance/js/helpers/Client.js
-@@ -42,6 +42,16 @@ module.exports = Client = {
-     )
-   },
-
-+  stopCompile(projectId, callback) {
-+    if (callback == null) {
-+      callback = function () {}
-+    }
-+    return request.post(
-+      { url: `${this.host}/project/${projectId}/compile/stop` },
-+      callback
-+    )
-+  },
-+
-   clearCache(projectId, callback) {
-     if (callback == null) {
-       callback = function () {}
@ -1,172 +0,0 @@
|
||||||
diff --git a/services/web/frontend/js/features/project-list/components/project-list-ds-nav.tsx b/services/web/frontend/js/features/project-list/components/project-list-ds-nav.tsx
|
|
||||||
index 8f3b3a8e5d0..f8c8014e1c0 100644
|
|
||||||
--- a/services/web/frontend/js/features/project-list/components/project-list-ds-nav.tsx
|
|
||||||
+++ b/services/web/frontend/js/features/project-list/components/project-list-ds-nav.tsx
|
|
||||||
@@ -55,7 +55,11 @@ export function ProjectListDsNav() {
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div className="project-ds-nav-page website-redesign">
|
|
||||||
- <DefaultNavbar {...navbarProps} customLogo={overleafLogo} showCloseIcon />
|
|
||||||
+ <DefaultNavbar
|
|
||||||
+ {...navbarProps}
|
|
||||||
+ overleafLogo={overleafLogo}
|
|
||||||
+ showCloseIcon
|
|
||||||
+ />
|
|
||||||
<main className="project-list-wrapper">
|
|
||||||
<SidebarDsNav />
|
|
||||||
<div className="project-ds-nav-content-and-messages">
|
|
||||||
diff --git a/services/web/frontend/js/features/ui/components/bootstrap-5/navbar/default-navbar.tsx b/services/web/frontend/js/features/ui/components/bootstrap-5/navbar/default-navbar.tsx
|
|
||||||
index 2480b7f061f..8e5429dbde6 100644
|
|
||||||
--- a/services/web/frontend/js/features/ui/components/bootstrap-5/navbar/default-navbar.tsx
|
|
||||||
+++ b/services/web/frontend/js/features/ui/components/bootstrap-5/navbar/default-navbar.tsx
|
|
||||||
@@ -1,4 +1,4 @@
|
|
||||||
-import { useState } from 'react'
|
|
||||||
+import React, { useState } from 'react'
|
|
||||||
import { sendMB } from '@/infrastructure/event-tracking'
|
|
||||||
import { useTranslation } from 'react-i18next'
|
|
||||||
import { Button, Container, Nav, Navbar } from 'react-bootstrap'
|
|
||||||
@@ -13,9 +13,15 @@ import MaterialIcon from '@/shared/components/material-icon'
|
|
||||||
import { useContactUsModal } from '@/shared/hooks/use-contact-us-modal'
|
|
||||||
import { UserProvider } from '@/shared/context/user-context'
|
|
||||||
import { X } from '@phosphor-icons/react'
|
|
||||||
+import overleafWhiteLogo from '@/shared/svgs/overleaf-white.svg'
|
|
||||||
+import overleafBlackLogo from '@/shared/svgs/overleaf-black.svg'
|
|
||||||
+import type { CSSPropertiesWithVariables } from '../../../../../../../types/css-properties-with-variables'
|
|
||||||
|
|
||||||
-function DefaultNavbar(props: DefaultNavbarMetadata) {
|
|
||||||
+function DefaultNavbar(
|
|
||||||
+ props: DefaultNavbarMetadata & { overleafLogo?: string }
|
|
||||||
+) {
|
|
||||||
const {
|
|
||||||
+ overleafLogo,
|
|
||||||
customLogo,
|
|
||||||
title,
|
|
||||||
canDisplayAdminMenu,
|
|
||||||
@@ -49,10 +55,20 @@ function DefaultNavbar(props: DefaultNavbarMetadata) {
|
|
||||||
className="navbar-default navbar-main"
|
|
||||||
expand="lg"
|
|
||||||
onToggle={expanded => setExpanded(expanded)}
|
|
||||||
+ style={
|
|
||||||
+ {
|
|
||||||
+ '--navbar-brand-image-default-url': `url("${overleafWhiteLogo}")`,
|
|
||||||
+ '--navbar-brand-image-redesign-url': `url("${overleafBlackLogo}")`,
|
|
||||||
+ } as CSSPropertiesWithVariables
|
|
||||||
+ }
|
|
||||||
>
|
|
||||||
<Container className="navbar-container" fluid>
|
|
||||||
<div className="navbar-header">
|
|
||||||
- <HeaderLogoOrTitle title={title} customLogo={customLogo} />
|
|
||||||
+ <HeaderLogoOrTitle
|
|
||||||
+ title={title}
|
|
||||||
+ overleafLogo={overleafLogo}
|
|
||||||
+ customLogo={customLogo}
|
|
||||||
+ />
|
|
||||||
{enableUpgradeButton ? (
|
|
||||||
<Button
|
|
||||||
as="a"
|
|
||||||
diff --git a/services/web/frontend/js/features/ui/components/bootstrap-5/navbar/header-logo-or-title.tsx b/services/web/frontend/js/features/ui/components/bootstrap-5/navbar/header-logo-or-title.tsx
|
|
||||||
index 44500f1b826..3eefc8e2d1c 100644
|
|
||||||
--- a/services/web/frontend/js/features/ui/components/bootstrap-5/navbar/header-logo-or-title.tsx
|
|
||||||
+++ b/services/web/frontend/js/features/ui/components/bootstrap-5/navbar/header-logo-or-title.tsx
|
|
||||||
@@ -2,11 +2,13 @@ import type { DefaultNavbarMetadata } from '@/features/ui/components/types/defau
|
|
||||||
import getMeta from '@/utils/meta'
|
|
||||||
|
|
||||||
export default function HeaderLogoOrTitle({
|
|
||||||
+ overleafLogo,
|
|
||||||
customLogo,
|
|
||||||
title,
|
|
||||||
-}: Pick<DefaultNavbarMetadata, 'customLogo' | 'title'>) {
|
|
||||||
+}: Pick<DefaultNavbarMetadata, 'customLogo' | 'title'> & {
|
|
||||||
+ overleafLogo?: string
|
|
||||||
+}) {
|
|
||||||
const { appName } = getMeta('ol-ExposedSettings')
|
|
||||||
-
|
|
||||||
if (customLogo) {
|
|
||||||
return (
|
|
||||||
// eslint-disable-next-line jsx-a11y/anchor-has-content
|
|
||||||
@@ -24,9 +26,16 @@ export default function HeaderLogoOrTitle({
|
|
||||||
</a>
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
+ const style = overleafLogo
|
|
||||||
+ ? {
|
|
||||||
+ style: {
|
|
||||||
+ backgroundImage: `url("${overleafLogo}")`,
|
|
||||||
+ },
|
|
||||||
+ }
|
|
||||||
+ : null
|
|
||||||
return (
|
|
||||||
// eslint-disable-next-line jsx-a11y/anchor-has-content
|
|
||||||
- <a href="/" aria-label={appName} className="navbar-brand" />
|
|
||||||
+ <a href="/" aria-label={appName} className="navbar-brand" {...style} />
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
diff --git a/services/web/frontend/js/shared/svgs/overleaf-black.svg b/services/web/frontend/js/shared/svgs/overleaf-black.svg
|
|
||||||
new file mode 100644
|
|
||||||
index 00000000000..ea0678438ba
|
|
||||||
--- /dev/null
|
|
||||||
+++ b/services/web/frontend/js/shared/svgs/overleaf-black.svg
|
|
||||||
@@ -0,0 +1,9 @@
|
|
||||||
+<svg width="129" height="38" viewBox="0 0 129 38" fill="none" xmlns="http://www.w3.org/2000/svg">
|
|
||||||
+<mask id="mask0_2579_355" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="0" y="0" width="72" height="38">
|
|
||||||
+<path d="M71.7643 37.6327H0.0244141V0.0717773H71.7643V37.6327Z" fill="white"/>
|
|
||||||
+</mask>
|
|
||||||
+<g mask="url(#mask0_2579_355)">
|
|
||||||
+<path d="M47.2509 26.4555C47.3948 27.7507 47.8985 28.7821 48.81 29.5257C49.6974 30.2692 50.8487 30.653 52.2638 30.653C53.1993 30.653 54.0387 30.4611 54.7823 30.0773C55.5258 29.6696 56.1255 29.1419 56.5572 28.4223H61.0664C60.2989 30.3891 59.1716 31.9002 57.6365 33.0035C56.1255 34.0829 54.3506 34.6345 52.3598 34.6345C51.0166 34.6345 49.7934 34.3947 48.666 33.915C47.5387 33.4352 46.5314 32.7397 45.6199 31.8043C44.7804 30.9168 44.1089 29.9094 43.6531 28.7341C43.1974 27.5589 42.9576 26.3836 42.9576 25.1603C42.9576 23.9131 43.1734 22.7138 43.6052 21.6105C44.0369 20.5072 44.6605 19.4998 45.5 18.6124C46.4114 17.629 47.4668 16.8854 48.6181 16.3817C49.7694 15.8541 50.9686 15.5902 52.1919 15.5902C53.7509 15.5902 55.214 15.95 56.5572 16.6456C57.9004 17.3651 59.0517 18.3485 60.0111 19.6437C60.5867 20.4113 61.0185 21.2747 61.3063 22.2581C61.5941 23.2175 61.714 24.3209 61.714 25.5681C61.714 25.664 61.714 25.8079 61.69 26.0238C61.69 26.2397 61.6661 26.3836 61.6661 26.4795H47.2509V26.4555ZM57.2048 23.1216C56.845 21.9223 56.2454 21.0109 55.4059 20.3873C54.5664 19.7637 53.4871 19.4519 52.2159 19.4519C51.0886 19.4519 50.1052 19.7876 49.2177 20.4592C48.3303 21.1308 47.7306 22.0183 47.4188 23.1216H57.2048ZM71.7638 19.7637C70.1328 19.8836 69.0055 20.3153 68.3579 21.0349C67.7103 21.7544 67.3985 23.0496 67.3985 24.9205V34.1068H63.2011V16.1179H67.1347V18.2046C67.7583 17.3891 68.4539 16.8135 69.2214 16.4297C69.9649 16.0459 70.8284 15.8541 71.7638 15.8541V19.7637ZM32.428 1.24705C27.3432 -0.743722 8.9465 -1.46328 8.92251 9.52196C3.54982 12.9519 0 18.5404 0 24.5367C0 31.7803 5.87638 37.6567 13.1199 37.6567C20.3635 37.6567 26.2399 31.7803 26.2399 24.5367C26.2399 18.9482 22.738 14.1511 17.797 12.2803C16.8376 11.9205 14.7749 11.2729 13.1439 11.4168C10.7934 12.9039 7.91513 15.974 6.57196 19.0441C8.58672 16.6216 11.7288 15.5662 14.5351 16.022C18.6365 16.6936 21.7786 20.2434 21.7786 24.5607C21.7786 29.3338 17.917 33.1954 13.1439 33.1954C10.5055 33.1954 8.15498 32.0201 6.57196 30.1733C4.19742 27.415 3.59779 24.4408 4.07749 21.5386C5.73247 11.3688 17.797 5.58838 26.7675 3.35775C23.8413 4.9168 18.5646 7.45923 14.8708 10.2175C25.6402 14.391 27.3911 5.30056 32.428 1.24705ZM36.7934 34.1308H33.5074L26.6716 16.1179H31.1328L35.3303 28.0865L39.6476 16.1179H43.9889L36.7934 34.1308Z" fill="#1B222C"/>
|
|
||||||
+</g>
|
|
||||||
+<path d="M83.6127 26.4556C83.7567 27.7508 84.2843 28.7822 85.1718 29.5257C86.0592 30.2692 87.2105 30.653 88.6257 30.653C89.5611 30.653 90.4006 30.4611 91.1441 30.0774C91.8877 29.6696 92.4873 29.1419 92.919 28.4224H97.4282C96.6607 30.3892 95.5334 31.9002 93.9984 33.0036C92.4873 34.0829 90.7124 34.6346 88.7216 34.6346C87.3784 34.6346 86.1552 34.3947 85.0279 33.915C83.9006 33.4353 82.8932 32.7397 81.9817 31.8043C81.1423 30.9168 80.4707 29.9095 80.015 28.7342C79.5353 27.5829 79.3194 26.3836 79.3194 25.1604C79.3194 23.9131 79.5353 22.7139 79.967 21.6106C80.3987 20.5072 81.0223 19.4999 81.8618 18.6124C82.7733 17.629 83.8286 16.8855 84.9799 16.3818C86.1312 15.8541 87.3305 15.5903 88.5537 15.5903C90.1128 15.5903 91.5758 15.95 92.919 16.6456C94.2622 17.3652 95.4135 18.3486 96.3729 19.6438C96.9485 20.4113 97.3803 21.2748 97.6681 22.2582C97.9559 23.2176 98.0758 24.3209 98.0758 25.5681C98.0758 25.6641 98.0758 25.808 98.0519 26.0238C98.0519 26.2397 98.0279 26.3836 98.0279 26.4796H83.6127V26.4556ZM93.5426 23.1216C93.1829 21.9224 92.5832 21.0109 91.7437 20.3873C90.9043 19.7637 89.8249 19.4519 88.5537 19.4519C87.4264 19.4519 86.443 19.7877 85.5556 20.4593C84.6681 21.1309 84.0685 22.0183 83.7567 23.1216H93.5426ZM114.698 34.1309V31.9242C114.194 32.8117 113.498 33.4833 112.587 33.915C111.675 34.3467 110.5 34.5626 109.085 34.5626C106.423 34.5626 104.192 33.6512 102.417 31.8283C100.642 30.0054 99.7308 27.7508 99.7308 25.0644C99.7308 23.7932 99.9467 22.594 100.402 21.4667C100.858 20.3393 101.482 19.332 102.321 18.4685C103.209 17.5091 104.216 16.8135 105.295 16.3578C106.375 15.9021 107.622 15.6862 108.989 15.6862C110.308 15.6862 111.436 15.9021 112.371 16.3338C113.306 16.7655 114.074 17.4371 114.65 18.3246V16.1419H118.727V34.1548H114.698V34.1309ZM104.024 24.9685C104.024 26.4796 104.528 27.7508 105.535 28.7822C106.543 29.8135 107.766 30.3172 109.229 30.3172C110.548 30.3172 111.699 29.8135 112.707 28.7822C113.714 27.7508 114.218 26.5515 114.218 25.1844C114.218 23.7213 113.714 22.474 112.707 21.4187C111.699 20.3633 110.524 19.8357 109.157 19.8357C107.742 19.8357 106.543 20.3393 105.535 21.3227C104.528 22.3301 104.024 23.5294 104.024 24.9685ZM129.904 16.1179V19.8596H126.882V34.1309H122.829V19.8596H120.694V16.1179H122.709V15.6382C122.709 13.7434 123.236 12.3283 124.268 11.3929C125.323 10.4574 126.906 10.0017 129.041 10.0017C129.113 10.0017 129.257 10.0017 129.449 10.0257C129.64 10.0257 129.784 10.0497 129.904 10.0497V13.8154H129.616C128.657 13.8154 127.985 13.9833 127.578 14.2711C127.17 14.5829 126.954 15.0866 126.954 15.8301V16.1659H129.904V16.1179ZM73.5869 34.1309H77.6884V10.2895H73.5869V34.1309Z" fill="#1B222C"/>
|
|
||||||
+</svg>
|
|
||||||
diff --git a/services/web/frontend/js/shared/svgs/overleaf-white.svg b/services/web/frontend/js/shared/svgs/overleaf-white.svg
|
|
||||||
new file mode 100644
|
|
||||||
index 00000000000..2ced81aa46d
|
|
||||||
--- /dev/null
|
|
||||||
+++ b/services/web/frontend/js/shared/svgs/overleaf-white.svg
|
|
||||||
@@ -0,0 +1 @@
|
|
||||||
+<svg id="Layer_1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 542 157" enable-background="new 0 0 542 157"><style>.st0{filter:url(#Adobe_OpacityMaskFilter);} .st1{fill:#FFFFFF;} .st2{mask:url(#mask-2);fill:#FFFFFF;}</style><g id="Page-1"><g id="Overleaf"><g id="Group-3"><defs><filter id="Adobe_OpacityMaskFilter" filterUnits="userSpaceOnUse" x="0" y=".3" width="299.2" height="156.7"><feColorMatrix values="1 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 1 0"/></filter></defs><mask maskUnits="userSpaceOnUse" x="0" y=".3" width="299.2" height="156.7" id="mask-2"><g class="st0"><path id="path-1" class="st1" d="M299.2 156.9H.1V.3h299.1z"/></g></mask><path id="Fill-1" class="st2" d="M197 110.3c.6 5.4 2.7 9.7 6.5 12.8 3.7 3.1 8.5 4.7 14.4 4.7 3.9 0 7.4-.8 10.5-2.4 3.1-1.7 5.6-3.9 7.4-6.9h18.8c-3.2 8.2-7.9 14.5-14.3 19.1-6.3 4.5-13.7 6.8-22 6.8-5.6 0-10.7-1-15.4-3-4.7-2-8.9-4.9-12.7-8.8-3.5-3.7-6.3-7.9-8.2-12.8s-2.9-9.8-2.9-14.9c0-5.2.9-10.2 2.7-14.8 1.8-4.6 4.4-8.8 7.9-12.5 3.8-4.1 8.2-7.2 13-9.3 4.8-2.2 9.8-3.3 14.9-3.3 6.5 0 12.6 1.5 18.2 4.4 5.6 3 10.4 7.1 14.4 12.5 2.4 3.2 4.2 6.8 5.4 10.9 1.2 4 1.7 8.6 1.7 13.8 0 .4 0 1-.1 1.9 0 .9-.1 1.5-.1 1.9H197v-.1zm41.5-13.9c-1.5-5-4-8.8-7.5-11.4-3.5-2.6-8-3.9-13.3-3.9-4.7 0-8.8 1.4-12.5 4.2-3.7 2.8-6.2 6.5-7.5 11.1h40.8zm60.7-14c-6.8.5-11.5 2.3-14.2 5.3-2.7 3-4 8.4-4 16.2v38.3h-17.5v-75h16.4v8.7c2.6-3.4 5.5-5.8 8.7-7.4 3.1-1.6 6.7-2.4 10.6-2.4v16.3zm-164-77.2C114-3.1 37.3-6.1 37.2 39.7 14.8 54 0 77.3 0 102.3 0 132.5 24.5 157 54.7 157c30.2 0 54.7-24.5 54.7-54.7 0-23.3-14.6-43.3-35.2-51.1-4-1.5-12.6-4.2-19.4-3.6-9.8 6.2-21.8 19-27.4 31.8 8.4-10.1 21.5-14.5 33.2-12.6 17.1 2.8 30.2 17.6 30.2 35.6 0 19.9-16.1 36-36 36-11 0-20.8-4.9-27.4-12.6-9.9-11.5-12.4-23.9-10.4-36 6.9-42.4 57.2-66.5 94.6-75.8C99.4 20.5 77.4 31.1 62 42.6c44.9 17.4 52.2-20.5 73.2-37.4zm18.2 137.1h-13.7l-28.5-75.1h18.6l17.5 49.9 18-49.9h18.1l-30 75.1z"/></g><path id="Fill-4" class="st1" d="M348.6 110.3c.6 5.4 2.8 9.7 6.5 12.8 3.7 3.1 8.5 4.7 14.4 4.7 3.9 0 7.4-.8 10.5-2.4 3.1-1.7 5.6-3.9 7.4-6.9h18.8c-3.2 8.2-7.9 14.5-14.3 19.1-6.3 4.5-13.7 6.8-22 6.8-5.6 0-10.7-1-15.4-3-4.7-2-8.9-4.9-12.7-8.8-3.5-3.7-6.3-7.9-8.2-12.8-2-4.8-2.9-9.8-2.9-14.9 0-5.2.9-10.2 2.7-14.8 1.8-4.6 4.4-8.8 7.9-12.5 3.8-4.1 8.2-7.2 13-9.3 4.8-2.2 9.8-3.3 14.9-3.3 6.5 0 12.6 1.5 18.2 4.4 5.6 3 10.4 7.1 14.4 12.5 2.4 3.2 4.2 6.8 5.4 10.9 1.2 4 1.7 8.6 1.7 13.8 0 .4 0 1-.1 1.9 0 .9-.1 1.5-.1 1.9h-60.1v-.1zM390 96.4c-1.5-5-4-8.8-7.5-11.4-3.5-2.6-8-3.9-13.3-3.9-4.7 0-8.8 1.4-12.5 4.2-3.7 2.8-6.2 6.5-7.5 11.1H390zm88.2 45.9v-9.2c-2.1 3.7-5 6.5-8.8 8.3-3.8 1.8-8.7 2.7-14.6 2.7-11.1 0-20.4-3.8-27.8-11.4-7.4-7.6-11.2-17-11.2-28.2 0-5.3.9-10.3 2.8-15 1.9-4.7 4.5-8.9 8-12.5 3.7-4 7.9-6.9 12.4-8.8s9.7-2.8 15.4-2.8c5.5 0 10.2.9 14.1 2.7 3.9 1.8 7.1 4.6 9.5 8.3v-9.1h17v75.1h-16.8v-.1zm-44.5-38.2c0 6.3 2.1 11.6 6.3 15.9 4.2 4.3 9.3 6.4 15.4 6.4 5.5 0 10.3-2.1 14.5-6.4 4.2-4.3 6.3-9.3 6.3-15 0-6.1-2.1-11.3-6.3-15.7-4.2-4.4-9.1-6.6-14.8-6.6-5.9 0-10.9 2.1-15.1 6.2-4.2 4.2-6.3 9.2-6.3 15.2zm107.9-36.9v15.6H529v59.5h-16.9V82.8h-8.9V67.2h8.4v-2c0-7.9 2.2-13.8 6.5-17.7 4.4-3.9 11-5.8 19.9-5.8.3 0 .9 0 1.7.1.8 0 1.4.1 1.9.1v15.7h-1.2c-4 0-6.8.7-8.5 1.9-1.7 1.3-2.6 3.4-2.6 6.5v1.4h12.3v-.2zm-234.8 75.1h17.1V42.9h-17.1v99.4z"/></g></g></svg>
\ No newline at end of file
diff --git a/services/web/frontend/stylesheets/bootstrap-5/components/nav.scss b/services/web/frontend/stylesheets/bootstrap-5/components/nav.scss
index 5d28341cf53..dd0600ed15d 100644
--- a/services/web/frontend/stylesheets/bootstrap-5/components/nav.scss
+++ b/services/web/frontend/stylesheets/bootstrap-5/components/nav.scss
@@ -8,7 +8,10 @@
--navbar-padding-h: var(--spacing-05);
--navbar-padding: 0 var(--navbar-padding-h);
--navbar-brand-width: 130px;
- --navbar-brand-image-url: url('../../../../public/img/ol-brand/overleaf-white.svg');
+ --navbar-brand-image-url: var(
+ --navbar-brand-image-default-url,
+ url('../../../../public/img/ol-brand/overleaf-white.svg')
+ );

// Title, when used instead of a logo
--navbar-title-font-size: var(--font-size-05);
diff --git a/services/web/frontend/stylesheets/bootstrap-5/components/navbar.scss b/services/web/frontend/stylesheets/bootstrap-5/components/navbar.scss
index 3b984bb6f36..a8855ea1ca3 100644
--- a/services/web/frontend/stylesheets/bootstrap-5/components/navbar.scss
+++ b/services/web/frontend/stylesheets/bootstrap-5/components/navbar.scss
@@ -216,7 +216,10 @@
.website-redesign .navbar-default {
--navbar-title-color: var(--content-primary);
--navbar-title-color-hover: var(--content-secondary);
- --navbar-brand-image-url: url('../../../../public/img/ol-brand/overleaf-black.svg');
+ --navbar-brand-image-url: var(
+ --navbar-brand-image-redesign-url,
+ url('../../../../public/img/ol-brand/overleaf-black.svg')
+ );
--navbar-subdued-color: var(--content-primary);
--navbar-subdued-hover-bg: var(--bg-dark-primary);
--navbar-subdued-hover-color: var(--content-primary-dark);
diff --git a/services/web/types/css-properties-with-variables.tsx b/services/web/types/css-properties-with-variables.tsx
new file mode 100644
index 00000000000..fe0e85902a6
--- /dev/null
+++ b/services/web/types/css-properties-with-variables.tsx
@@ -0,0 +1,4 @@
+import { CSSProperties } from 'react'
+
+export type CSSPropertiesWithVariables = CSSProperties &
+ Record<`--${string}`, number | string>
--
2.43.0

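Note: the two stylesheet hunks above wrap the brand-image URL in `var()` with the previous hard-coded path as the fallback, so a deployment can swap the navbar logo simply by defining `--navbar-brand-image-default-url` (or `--navbar-brand-image-redesign-url`) higher up the cascade, e.g. from an inline style or an overriding stylesheet. A minimal sketch of how the new `CSSPropertiesWithVariables` type could be used for that from React; the component wiring and import path are assumptions, not part of the diff:

```
// Sketch only: the import path is abbreviated; point it at
// services/web/types/css-properties-with-variables.tsx in practice.
import type { CSSPropertiesWithVariables } from './types/css-properties-with-variables'

// Typing the style object with CSSPropertiesWithVariables allows the custom
// property that nav.scss now reads as a fallback-aware var().
const navbarStyle: CSSPropertiesWithVariables = {
  '--navbar-brand-image-default-url': "url('/img/custom-brand.svg')",
}

// <nav className="navbar-default" style={navbarStyle}>…</nav>
```
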
@@ -1,58 +0,0 @@
diff --git a/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs b/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
index 29f5e7ffd26..46be91a1d9c 100644
--- a/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
+++ b/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
@@ -9,6 +9,34 @@ const { ObjectId } = mongodb
const MIN_MONGO_VERSION = [6, 0]
const MIN_MONGO_FEATURE_COMPATIBILITY_VERSION = [6, 0]

+// Allow ignoring admin check failures via an environment variable
+const OVERRIDE_ENV_VAR_NAME = 'ALLOW_MONGO_ADMIN_CHECK_FAILURES'
+
+function shouldSkipAdminChecks() {
+ return process.env[OVERRIDE_ENV_VAR_NAME] === 'true'
+}
+
+function handleUnauthorizedError(err, feature) {
+ if (
+ err instanceof mongodb.MongoServerError &&
+ err.codeName === 'Unauthorized'
+ ) {
+ console.warn(`Warning: failed to check ${feature} (not authorised)`)
+ if (!shouldSkipAdminChecks()) {
+ console.error(
+ `Please ensure the MongoDB user has the required admin permissions, or\n` +
+ `set the environment variable ${OVERRIDE_ENV_VAR_NAME}=true to ignore this check.`
+ )
+ process.exit(1)
+ }
+ console.warn(
+ `Ignoring ${feature} check failure (${OVERRIDE_ENV_VAR_NAME}=${process.env[OVERRIDE_ENV_VAR_NAME]})`
+ )
+ } else {
+ throw err
+ }
+}
+
async function main() {
let mongoClient
try {
@@ -18,8 +46,16 @@ async function main() {
throw err
}

- await checkMongoVersion(mongoClient)
- await checkFeatureCompatibilityVersion(mongoClient)
+ try {
+ await checkMongoVersion(mongoClient)
+ } catch (err) {
+ handleUnauthorizedError(err, 'MongoDB version')
+ }
+ try {
+ await checkFeatureCompatibilityVersion(mongoClient)
+ } catch (err) {
+ handleUnauthorizedError(err, 'MongoDB feature compatibility version')
+ }

try {
await testTransactions(mongoClient)

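Note: the patch above makes the MongoDB version and feature-compatibility checks survivable for deployments whose database user lacks admin rights: an unauthorized failure still aborts startup unless `ALLOW_MONGO_ADMIN_CHECK_FAILURES=true` is set. A hypothetical invocation sketch (the runner and relative path are illustrative, not part of the diff):

```
// Run the startup check with a restricted MongoDB user while tolerating
// failed admin commands. Sketch only; wire into your own entrypoint.
import { execFileSync } from 'node:child_process'

execFileSync(
  'node',
  ['modules/server-ce-scripts/scripts/check-mongodb.mjs'],
  {
    env: {
      ...process.env,
      // Opt-in escape hatch added by the patch; any other value keeps the hard failure.
      ALLOW_MONGO_ADMIN_CHECK_FAILURES: 'true',
    },
    stdio: 'inherit',
  }
)
```
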
@@ -1,29 +0,0 @@
FROM sharelatex/sharelatex:5.5.2

# ../../bin/import_pr_patch.sh 27147 27173 27230 27240 27249 27257 27273 27397
# Remove CE tests
# Remove tests
# Remove cloudbuild changes
# Remove SaaS changes
# Fixup package.json and toolbar-items.tsx
# Fix cron paths
COPY *.patch .
RUN --mount=type=cache,target=/root/.cache \
--mount=type=cache,target=/root/.npm \
--mount=type=cache,target=/overleaf/services/web/node_modules/.cache,id=server-ce-webpack-cache \
--mount=type=tmpfs,target=/tmp true \
&& bash -ec 'for p in *.patch; do echo "=== Applying $p ==="; patch -p1 < "$p" && rm $p; done' \
&& npm audit --audit-level=high \
&& node genScript compile | bash \
&& npm prune --omit=dev \
&& apt remove -y linux-libc-dev

# ../../bin/import_pr_patch.sh 27476
# Remove tests
# Remove SaaS changes
COPY pr_27476.patch-stage-2 .
RUN patch -p1 < pr_27476.patch-stage-2 && rm pr_27476.patch-stage-2

# Extra tweaks to output
COPY pr_27397.patch-stage-2 .
RUN patch -p1 < pr_27397.patch-stage-2 && rm pr_27397.patch-stage-2

@@ -1,54 +0,0 @@
# Get the base container running
docker build -t base .

CONTAINER_NAME=new

# Start the container
docker run -t -i --entrypoint /bin/bash --name $CONTAINER_NAME base

# Clean any existing directories
rm -rf /tmp/{a,b}

# Take snapshot of initial container
mkdir /tmp/a ; docker export $CONTAINER_NAME | tar --exclude node_modules -x -C /tmp/a --strip-components=1 overleaf

# In the container, run the following commands
docker exec -i $CONTAINER_NAME /bin/bash <<'EOF'
npm install -g json
json -I -f package.json -c 'this.overrides["swagger-tools"].multer="2.0.2"'
json -I -f package.json -c 'this.overrides["request@2.88.2"]["form-data"]="2.5.5"'
json -I -f package.json -c 'this.overrides["superagent@7.1.6"] ??= {}'
json -I -f package.json -c 'this.overrides["superagent@7.1.6"]["form-data"]="4.0.4"'
json -I -f package.json -c 'this.overrides["superagent@3.8.3"] ??= {}'
json -I -f package.json -c 'this.overrides["superagent@3.8.3"]["form-data"]="2.5.5"'

npm uninstall -w libraries/metrics @google-cloud/opentelemetry-cloud-trace-exporter @google-cloud/profiler
npm uninstall -w libraries/logger @google-cloud/logging-bunyan
npm uninstall -w services/web @slack/webhook contentful @contentful/rich-text-types @contentful/rich-text-html-renderer
npm uninstall -w services/history-v1 @google-cloud/secret-manager

npm uninstall -w services/web "@node-saml/passport-saml"
npm install -w services/web "@node-saml/passport-saml@^5.1.0"

npm uninstall -w services/web multer
npm install -w services/web "multer@2.0.2"

npm uninstall -w services/history-v1 swagger-tools
npm install -w services/history-v1 swagger-tools@0.10.4

npm uninstall -w services/clsi request
npm install -w services/clsi request@2.88.2
npm install

npm audit --audit-level=high
EOF

# Take snapshot of final container
mkdir /tmp/b ; docker export $CONTAINER_NAME | tar --exclude node_modules -x -C /tmp/b --strip-components=1 overleaf

# Find the diff excluding node modules directories
# The sec_ prefix ensures it applies after pr_* patches.
(cd /tmp ; diff -u -x 'node_modules' -r a/ b/) > sec-npm.patch

# In the docker file we also need to remove linux-libc-dev
apt remove -y linux-libc-dev

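Note: the `json -I` edits above pin vulnerable transitive dependencies through npm `overrides` before `npm install` and the audit are re-run. A sketch of the resulting shape (any pre-existing keys in those entries are preserved; this literal only shows the values the script sets):

```
// Approximate "overrides" shape produced in the monorepo package.json.
const overrides = {
  'swagger-tools': { multer: '2.0.2' },
  'request@2.88.2': { 'form-data': '2.5.5' },
  'superagent@7.1.6': { 'form-data': '4.0.4' },
  'superagent@3.8.3': { 'form-data': '2.5.5' },
}
```
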
@@ -1,27 +0,0 @@
commit 43d0476e489cdf8e2e7261eb419810140d252a6d
Author: Andrew Rumble <andrew.rumble@overleaf.com>
Date: Fri Jul 25 12:18:26 2025 +0100

Add patch for multer 2.0.2

Co-authored-by: Ersun Warncke <ersun.warncke@overleaf.com>

diff --git a/patches/multer+2.0.2.patch b/patches/multer+2.0.2.patch
new file mode 100644
index 00000000000..f9959effe15
--- /dev/null
+++ b/patches/multer+2.0.2.patch
@@ -0,0 +1,13 @@
+diff --git a/node_modules/multer/lib/make-middleware.js b/node_modules/multer/lib/make-middleware.js
+index 260dcb4..895b4b2 100644
+--- a/node_modules/multer/lib/make-middleware.js
++++ b/node_modules/multer/lib/make-middleware.js
+@@ -113,7 +113,7 @@ function makeMiddleware (setup) {
+ if (fieldname == null) return abortWithCode('MISSING_FIELD_NAME')
+
+ // don't attach to the files object, if there is no file
+- if (!filename) return fileStream.resume()
++ if (!filename) filename = 'undefined'
+
+ // Work around bug in Busboy (https://github.com/mscdex/busboy/issues/6)
+ if (limits && Object.prototype.hasOwnProperty.call(limits, 'fieldNameSize')) {

@@ -1,351 +0,0 @@
|
||||||
|
|
||||||
|
|
||||||
diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
index ba3e0d43598e..feb4612ddc23 100644
|
|
||||||
--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
+++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
@@ -33,7 +33,6 @@ import {
|
|
||||||
makeProjectKey,
|
|
||||||
} from '../lib/blob_store/index.js'
|
|
||||||
import { backedUpBlobs as backedUpBlobsCollection, db } from '../lib/mongodb.js'
|
|
||||||
-import filestorePersistor from '../lib/persistor.js'
|
|
||||||
import commandLineArgs from 'command-line-args'
|
|
||||||
import readline from 'node:readline'
|
|
||||||
|
|
||||||
@@ -179,6 +178,37 @@ const STREAM_HIGH_WATER_MARK = parseInt(
|
|
||||||
const LOGGING_INTERVAL = parseInt(process.env.LOGGING_INTERVAL || '60000', 10)
|
|
||||||
const SLEEP_BEFORE_EXIT = parseInt(process.env.SLEEP_BEFORE_EXIT || '1000', 10)
|
|
||||||
|
|
||||||
+// Filestore endpoint location, the port is always hardcoded
|
|
||||||
+const FILESTORE_HOST = process.env.FILESTORE_HOST || '127.0.0.1'
|
|
||||||
+const FILESTORE_PORT = process.env.FILESTORE_PORT || '3009'
|
|
||||||
+
|
|
||||||
+async function fetchFromFilestore(projectId, fileId) {
|
|
||||||
+ const url = `http://${FILESTORE_HOST}:${FILESTORE_PORT}/project/${projectId}/file/${fileId}`
|
|
||||||
+ const response = await fetch(url)
|
|
||||||
+ if (!response.ok) {
|
|
||||||
+ if (response.status === 404) {
|
|
||||||
+ throw new NotFoundError('file not found in filestore', {
|
|
||||||
+ status: response.status,
|
|
||||||
+ })
|
|
||||||
+ }
|
|
||||||
+ const body = await response.text()
|
|
||||||
+ throw new OError('fetchFromFilestore failed', {
|
|
||||||
+ projectId,
|
|
||||||
+ fileId,
|
|
||||||
+ status: response.status,
|
|
||||||
+ body,
|
|
||||||
+ })
|
|
||||||
+ }
|
|
||||||
+ if (!response.body) {
|
|
||||||
+ throw new OError('fetchFromFilestore response has no body', {
|
|
||||||
+ projectId,
|
|
||||||
+ fileId,
|
|
||||||
+ status: response.status,
|
|
||||||
+ })
|
|
||||||
+ }
|
|
||||||
+ return response.body
|
|
||||||
+}
|
|
||||||
+
|
|
||||||
const projectsCollection = db.collection('projects')
|
|
||||||
/** @type {ProjectsCollection} */
|
|
||||||
const typedProjectsCollection = db.collection('projects')
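Note: with this hunk the back-fill script stops reading the user-files bucket through the object persistor and instead streams each file from the filestore service over HTTP, defaulting to 127.0.0.1:3009 via FILESTORE_HOST/FILESTORE_PORT. A minimal usage sketch inside the script (the IDs are placeholders; piping the web stream into a file mirrors what processFileOnce does further down):

```
// Fetch one user file from filestore and copy it to disk. Node 18+ fetch is
// assumed, so the returned body is a web ReadableStream, which pipeline()
// accepts as an async-iterable source.
import fs from 'node:fs'
import Stream from 'node:stream'

const src = await fetchFromFilestore(
  '507f1f77bcf86cd799439011', // placeholder project id
  '507f191e810c19729de860ea' // placeholder file id
)
await Stream.promises.pipeline(src, fs.createWriteStream('/tmp/file-copy'))
```
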
|
|
||||||
@@ -348,8 +378,7 @@ async function processFile(entry, filePath) {
|
|
||||||
} catch (err) {
|
|
||||||
if (gracefulShutdownInitiated) throw err
|
|
||||||
if (err instanceof NotFoundError) {
|
|
||||||
- const { bucketName } = OError.getFullInfo(err)
|
|
||||||
- if (bucketName === USER_FILES_BUCKET_NAME && !RETRY_FILESTORE_404) {
|
|
||||||
+ if (!RETRY_FILESTORE_404) {
|
|
||||||
throw err // disable retries for not found in filestore bucket case
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -416,10 +445,8 @@ async function processFileOnce(entry, filePath) {
|
|
||||||
}
|
|
||||||
|
|
||||||
STATS.readFromGCSCount++
|
|
||||||
- const src = await filestorePersistor.getObjectStream(
|
|
||||||
- USER_FILES_BUCKET_NAME,
|
|
||||||
- `${projectId}/${fileId}`
|
|
||||||
- )
|
|
||||||
+ // make a fetch request to filestore itself
|
|
||||||
+ const src = await fetchFromFilestore(projectId, fileId)
|
|
||||||
const dst = fs.createWriteStream(filePath, {
|
|
||||||
highWaterMark: STREAM_HIGH_WATER_MARK,
|
|
||||||
})
|
|
||||||
@@ -1327,14 +1354,21 @@ async function processDeletedProjects() {
|
|
||||||
}
|
|
||||||
|
|
||||||
async function main() {
|
|
||||||
+ console.log('Starting project file backup...')
|
|
||||||
await loadGlobalBlobs()
|
|
||||||
+ console.log('Loaded global blobs:', GLOBAL_BLOBS.size)
|
|
||||||
if (PROJECT_IDS_FROM) {
|
|
||||||
+ console.log(
|
|
||||||
+ `Processing projects from file: ${PROJECT_IDS_FROM}, this may take a while...`
|
|
||||||
+ )
|
|
||||||
await processProjectsFromFile()
|
|
||||||
} else {
|
|
||||||
if (PROCESS_NON_DELETED_PROJECTS) {
|
|
||||||
+ console.log('Processing non-deleted projects...')
|
|
||||||
await processNonDeletedProjects()
|
|
||||||
}
|
|
||||||
if (PROCESS_DELETED_PROJECTS) {
|
|
||||||
+ console.log('Processing deleted projects...')
|
|
||||||
await processDeletedProjects()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
|
|
||||||
index fd39369a7189..4e697b8bec2c 100644
|
|
||||||
--- a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
|
|
||||||
+++ b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
|
|
||||||
@@ -15,7 +15,6 @@ import { execFile } from 'node:child_process'
|
|
||||||
import chai, { expect } from 'chai'
|
|
||||||
import chaiExclude from 'chai-exclude'
|
|
||||||
import config from 'config'
|
|
||||||
-import ObjectPersistor from '@overleaf/object-persistor'
|
|
||||||
import { WritableBuffer } from '@overleaf/stream-utils'
|
|
||||||
import {
|
|
||||||
backupPersistor,
|
|
||||||
@@ -27,6 +26,9 @@ import {
|
|
||||||
makeProjectKey,
|
|
||||||
} from '../../../../storage/lib/blob_store/index.js'
|
|
||||||
|
|
||||||
+import express from 'express'
|
|
||||||
+import bodyParser from 'body-parser'
|
|
||||||
+
|
|
||||||
chai.use(chaiExclude)
|
|
||||||
const TIMEOUT = 20 * 1_000
|
|
||||||
|
|
||||||
@@ -36,15 +38,60 @@ const { tieringStorageClass } = config.get('backupPersistor')
|
|
||||||
const projectsCollection = db.collection('projects')
|
|
||||||
const deletedProjectsCollection = db.collection('deletedProjects')
|
|
||||||
|
|
||||||
-const FILESTORE_PERSISTOR = ObjectPersistor({
|
|
||||||
- backend: 'gcs',
|
|
||||||
- gcs: {
|
|
||||||
- endpoint: {
|
|
||||||
- apiEndpoint: process.env.GCS_API_ENDPOINT,
|
|
||||||
- projectId: process.env.GCS_PROJECT_ID,
|
|
||||||
- },
|
|
||||||
- },
|
|
||||||
-})
|
|
||||||
+class MockFilestore {
|
|
||||||
+ constructor() {
|
|
||||||
+ this.host = process.env.FILESTORE_HOST || '127.0.0.1'
|
|
||||||
+ this.port = process.env.FILESTORE_PORT || 3009
|
|
||||||
+ // create a server listening on this.host and this.port
|
|
||||||
+ this.files = {}
|
|
||||||
+
|
|
||||||
+ this.app = express()
|
|
||||||
+ this.app.use(bodyParser.json())
|
|
||||||
+ this.app.use(bodyParser.urlencoded({ extended: true }))
|
|
||||||
+
|
|
||||||
+ this.app.get('/project/:projectId/file/:fileId', (req, res) => {
|
|
||||||
+ const { projectId, fileId } = req.params
|
|
||||||
+ const content = this.files[projectId]?.[fileId]
|
|
||||||
+ if (!content) return res.status(404).end()
|
|
||||||
+ res.status(200).end(content)
|
|
||||||
+ })
|
|
||||||
+ }
|
|
||||||
+
|
|
||||||
+ start() {
|
|
||||||
+ // reset stored files
|
|
||||||
+ this.files = {}
|
|
||||||
+ // start the server
|
|
||||||
+ if (this.serverPromise) {
|
|
||||||
+ return this.serverPromise
|
|
||||||
+ } else {
|
|
||||||
+ this.serverPromise = new Promise((resolve, reject) => {
|
|
||||||
+ this.server = this.app.listen(this.port, this.host, err => {
|
|
||||||
+ if (err) return reject(err)
|
|
||||||
+ resolve()
|
|
||||||
+ })
|
|
||||||
+ })
|
|
||||||
+ return this.serverPromise
|
|
||||||
+ }
|
|
||||||
+ }
|
|
||||||
+
|
|
||||||
+ addFile(projectId, fileId, fileContent) {
|
|
||||||
+ if (!this.files[projectId]) {
|
|
||||||
+ this.files[projectId] = {}
|
|
||||||
+ }
|
|
||||||
+ this.files[projectId][fileId] = fileContent
|
|
||||||
+ }
|
|
||||||
+
|
|
||||||
+ deleteObject(projectId, fileId) {
|
|
||||||
+ if (this.files[projectId]) {
|
|
||||||
+ delete this.files[projectId][fileId]
|
|
||||||
+ if (Object.keys(this.files[projectId]).length === 0) {
|
|
||||||
+ delete this.files[projectId]
|
|
||||||
+ }
|
|
||||||
+ }
|
|
||||||
+ }
|
|
||||||
+}
|
|
||||||
+
|
|
||||||
+const mockFilestore = new MockFilestore()
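Note: the acceptance test swaps the GCS-backed FILESTORE_PERSISTOR for this in-process Express stub, so seeding and removing fixtures becomes plain method calls instead of bucket writes. A short usage sketch (IDs and content are placeholders):

```
// Boot the stub once per test run, seed a file the script will fetch over
// HTTP, then delete it again to simulate a filestore 404 in the retry tests.
await mockFilestore.start()
mockFilestore.addFile('507f1f77bcf86cd799439011', '507f191e810c19729de860ea', 'file body')
mockFilestore.deleteObject('507f1f77bcf86cd799439011', '507f191e810c19729de860ea')
```
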
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {ObjectId} objectId
|
|
||||||
@@ -472,67 +519,36 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
}
|
|
||||||
|
|
||||||
async function populateFilestore() {
|
|
||||||
- await FILESTORE_PERSISTOR.sendStream(
|
|
||||||
- USER_FILES_BUCKET_NAME,
|
|
||||||
- `${projectId0}/${fileId0}`,
|
|
||||||
- Stream.Readable.from([fileId0.toString()])
|
|
||||||
- )
|
|
||||||
- await FILESTORE_PERSISTOR.sendStream(
|
|
||||||
- USER_FILES_BUCKET_NAME,
|
|
||||||
- `${projectId0}/${fileId6}`,
|
|
||||||
- Stream.Readable.from([fileId6.toString()])
|
|
||||||
- )
|
|
||||||
- await FILESTORE_PERSISTOR.sendStream(
|
|
||||||
- USER_FILES_BUCKET_NAME,
|
|
||||||
- `${projectId0}/${fileId7}`,
|
|
||||||
- Stream.Readable.from([contentFile7])
|
|
||||||
- )
|
|
||||||
- await FILESTORE_PERSISTOR.sendStream(
|
|
||||||
- USER_FILES_BUCKET_NAME,
|
|
||||||
- `${projectId1}/${fileId1}`,
|
|
||||||
- Stream.Readable.from([fileId1.toString()])
|
|
||||||
- )
|
|
||||||
- await FILESTORE_PERSISTOR.sendStream(
|
|
||||||
- USER_FILES_BUCKET_NAME,
|
|
||||||
- `${projectId2}/${fileId2}`,
|
|
||||||
- Stream.Readable.from([fileId2.toString()])
|
|
||||||
- )
|
|
||||||
- await FILESTORE_PERSISTOR.sendStream(
|
|
||||||
- USER_FILES_BUCKET_NAME,
|
|
||||||
- `${projectId3}/${fileId3}`,
|
|
||||||
- Stream.Readable.from([fileId3.toString()])
|
|
||||||
- )
|
|
||||||
- await FILESTORE_PERSISTOR.sendStream(
|
|
||||||
- USER_FILES_BUCKET_NAME,
|
|
||||||
- `${projectId3}/${fileId10}`,
|
|
||||||
+ await mockFilestore.addFile(projectId0, fileId0, fileId0.toString())
|
|
||||||
+ await mockFilestore.addFile(projectId0, fileId6, fileId6.toString())
|
|
||||||
+ await mockFilestore.addFile(projectId0, fileId7, contentFile7)
|
|
||||||
+ await mockFilestore.addFile(projectId1, fileId1, fileId1.toString())
|
|
||||||
+ await mockFilestore.addFile(projectId2, fileId2, fileId2.toString())
|
|
||||||
+ await mockFilestore.addFile(projectId3, fileId3, fileId3.toString())
|
|
||||||
+ await mockFilestore.addFile(
|
|
||||||
+ projectId3,
|
|
||||||
+ fileId10,
|
|
||||||
// fileId10 is dupe of fileId3
|
|
||||||
- Stream.Readable.from([fileId3.toString()])
|
|
||||||
+ fileId3.toString()
|
|
||||||
)
|
|
||||||
- await FILESTORE_PERSISTOR.sendStream(
|
|
||||||
- USER_FILES_BUCKET_NAME,
|
|
||||||
- `${projectId3}/${fileId11}`,
|
|
||||||
+ await mockFilestore.addFile(
|
|
||||||
+ projectId3,
|
|
||||||
+ fileId11,
|
|
||||||
// fileId11 is dupe of fileId3
|
|
||||||
- Stream.Readable.from([fileId3.toString()])
|
|
||||||
- )
|
|
||||||
- await FILESTORE_PERSISTOR.sendStream(
|
|
||||||
- USER_FILES_BUCKET_NAME,
|
|
||||||
- `${projectIdDeleted0}/${fileId4}`,
|
|
||||||
- Stream.Readable.from([fileId4.toString()])
|
|
||||||
+ fileId3.toString()
|
|
||||||
)
|
|
||||||
- await FILESTORE_PERSISTOR.sendStream(
|
|
||||||
- USER_FILES_BUCKET_NAME,
|
|
||||||
- `${projectIdDeleted1}/${fileId5}`,
|
|
||||||
- Stream.Readable.from([fileId5.toString()])
|
|
||||||
- )
|
|
||||||
- await FILESTORE_PERSISTOR.sendStream(
|
|
||||||
- USER_FILES_BUCKET_NAME,
|
|
||||||
- `${projectIdBadFileTree3}/${fileId9}`,
|
|
||||||
- Stream.Readable.from([fileId9.toString()])
|
|
||||||
+ await mockFilestore.addFile(projectIdDeleted0, fileId4, fileId4.toString())
|
|
||||||
+ await mockFilestore.addFile(projectIdDeleted1, fileId5, fileId5.toString())
|
|
||||||
+ await mockFilestore.addFile(
|
|
||||||
+ projectIdBadFileTree3,
|
|
||||||
+ fileId9,
|
|
||||||
+ fileId9.toString()
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
async function prepareEnvironment() {
|
|
||||||
await cleanup.everything()
|
|
||||||
+ await mockFilestore.start()
|
|
||||||
await populateMongo()
|
|
||||||
await populateHistoryV1()
|
|
||||||
await populateFilestore()
|
|
||||||
@@ -1117,10 +1133,7 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
beforeEach('prepare environment', prepareEnvironment)
|
|
||||||
|
|
||||||
it('should gracefully handle fatal errors', async function () {
|
|
||||||
- await FILESTORE_PERSISTOR.deleteObject(
|
|
||||||
- USER_FILES_BUCKET_NAME,
|
|
||||||
- `${projectId0}/${fileId0}`
|
|
||||||
- )
|
|
||||||
+ mockFilestore.deleteObject(projectId0, fileId0)
|
|
||||||
const t0 = Date.now()
|
|
||||||
const { stats, result } = await tryRunScript([], {
|
|
||||||
RETRIES: '10',
|
|
||||||
@@ -1148,17 +1161,10 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should retry on error', async function () {
|
|
||||||
- await FILESTORE_PERSISTOR.deleteObject(
|
|
||||||
- USER_FILES_BUCKET_NAME,
|
|
||||||
- `${projectId0}/${fileId0}`
|
|
||||||
- )
|
|
||||||
+ mockFilestore.deleteObject(projectId0, fileId0)
|
|
||||||
const restoreFileAfter5s = async () => {
|
|
||||||
await setTimeout(5_000)
|
|
||||||
- await FILESTORE_PERSISTOR.sendStream(
|
|
||||||
- USER_FILES_BUCKET_NAME,
|
|
||||||
- `${projectId0}/${fileId0}`,
|
|
||||||
- Stream.Readable.from([fileId0.toString()])
|
|
||||||
- )
|
|
||||||
+ mockFilestore.addFile(projectId0, fileId0, fileId0.toString())
|
|
||||||
}
|
|
||||||
// use Promise.allSettled to ensure the above sendStream call finishes before this test completes
|
|
||||||
const [
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
index feb4612ddc23..5a590e347a94 100644
|
|
||||||
--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
+++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
@@ -178,7 +178,7 @@ const STREAM_HIGH_WATER_MARK = parseInt(
|
|
||||||
const LOGGING_INTERVAL = parseInt(process.env.LOGGING_INTERVAL || '60000', 10)
|
|
||||||
const SLEEP_BEFORE_EXIT = parseInt(process.env.SLEEP_BEFORE_EXIT || '1000', 10)
|
|
||||||
|
|
||||||
-// Filestore endpoint location, the port is always hardcoded
|
|
||||||
+// Filestore endpoint location
|
|
||||||
const FILESTORE_HOST = process.env.FILESTORE_HOST || '127.0.0.1'
|
|
||||||
const FILESTORE_PORT = process.env.FILESTORE_PORT || '3009'
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
|
|
||||||
index 4e697b8bec2c..8f861d393451 100644
|
|
||||||
--- a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
|
|
||||||
+++ b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
|
|
||||||
@@ -27,7 +27,6 @@ import {
|
|
||||||
} from '../../../../storage/lib/blob_store/index.js'
|
|
||||||
|
|
||||||
import express from 'express'
|
|
||||||
-import bodyParser from 'body-parser'
|
|
||||||
|
|
||||||
chai.use(chaiExclude)
|
|
||||||
const TIMEOUT = 20 * 1_000
|
|
||||||
@@ -46,8 +45,6 @@ class MockFilestore {
|
|
||||||
this.files = {}
|
|
||||||
|
|
||||||
this.app = express()
|
|
||||||
- this.app.use(bodyParser.json())
|
|
||||||
- this.app.use(bodyParser.urlencoded({ extended: true }))
|
|
||||||
|
|
||||||
this.app.get('/project/:projectId/file/:fileId', (req, res) => {
|
|
||||||
const { projectId, fileId } = req.params
|
|
||||||
|
|
|
@@ -1,961 +0,0 @@
|
||||||
|
|
||||||
|
|
||||||
diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
index 5a590e347a9..3be1c8a5407 100644
|
|
||||||
--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
+++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
@@ -1,28 +1,20 @@
|
|
||||||
// @ts-check
|
|
||||||
-import Crypto from 'node:crypto'
|
|
||||||
import Events from 'node:events'
|
|
||||||
import fs from 'node:fs'
|
|
||||||
import Path from 'node:path'
|
|
||||||
import { performance } from 'node:perf_hooks'
|
|
||||||
import Stream from 'node:stream'
|
|
||||||
-import zLib from 'node:zlib'
|
|
||||||
import { setTimeout } from 'node:timers/promises'
|
|
||||||
-import { Binary, ObjectId } from 'mongodb'
|
|
||||||
+import { ObjectId } from 'mongodb'
|
|
||||||
import pLimit from 'p-limit'
|
|
||||||
import logger from '@overleaf/logger'
|
|
||||||
import {
|
|
||||||
batchedUpdate,
|
|
||||||
objectIdFromInput,
|
|
||||||
renderObjectId,
|
|
||||||
- READ_PREFERENCE_SECONDARY,
|
|
||||||
} from '@overleaf/mongo-utils/batchedUpdate.js'
|
|
||||||
import OError from '@overleaf/o-error'
|
|
||||||
-import {
|
|
||||||
- AlreadyWrittenError,
|
|
||||||
- NoKEKMatchedError,
|
|
||||||
- NotFoundError,
|
|
||||||
-} from '@overleaf/object-persistor/src/Errors.js'
|
|
||||||
-import { backupPersistor, projectBlobsBucket } from '../lib/backupPersistor.mjs'
|
|
||||||
+import { NotFoundError } from '@overleaf/object-persistor/src/Errors.js'
|
|
||||||
import {
|
|
||||||
BlobStore,
|
|
||||||
GLOBAL_BLOBS,
|
|
||||||
@@ -30,9 +22,8 @@ import {
|
|
||||||
getProjectBlobsBatch,
|
|
||||||
getStringLengthOfFile,
|
|
||||||
makeBlobForFile,
|
|
||||||
- makeProjectKey,
|
|
||||||
} from '../lib/blob_store/index.js'
|
|
||||||
-import { backedUpBlobs as backedUpBlobsCollection, db } from '../lib/mongodb.js'
|
|
||||||
+import { db } from '../lib/mongodb.js'
|
|
||||||
import commandLineArgs from 'command-line-args'
|
|
||||||
import readline from 'node:readline'
|
|
||||||
|
|
||||||
@@ -88,7 +79,7 @@ ObjectId.cacheHexString = true
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
- * @return {{PROJECT_IDS_FROM: string, PROCESS_HASHED_FILES: boolean, LOGGING_IDENTIFIER: string, BATCH_RANGE_START: string, PROCESS_BLOBS: boolean, BATCH_RANGE_END: string, PROCESS_NON_DELETED_PROJECTS: boolean, PROCESS_DELETED_PROJECTS: boolean, COLLECT_BACKED_UP_BLOBS: boolean}}
|
|
||||||
+ * @return {{PROJECT_IDS_FROM: string, PROCESS_HASHED_FILES: boolean, LOGGING_IDENTIFIER: string, BATCH_RANGE_START: string, PROCESS_BLOBS: boolean, BATCH_RANGE_END: string, PROCESS_NON_DELETED_PROJECTS: boolean, PROCESS_DELETED_PROJECTS: boolean}}
|
|
||||||
*/
|
|
||||||
function parseArgs() {
|
|
||||||
const PUBLIC_LAUNCH_DATE = new Date('2012-01-01T00:00:00Z')
|
|
||||||
@@ -98,7 +89,6 @@ function parseArgs() {
|
|
||||||
{ name: 'processHashedFiles', type: String, defaultValue: 'false' },
|
|
||||||
{ name: 'processBlobs', type: String, defaultValue: 'true' },
|
|
||||||
{ name: 'projectIdsFrom', type: String, defaultValue: '' },
|
|
||||||
- { name: 'collectBackedUpBlobs', type: String, defaultValue: 'true' },
|
|
||||||
{
|
|
||||||
name: 'BATCH_RANGE_START',
|
|
||||||
type: String,
|
|
||||||
@@ -130,7 +120,6 @@ function parseArgs() {
|
|
||||||
PROCESS_DELETED_PROJECTS: boolVal('processDeletedProjects'),
|
|
||||||
PROCESS_BLOBS: boolVal('processBlobs'),
|
|
||||||
PROCESS_HASHED_FILES: boolVal('processHashedFiles'),
|
|
||||||
- COLLECT_BACKED_UP_BLOBS: boolVal('collectBackedUpBlobs'),
|
|
||||||
BATCH_RANGE_START,
|
|
||||||
BATCH_RANGE_END,
|
|
||||||
LOGGING_IDENTIFIER: args['LOGGING_IDENTIFIER'] || BATCH_RANGE_START,
|
|
||||||
@@ -143,7 +132,6 @@ const {
|
|
||||||
PROCESS_DELETED_PROJECTS,
|
|
||||||
PROCESS_BLOBS,
|
|
||||||
PROCESS_HASHED_FILES,
|
|
||||||
- COLLECT_BACKED_UP_BLOBS,
|
|
||||||
BATCH_RANGE_START,
|
|
||||||
BATCH_RANGE_END,
|
|
||||||
LOGGING_IDENTIFIER,
|
|
||||||
@@ -232,7 +220,6 @@ async function processConcurrently(array, fn) {
|
|
||||||
const STATS = {
|
|
||||||
projects: 0,
|
|
||||||
blobs: 0,
|
|
||||||
- backedUpBlobs: 0,
|
|
||||||
filesWithHash: 0,
|
|
||||||
filesWithoutHash: 0,
|
|
||||||
filesDuplicated: 0,
|
|
||||||
@@ -246,14 +233,8 @@ const STATS = {
|
|
||||||
projectHardDeleted: 0,
|
|
||||||
fileHardDeleted: 0,
|
|
||||||
mongoUpdates: 0,
|
|
||||||
- deduplicatedWriteToAWSLocalCount: 0,
|
|
||||||
- deduplicatedWriteToAWSLocalEgress: 0,
|
|
||||||
- deduplicatedWriteToAWSRemoteCount: 0,
|
|
||||||
- deduplicatedWriteToAWSRemoteEgress: 0,
|
|
||||||
readFromGCSCount: 0,
|
|
||||||
readFromGCSIngress: 0,
|
|
||||||
- writeToAWSCount: 0,
|
|
||||||
- writeToAWSEgress: 0,
|
|
||||||
writeToGCSCount: 0,
|
|
||||||
writeToGCSEgress: 0,
|
|
||||||
}
|
|
||||||
@@ -275,7 +256,7 @@ function toMiBPerSecond(v, ms) {
|
|
||||||
/**
|
|
||||||
* @param {any} stats
|
|
||||||
* @param {number} ms
|
|
||||||
- * @return {{writeToAWSThroughputMiBPerSecond: number, readFromGCSThroughputMiBPerSecond: number}}
|
|
||||||
+ * @return {{readFromGCSThroughputMiBPerSecond: number}}
|
|
||||||
*/
|
|
||||||
function bandwidthStats(stats, ms) {
|
|
||||||
return {
|
|
||||||
@@ -283,10 +264,6 @@ function bandwidthStats(stats, ms) {
|
|
||||||
stats.readFromGCSIngress,
|
|
||||||
ms
|
|
||||||
),
|
|
||||||
- writeToAWSThroughputMiBPerSecond: toMiBPerSecond(
|
|
||||||
- stats.writeToAWSEgress,
|
|
||||||
- ms
|
|
||||||
- ),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -382,9 +359,6 @@ async function processFile(entry, filePath) {
|
|
||||||
throw err // disable retries for not found in filestore bucket case
|
|
||||||
}
|
|
||||||
}
|
|
||||||
- if (err instanceof NoKEKMatchedError) {
|
|
||||||
- throw err // disable retries when upload to S3 will fail again
|
|
||||||
- }
|
|
||||||
STATS.filesRetries++
|
|
||||||
const {
|
|
||||||
ctx: { projectId },
|
|
||||||
@@ -417,32 +391,8 @@ async function processFileOnce(entry, filePath) {
|
|
||||||
if (entry.blob) {
|
|
||||||
const { blob } = entry
|
|
||||||
const hash = blob.getHash()
|
|
||||||
- if (entry.ctx.hasBackedUpBlob(hash)) {
|
|
||||||
- STATS.deduplicatedWriteToAWSLocalCount++
|
|
||||||
- STATS.deduplicatedWriteToAWSLocalEgress += estimateBlobSize(blob)
|
|
||||||
- return hash
|
|
||||||
- }
|
|
||||||
- entry.ctx.recordPendingBlob(hash)
|
|
||||||
- STATS.readFromGCSCount++
|
|
||||||
- const src = await blobStore.getStream(hash)
|
|
||||||
- const dst = fs.createWriteStream(filePath, {
|
|
||||||
- highWaterMark: STREAM_HIGH_WATER_MARK,
|
|
||||||
- })
|
|
||||||
- try {
|
|
||||||
- await Stream.promises.pipeline(src, dst)
|
|
||||||
- } finally {
|
|
||||||
- STATS.readFromGCSIngress += dst.bytesWritten
|
|
||||||
- }
|
|
||||||
- await uploadBlobToAWS(entry, blob, filePath)
|
|
||||||
return hash
|
|
||||||
}
|
|
||||||
- if (entry.hash && entry.ctx.hasBackedUpBlob(entry.hash)) {
|
|
||||||
- STATS.deduplicatedWriteToAWSLocalCount++
|
|
||||||
- const blob = entry.ctx.getCachedHistoryBlob(entry.hash)
|
|
||||||
- // blob might not exist on re-run with --PROCESS_BLOBS=false
|
|
||||||
- if (blob) STATS.deduplicatedWriteToAWSLocalEgress += estimateBlobSize(blob)
|
|
||||||
- return entry.hash
|
|
||||||
- }
|
|
||||||
|
|
||||||
STATS.readFromGCSCount++
|
|
||||||
// make a fetch request to filestore itself
|
|
||||||
@@ -469,16 +419,14 @@ async function processFileOnce(entry, filePath) {
|
|
||||||
STATS.globalBlobsEgress += estimateBlobSize(blob)
|
|
||||||
return hash
|
|
||||||
}
|
|
||||||
- if (entry.ctx.hasBackedUpBlob(hash)) {
|
|
||||||
- STATS.deduplicatedWriteToAWSLocalCount++
|
|
||||||
- STATS.deduplicatedWriteToAWSLocalEgress += estimateBlobSize(blob)
|
|
||||||
+ if (entry.ctx.hasCompletedBlob(hash)) {
|
|
||||||
return hash
|
|
||||||
}
|
|
||||||
entry.ctx.recordPendingBlob(hash)
|
|
||||||
|
|
||||||
try {
|
|
||||||
await uploadBlobToGCS(blobStore, entry, blob, hash, filePath)
|
|
||||||
- await uploadBlobToAWS(entry, blob, filePath)
|
|
||||||
+ entry.ctx.recordCompletedBlob(hash) // mark upload as completed
|
|
||||||
} catch (err) {
|
|
||||||
entry.ctx.recordFailedBlob(hash)
|
|
||||||
throw err
|
|
||||||
@@ -515,76 +463,6 @@ async function uploadBlobToGCS(blobStore, entry, blob, hash, filePath) {
|
|
||||||
|
|
||||||
const GZ_SUFFIX = '.gz'
|
|
||||||
|
|
||||||
-/**
|
|
||||||
- * @param {QueueEntry} entry
|
|
||||||
- * @param {Blob} blob
|
|
||||||
- * @param {string} filePath
|
|
||||||
- * @return {Promise<void>}
|
|
||||||
- */
|
|
||||||
-async function uploadBlobToAWS(entry, blob, filePath) {
|
|
||||||
- const { historyId } = entry.ctx
|
|
||||||
- let backupSource
|
|
||||||
- let contentEncoding
|
|
||||||
- const md5 = Crypto.createHash('md5')
|
|
||||||
- let size
|
|
||||||
- if (blob.getStringLength()) {
|
|
||||||
- const filePathCompressed = filePath + GZ_SUFFIX
|
|
||||||
- backupSource = filePathCompressed
|
|
||||||
- contentEncoding = 'gzip'
|
|
||||||
- size = 0
|
|
||||||
- await Stream.promises.pipeline(
|
|
||||||
- fs.createReadStream(filePath, { highWaterMark: STREAM_HIGH_WATER_MARK }),
|
|
||||||
- zLib.createGzip(),
|
|
||||||
- async function* (source) {
|
|
||||||
- for await (const chunk of source) {
|
|
||||||
- size += chunk.byteLength
|
|
||||||
- md5.update(chunk)
|
|
||||||
- yield chunk
|
|
||||||
- }
|
|
||||||
- },
|
|
||||||
- fs.createWriteStream(filePathCompressed, {
|
|
||||||
- highWaterMark: STREAM_HIGH_WATER_MARK,
|
|
||||||
- })
|
|
||||||
- )
|
|
||||||
- } else {
|
|
||||||
- backupSource = filePath
|
|
||||||
- size = blob.getByteLength()
|
|
||||||
- await Stream.promises.pipeline(
|
|
||||||
- fs.createReadStream(filePath, { highWaterMark: STREAM_HIGH_WATER_MARK }),
|
|
||||||
- md5
|
|
||||||
- )
|
|
||||||
- }
|
|
||||||
- const backendKeyPath = makeProjectKey(historyId, blob.getHash())
|
|
||||||
- const persistor = await entry.ctx.getCachedPersistor(backendKeyPath)
|
|
||||||
- try {
|
|
||||||
- STATS.writeToAWSCount++
|
|
||||||
- await persistor.sendStream(
|
|
||||||
- projectBlobsBucket,
|
|
||||||
- backendKeyPath,
|
|
||||||
- fs.createReadStream(backupSource, {
|
|
||||||
- highWaterMark: STREAM_HIGH_WATER_MARK,
|
|
||||||
- }),
|
|
||||||
- {
|
|
||||||
- contentEncoding,
|
|
||||||
- contentType: 'application/octet-stream',
|
|
||||||
- contentLength: size,
|
|
||||||
- sourceMd5: md5.digest('hex'),
|
|
||||||
- ifNoneMatch: '*', // de-duplicate write (we pay for the request, but avoid egress)
|
|
||||||
- }
|
|
||||||
- )
|
|
||||||
- STATS.writeToAWSEgress += size
|
|
||||||
- } catch (err) {
|
|
||||||
- if (err instanceof AlreadyWrittenError) {
|
|
||||||
- STATS.deduplicatedWriteToAWSRemoteCount++
|
|
||||||
- STATS.deduplicatedWriteToAWSRemoteEgress += size
|
|
||||||
- } else {
|
|
||||||
- STATS.writeToAWSEgress += size
|
|
||||||
- throw err
|
|
||||||
- }
|
|
||||||
- }
|
|
||||||
- entry.ctx.recordBackedUpBlob(blob.getHash())
|
|
||||||
-}
|
|
||||||
-
|
|
||||||
/**
|
|
||||||
* @param {Array<QueueEntry>} files
|
|
||||||
* @return {Promise<void>}
|
|
||||||
@@ -670,23 +548,18 @@ async function queueNextBatch(batch, prefix = 'rootFolder.0') {
|
|
||||||
* @return {Promise<void>}
|
|
||||||
*/
|
|
||||||
async function processBatch(batch, prefix = 'rootFolder.0') {
|
|
||||||
- const [{ nBlobs, blobs }, { nBackedUpBlobs, backedUpBlobs }] =
|
|
||||||
- await Promise.all([collectProjectBlobs(batch), collectBackedUpBlobs(batch)])
|
|
||||||
- const files = Array.from(findFileInBatch(batch, prefix, blobs, backedUpBlobs))
|
|
||||||
+ const { nBlobs, blobs } = await collectProjectBlobs(batch)
|
|
||||||
+ const files = Array.from(findFileInBatch(batch, prefix, blobs))
|
|
||||||
STATS.projects += batch.length
|
|
||||||
STATS.blobs += nBlobs
|
|
||||||
- STATS.backedUpBlobs += nBackedUpBlobs
|
|
||||||
|
|
||||||
// GC
|
|
||||||
batch.length = 0
|
|
||||||
blobs.clear()
|
|
||||||
- backedUpBlobs.clear()
|
|
||||||
|
|
||||||
// The files are currently ordered by project-id.
|
|
||||||
// Order them by file-id ASC then blobs ASC to
|
|
||||||
// - process files before blobs
|
|
||||||
- // - avoid head-of-line blocking from many project-files waiting on the generation of the projects DEK (round trip to AWS)
|
|
||||||
- // - bonus: increase chance of de-duplicating write to AWS
|
|
||||||
files.sort(
|
|
||||||
/**
|
|
||||||
* @param {QueueEntry} a
|
|
||||||
@@ -903,23 +776,15 @@ function* findFiles(ctx, folder, path, isInputLoop = false) {
|
|
||||||
* @param {Array<Project>} projects
|
|
||||||
* @param {string} prefix
|
|
||||||
* @param {Map<string,Array<Blob>>} blobs
|
|
||||||
- * @param {Map<string,Array<string>>} backedUpBlobs
|
|
||||||
* @return Generator<QueueEntry>
|
|
||||||
*/
|
|
||||||
-function* findFileInBatch(projects, prefix, blobs, backedUpBlobs) {
|
|
||||||
+function* findFileInBatch(projects, prefix, blobs) {
|
|
||||||
for (const project of projects) {
|
|
||||||
const projectIdS = project._id.toString()
|
|
||||||
const historyIdS = project.overleaf.history.id.toString()
|
|
||||||
const projectBlobs = blobs.get(historyIdS) || []
|
|
||||||
- const projectBackedUpBlobs = new Set(backedUpBlobs.get(projectIdS) || [])
|
|
||||||
- const ctx = new ProjectContext(
|
|
||||||
- project._id,
|
|
||||||
- historyIdS,
|
|
||||||
- projectBlobs,
|
|
||||||
- projectBackedUpBlobs
|
|
||||||
- )
|
|
||||||
+ const ctx = new ProjectContext(project._id, historyIdS, projectBlobs)
|
|
||||||
for (const blob of projectBlobs) {
|
|
||||||
- if (projectBackedUpBlobs.has(blob.getHash())) continue
|
|
||||||
ctx.remainingQueueEntries++
|
|
||||||
yield {
|
|
||||||
ctx,
|
|
||||||
@@ -951,42 +816,11 @@ async function collectProjectBlobs(batch) {
|
|
||||||
return await getProjectBlobsBatch(batch.map(p => p.overleaf.history.id))
|
|
||||||
}
|
|
||||||
|
|
||||||
-/**
|
|
||||||
- * @param {Array<Project>} projects
|
|
||||||
- * @return {Promise<{nBackedUpBlobs:number,backedUpBlobs:Map<string,Array<string>>}>}
|
|
||||||
- */
|
|
||||||
-async function collectBackedUpBlobs(projects) {
|
|
||||||
- let nBackedUpBlobs = 0
|
|
||||||
- const backedUpBlobs = new Map()
|
|
||||||
- if (!COLLECT_BACKED_UP_BLOBS) return { nBackedUpBlobs, backedUpBlobs }
|
|
||||||
-
|
|
||||||
- const cursor = backedUpBlobsCollection.find(
|
|
||||||
- { _id: { $in: projects.map(p => p._id) } },
|
|
||||||
- {
|
|
||||||
- readPreference: READ_PREFERENCE_SECONDARY,
|
|
||||||
- sort: { _id: 1 },
|
|
||||||
- }
|
|
||||||
- )
|
|
||||||
- for await (const record of cursor) {
|
|
||||||
- const blobs = record.blobs.map(b => b.toString('hex'))
|
|
||||||
- backedUpBlobs.set(record._id.toString(), blobs)
|
|
||||||
- nBackedUpBlobs += blobs.length
|
|
||||||
- }
|
|
||||||
- return { nBackedUpBlobs, backedUpBlobs }
|
|
||||||
-}
|
|
||||||
-
|
|
||||||
-const BATCH_HASH_WRITES = 1_000
|
|
||||||
const BATCH_FILE_UPDATES = 100
|
|
||||||
|
|
||||||
const MONGO_PATH_SKIP_WRITE_HASH_TO_FILE_TREE = 'skip-write-to-file-tree'
|
|
||||||
|
|
||||||
class ProjectContext {
|
|
||||||
- /** @type {Promise<CachedPerProjectEncryptedS3Persistor> | null} */
|
|
||||||
- #cachedPersistorPromise = null
|
|
||||||
-
|
|
||||||
- /** @type {Set<string>} */
|
|
||||||
- #backedUpBlobs
|
|
||||||
-
|
|
||||||
/** @type {Map<string, Blob>} */
|
|
||||||
#historyBlobs
|
|
||||||
|
|
||||||
@@ -1000,12 +834,10 @@ class ProjectContext {
|
|
||||||
* @param {ObjectId} projectId
|
|
||||||
* @param {string} historyId
|
|
||||||
* @param {Array<Blob>} blobs
|
|
||||||
- * @param {Set<string>} backedUpBlobs
|
|
||||||
*/
|
|
||||||
- constructor(projectId, historyId, blobs, backedUpBlobs) {
|
|
||||||
+ constructor(projectId, historyId, blobs) {
|
|
||||||
this.projectId = projectId
|
|
||||||
this.historyId = historyId
|
|
||||||
- this.#backedUpBlobs = backedUpBlobs
|
|
||||||
this.#historyBlobs = new Map(blobs.map(b => [b.getHash(), b]))
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -1034,75 +866,17 @@ class ProjectContext {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
- /**
|
|
||||||
- * @param {string} key
|
|
||||||
- * @return {Promise<CachedPerProjectEncryptedS3Persistor>}
|
|
||||||
- */
|
|
||||||
- getCachedPersistor(key) {
|
|
||||||
- if (!this.#cachedPersistorPromise) {
|
|
||||||
- // Fetch DEK once, but only if needed -- upon the first use
|
|
||||||
- this.#cachedPersistorPromise = this.#getCachedPersistorWithRetries(key)
|
|
||||||
- }
|
|
||||||
- return this.#cachedPersistorPromise
|
|
||||||
- }
|
|
||||||
-
|
|
||||||
- /**
|
|
||||||
- * @param {string} key
|
|
||||||
- * @return {Promise<CachedPerProjectEncryptedS3Persistor>}
|
|
||||||
- */
|
|
||||||
- async #getCachedPersistorWithRetries(key) {
|
|
||||||
- // Optimization: Skip GET on DEK in case no blobs are marked as backed up yet.
|
|
||||||
- let tryGenerateDEKFirst = this.#backedUpBlobs.size === 0
|
|
||||||
- for (let attempt = 0; attempt < RETRIES; attempt++) {
|
|
||||||
- try {
|
|
||||||
- if (tryGenerateDEKFirst) {
|
|
||||||
- try {
|
|
||||||
- return await backupPersistor.generateDataEncryptionKey(
|
|
||||||
- projectBlobsBucket,
|
|
||||||
- key
|
|
||||||
- )
|
|
||||||
- } catch (err) {
|
|
||||||
- if (err instanceof AlreadyWrittenError) {
|
|
||||||
- tryGenerateDEKFirst = false
|
|
||||||
- // fall back to GET below
|
|
||||||
- } else {
|
|
||||||
- throw err
|
|
||||||
- }
|
|
||||||
- }
|
|
||||||
- }
|
|
||||||
- return await backupPersistor.forProject(projectBlobsBucket, key)
|
|
||||||
- } catch (err) {
|
|
||||||
- if (gracefulShutdownInitiated) throw err
|
|
||||||
- if (err instanceof NoKEKMatchedError) {
|
|
||||||
- throw err
|
|
||||||
- } else {
|
|
||||||
- logger.warn(
|
|
||||||
- { err, projectId: this.projectId, attempt },
|
|
||||||
- 'failed to get DEK, trying again'
|
|
||||||
- )
|
|
||||||
- const jitter = Math.random() * RETRY_DELAY_MS
|
|
||||||
- await setTimeout(RETRY_DELAY_MS + jitter)
|
|
||||||
- }
|
|
||||||
- }
|
|
||||||
- }
|
|
||||||
- return await backupPersistor.forProject(projectBlobsBucket, key)
|
|
||||||
- }
|
|
||||||
-
|
|
||||||
async flushMongoQueuesIfNeeded() {
|
|
||||||
if (this.remainingQueueEntries === 0) {
|
|
||||||
await this.flushMongoQueues()
|
|
||||||
}
|
|
||||||
|
|
||||||
- if (this.#completedBlobs.size > BATCH_HASH_WRITES) {
|
|
||||||
- await this.#storeBackedUpBlobs()
|
|
||||||
- }
|
|
||||||
if (this.#pendingFileWrites.length > BATCH_FILE_UPDATES) {
|
|
||||||
await this.#storeFileHashes()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async flushMongoQueues() {
|
|
||||||
- await this.#storeBackedUpBlobs()
|
|
||||||
await this.#storeFileHashes()
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -1111,20 +885,6 @@ class ProjectContext {
|
|
||||||
/** @type {Set<string>} */
|
|
||||||
#completedBlobs = new Set()
|
|
||||||
|
|
||||||
- async #storeBackedUpBlobs() {
|
|
||||||
- if (this.#completedBlobs.size === 0) return
|
|
||||||
- const blobs = Array.from(this.#completedBlobs).map(
|
|
||||||
- hash => new Binary(Buffer.from(hash, 'hex'))
|
|
||||||
- )
|
|
||||||
- this.#completedBlobs.clear()
|
|
||||||
- STATS.mongoUpdates++
|
|
||||||
- await backedUpBlobsCollection.updateOne(
|
|
||||||
- { _id: this.projectId },
|
|
||||||
- { $addToSet: { blobs: { $each: blobs } } },
|
|
||||||
- { upsert: true }
|
|
||||||
- )
|
|
||||||
- }
|
|
||||||
-
|
|
||||||
/**
|
|
||||||
* @param {string} hash
|
|
||||||
*/
|
|
||||||
@@ -1142,8 +902,7 @@ class ProjectContext {
|
|
||||||
/**
|
|
||||||
* @param {string} hash
|
|
||||||
*/
|
|
||||||
- recordBackedUpBlob(hash) {
|
|
||||||
- this.#backedUpBlobs.add(hash)
|
|
||||||
+ recordCompletedBlob(hash) {
|
|
||||||
this.#completedBlobs.add(hash)
|
|
||||||
this.#pendingBlobs.delete(hash)
|
|
||||||
}
|
|
||||||
@@ -1152,12 +911,8 @@ class ProjectContext {
|
|
||||||
* @param {string} hash
|
|
||||||
* @return {boolean}
|
|
||||||
*/
|
|
||||||
- hasBackedUpBlob(hash) {
|
|
||||||
- return (
|
|
||||||
- this.#pendingBlobs.has(hash) ||
|
|
||||||
- this.#completedBlobs.has(hash) ||
|
|
||||||
- this.#backedUpBlobs.has(hash)
|
|
||||||
- )
|
|
||||||
+ hasCompletedBlob(hash) {
|
|
||||||
+ return this.#pendingBlobs.has(hash) || this.#completedBlobs.has(hash)
|
|
||||||
}
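Note: once the AWS backup path is removed, ProjectContext only needs to track which blob hashes are in flight or finished for the GCS writes; the backedUpBlobs collection writes and the per-project DEK handling disappear. A condensed sketch of the remaining bookkeeping (method names follow the patch, the standalone class itself is illustrative, not verbatim code):

```
// Illustrative summary of the post-patch per-project blob bookkeeping.
class BlobProgress {
  private pending = new Set<string>()
  private completed = new Set<string>()

  recordPendingBlob(hash: string) {
    this.pending.add(hash)
  }

  recordFailedBlob(hash: string) {
    this.pending.delete(hash)
  }

  recordCompletedBlob(hash: string) {
    this.completed.add(hash)
    this.pending.delete(hash)
  }

  // In-flight uploads count as done so duplicate files in a batch are not re-read.
  hasCompletedBlob(hash: string) {
    return this.pending.has(hash) || this.completed.has(hash)
  }
}
```
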
|
|
||||||
|
|
||||||
/** @type {Array<QueueEntry>} */
|
|
||||||
diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
|
|
||||||
index 8f861d39345..62b0b1de25f 100644
|
|
||||||
--- a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
|
|
||||||
+++ b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
|
|
||||||
@@ -4,23 +4,17 @@ import Stream from 'node:stream'
|
|
||||||
import { setTimeout } from 'node:timers/promises'
|
|
||||||
import { promisify } from 'node:util'
|
|
||||||
import { ObjectId, Binary } from 'mongodb'
|
|
||||||
-import {
|
|
||||||
- db,
|
|
||||||
- backedUpBlobs,
|
|
||||||
- globalBlobs,
|
|
||||||
-} from '../../../../storage/lib/mongodb.js'
|
|
||||||
+import { db, globalBlobs } from '../../../../storage/lib/mongodb.js'
|
|
||||||
import cleanup from './support/cleanup.js'
|
|
||||||
import testProjects from '../api/support/test_projects.js'
|
|
||||||
import { execFile } from 'node:child_process'
|
|
||||||
import chai, { expect } from 'chai'
|
|
||||||
import chaiExclude from 'chai-exclude'
|
|
||||||
-import config from 'config'
|
|
||||||
import { WritableBuffer } from '@overleaf/stream-utils'
|
|
||||||
import {
|
|
||||||
backupPersistor,
|
|
||||||
projectBlobsBucket,
|
|
||||||
} from '../../../../storage/lib/backupPersistor.mjs'
|
|
||||||
-import projectKey from '../../../../storage/lib/project_key.js'
|
|
||||||
import {
|
|
||||||
BlobStore,
|
|
||||||
makeProjectKey,
|
|
||||||
@@ -31,9 +25,6 @@ import express from 'express'
|
|
||||||
chai.use(chaiExclude)
|
|
||||||
const TIMEOUT = 20 * 1_000
|
|
||||||
|
|
||||||
-const { deksBucket } = config.get('backupStore')
|
|
||||||
-const { tieringStorageClass } = config.get('backupPersistor')
|
|
||||||
-
|
|
||||||
const projectsCollection = db.collection('projects')
|
|
||||||
const deletedProjectsCollection = db.collection('deletedProjects')
|
|
||||||
|
|
||||||
@@ -117,17 +108,6 @@ function binaryForGitBlobHash(gitBlobHash) {
|
|
||||||
return new Binary(Buffer.from(gitBlobHash, 'hex'))
|
|
||||||
}
|
|
||||||
|
|
||||||
-async function listS3Bucket(bucket, wantStorageClass) {
|
|
||||||
- const client = backupPersistor._getClientForBucket(bucket)
|
|
||||||
- const response = await client.listObjectsV2({ Bucket: bucket }).promise()
|
|
||||||
-
|
|
||||||
- for (const object of response.Contents || []) {
|
|
||||||
- expect(object).to.have.property('StorageClass', wantStorageClass)
|
|
||||||
- }
|
|
||||||
-
|
|
||||||
- return (response.Contents || []).map(item => item.Key || '')
|
|
||||||
-}
|
|
||||||
-
|
|
||||||
function objectIdFromTime(timestamp) {
|
|
||||||
return ObjectId.createFromTime(new Date(timestamp).getTime() / 1000)
|
|
||||||
}
|
|
||||||
@@ -591,11 +571,7 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
expect((await fs.promises.readdir('/tmp')).join(';')).to.not.match(
|
|
||||||
/back_fill_file_hash/
|
|
||||||
)
|
|
||||||
- const extraStatsKeys = [
|
|
||||||
- 'eventLoop',
|
|
||||||
- 'readFromGCSThroughputMiBPerSecond',
|
|
||||||
- 'writeToAWSThroughputMiBPerSecond',
|
|
||||||
- ]
|
|
||||||
+ const extraStatsKeys = ['eventLoop', 'readFromGCSThroughputMiBPerSecond']
|
|
||||||
const stats = JSON.parse(
|
|
||||||
result.stderr
|
|
||||||
.split('\n')
|
|
||||||
@@ -610,7 +586,6 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
delete stats.time
|
|
||||||
if (shouldHaveWritten) {
|
|
||||||
expect(stats.readFromGCSThroughputMiBPerSecond).to.be.greaterThan(0)
|
|
||||||
- expect(stats.writeToAWSThroughputMiBPerSecond).to.be.greaterThan(0)
|
|
||||||
}
|
|
||||||
for (const key of extraStatsKeys) {
|
|
||||||
delete stats[key]
|
|
||||||
@@ -856,109 +831,6 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
},
|
|
||||||
},
|
|
||||||
])
|
|
||||||
- expect(
|
|
||||||
- (await backedUpBlobs.find({}, { sort: { _id: 1 } }).toArray()).map(
|
|
||||||
- entry => {
|
|
||||||
- // blobs are pushed unordered into mongo. Sort the list for consistency.
|
|
||||||
- entry.blobs.sort()
|
|
||||||
- return entry
|
|
||||||
- }
|
|
||||||
- )
|
|
||||||
- ).to.deep.equal([
|
|
||||||
- {
|
|
||||||
- _id: projectId0,
|
|
||||||
- blobs: [
|
|
||||||
- binaryForGitBlobHash(gitBlobHash(fileId0)),
|
|
||||||
- binaryForGitBlobHash(hashFile7),
|
|
||||||
- binaryForGitBlobHash(hashTextBlob0),
|
|
||||||
- ].sort(),
|
|
||||||
- },
|
|
||||||
- {
|
|
||||||
- _id: projectId1,
|
|
||||||
- blobs: [
|
|
||||||
- binaryForGitBlobHash(gitBlobHash(fileId1)),
|
|
||||||
- binaryForGitBlobHash(hashTextBlob1),
|
|
||||||
- ].sort(),
|
|
||||||
- },
|
|
||||||
- {
|
|
||||||
- _id: projectId2,
|
|
||||||
- blobs: [binaryForGitBlobHash(hashTextBlob2)]
|
|
||||||
- .concat(
|
|
||||||
- processHashedFiles
|
|
||||||
- ? [binaryForGitBlobHash(gitBlobHash(fileId2))]
|
|
||||||
- : []
|
|
||||||
- )
|
|
||||||
- .sort(),
|
|
||||||
- },
|
|
||||||
- {
|
|
||||||
- _id: projectIdDeleted0,
|
|
||||||
- blobs: [binaryForGitBlobHash(gitBlobHash(fileId4))].sort(),
|
|
||||||
- },
|
|
||||||
- {
|
|
||||||
- _id: projectId3,
|
|
||||||
- blobs: [binaryForGitBlobHash(gitBlobHash(fileId3))].sort(),
|
|
||||||
- },
|
|
||||||
- ...(processHashedFiles
|
|
||||||
- ? [
|
|
||||||
- {
|
|
||||||
- _id: projectIdDeleted1,
|
|
||||||
- blobs: [binaryForGitBlobHash(gitBlobHash(fileId5))].sort(),
|
|
||||||
- },
|
|
||||||
- ]
|
|
||||||
- : []),
|
|
||||||
- {
|
|
||||||
- _id: projectIdBadFileTree0,
|
|
||||||
- blobs: [binaryForGitBlobHash(hashTextBlob3)].sort(),
|
|
||||||
- },
|
|
||||||
- {
|
|
||||||
- _id: projectIdBadFileTree3,
|
|
||||||
- blobs: [binaryForGitBlobHash(gitBlobHash(fileId9))].sort(),
|
|
||||||
- },
|
|
||||||
- ])
|
|
||||||
- })
|
|
||||||
- it('should have backed up all the files', async function () {
|
|
||||||
- expect(tieringStorageClass).to.exist
|
|
||||||
- const blobs = await listS3Bucket(projectBlobsBucket, tieringStorageClass)
|
|
||||||
- expect(blobs.sort()).to.deep.equal(
|
|
||||||
- Array.from(
|
|
||||||
- new Set(
|
|
||||||
- writtenBlobs
|
|
||||||
- .map(({ historyId, fileId, hash }) =>
|
|
||||||
- makeProjectKey(historyId, hash || gitBlobHash(fileId))
|
|
||||||
- )
|
|
||||||
- .sort()
|
|
||||||
- )
|
|
||||||
- )
|
|
||||||
- )
|
|
||||||
- for (let { historyId, fileId, hash, content } of writtenBlobs) {
|
|
||||||
- hash = hash || gitBlobHash(fileId.toString())
|
|
||||||
- const s = await backupPersistor.getObjectStream(
|
|
||||||
- projectBlobsBucket,
|
|
||||||
- makeProjectKey(historyId, hash),
|
|
||||||
- { autoGunzip: true }
|
|
||||||
- )
|
|
||||||
- const buf = new WritableBuffer()
|
|
||||||
- await Stream.promises.pipeline(s, buf)
|
|
||||||
- expect(gitBlobHashBuffer(buf.getContents())).to.equal(hash)
|
|
||||||
- if (content) {
|
|
||||||
- expect(buf.getContents()).to.deep.equal(content)
|
|
||||||
- } else {
|
|
||||||
- const id = buf.getContents().toString('utf-8')
|
|
||||||
- expect(id).to.equal(fileId.toString())
|
|
||||||
- // double check we are not comparing 'undefined' or '[object Object]' above
|
|
||||||
- expect(id).to.match(/^[a-f0-9]{24}$/)
|
|
||||||
- }
|
|
||||||
- }
|
|
||||||
- const deks = await listS3Bucket(deksBucket, 'STANDARD')
|
|
||||||
- expect(deks.sort()).to.deep.equal(
|
|
||||||
- Array.from(
|
|
||||||
- new Set(
|
|
||||||
- writtenBlobs.map(
|
|
||||||
- ({ historyId }) => projectKey.format(historyId) + '/dek'
|
|
||||||
- )
|
|
||||||
- )
|
|
||||||
- ).sort()
|
|
||||||
- )
|
|
||||||
})
|
|
||||||
it('should have written the back filled files to history v1', async function () {
|
|
||||||
for (const { historyId, hash, fileId, content } of writtenBlobs) {
|
|
||||||
@@ -991,14 +863,13 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
// We still need to iterate over all the projects and blobs.
|
|
||||||
projects: 10,
|
|
||||||
blobs: 10,
|
|
||||||
- backedUpBlobs: 10,
|
|
||||||
+
|
|
||||||
badFileTrees: 4,
|
|
||||||
}
|
|
||||||
if (processHashedFiles) {
|
|
||||||
stats = sumStats(stats, {
|
|
||||||
...STATS_ALL_ZERO,
|
|
||||||
blobs: 2,
|
|
||||||
- backedUpBlobs: 2,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
expect(rerun.stats).deep.equal(stats)
|
|
||||||
@@ -1024,7 +895,6 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
const STATS_ALL_ZERO = {
|
|
||||||
projects: 0,
|
|
||||||
blobs: 0,
|
|
||||||
- backedUpBlobs: 0,
|
|
||||||
filesWithHash: 0,
|
|
||||||
filesWithoutHash: 0,
|
|
||||||
filesDuplicated: 0,
|
|
||||||
@@ -1038,21 +908,14 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
fileHardDeleted: 0,
|
|
||||||
badFileTrees: 0,
|
|
||||||
mongoUpdates: 0,
|
|
||||||
- deduplicatedWriteToAWSLocalCount: 0,
|
|
||||||
- deduplicatedWriteToAWSLocalEgress: 0,
|
|
||||||
- deduplicatedWriteToAWSRemoteCount: 0,
|
|
||||||
- deduplicatedWriteToAWSRemoteEgress: 0,
|
|
||||||
readFromGCSCount: 0,
|
|
||||||
readFromGCSIngress: 0,
|
|
||||||
- writeToAWSCount: 0,
|
|
||||||
- writeToAWSEgress: 0,
|
|
||||||
writeToGCSCount: 0,
|
|
||||||
writeToGCSEgress: 0,
|
|
||||||
}
|
|
||||||
const STATS_UP_TO_PROJECT1 = {
|
|
||||||
projects: 2,
|
|
||||||
blobs: 2,
|
|
||||||
- backedUpBlobs: 0,
|
|
||||||
filesWithHash: 0,
|
|
||||||
filesWithoutHash: 5,
|
|
||||||
filesDuplicated: 1,
|
|
||||||
@@ -1065,22 +928,15 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
projectHardDeleted: 0,
|
|
||||||
fileHardDeleted: 0,
|
|
||||||
badFileTrees: 0,
|
|
||||||
- mongoUpdates: 4,
|
|
||||||
- deduplicatedWriteToAWSLocalCount: 0,
|
|
||||||
- deduplicatedWriteToAWSLocalEgress: 0,
|
|
||||||
- deduplicatedWriteToAWSRemoteCount: 0,
|
|
||||||
- deduplicatedWriteToAWSRemoteEgress: 0,
|
|
||||||
- readFromGCSCount: 6,
|
|
||||||
- readFromGCSIngress: 4000086,
|
|
||||||
- writeToAWSCount: 5,
|
|
||||||
- writeToAWSEgress: 4026,
|
|
||||||
+ mongoUpdates: 2, // 4-2 blobs written to backedUpBlobs collection
|
|
||||||
+ readFromGCSCount: 4,
|
|
||||||
+ readFromGCSIngress: 4000072,
|
|
||||||
writeToGCSCount: 3,
|
|
||||||
writeToGCSEgress: 4000048,
|
|
||||||
}
|
|
||||||
const STATS_UP_FROM_PROJECT1_ONWARD = {
|
|
||||||
projects: 8,
|
|
||||||
blobs: 2,
|
|
||||||
- backedUpBlobs: 0,
|
|
||||||
filesWithHash: 0,
|
|
||||||
filesWithoutHash: 4,
|
|
||||||
filesDuplicated: 0,
|
|
||||||
@@ -1093,26 +949,18 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
projectHardDeleted: 0,
|
|
||||||
fileHardDeleted: 0,
|
|
||||||
badFileTrees: 4,
|
|
||||||
- mongoUpdates: 8,
|
|
||||||
- deduplicatedWriteToAWSLocalCount: 1,
|
|
||||||
- deduplicatedWriteToAWSLocalEgress: 30,
|
|
||||||
- deduplicatedWriteToAWSRemoteCount: 0,
|
|
||||||
- deduplicatedWriteToAWSRemoteEgress: 0,
|
|
||||||
- readFromGCSCount: 6,
|
|
||||||
- readFromGCSIngress: 110,
|
|
||||||
- writeToAWSCount: 5,
|
|
||||||
- writeToAWSEgress: 143,
|
|
||||||
+ mongoUpdates: 3, // previously 5 blobs written to backedUpBlobs collection
|
|
||||||
+ readFromGCSCount: 4,
|
|
||||||
+ readFromGCSIngress: 96,
|
|
||||||
writeToGCSCount: 3,
|
|
||||||
writeToGCSEgress: 72,
|
|
||||||
}
|
|
||||||
const STATS_FILES_HASHED_EXTRA = {
|
|
||||||
...STATS_ALL_ZERO,
|
|
||||||
filesWithHash: 2,
|
|
||||||
- mongoUpdates: 2,
|
|
||||||
+ mongoUpdates: 0, // previously 2 blobs written to backedUpBlobs collection
|
|
||||||
readFromGCSCount: 2,
|
|
||||||
readFromGCSIngress: 48,
|
|
||||||
- writeToAWSCount: 2,
|
|
||||||
- writeToAWSEgress: 60,
|
|
||||||
writeToGCSCount: 2,
|
|
||||||
writeToGCSEgress: 48,
|
|
||||||
}
|
|
||||||
@@ -1144,8 +992,6 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
...STATS_ALL_ZERO,
|
|
||||||
filesFailed: 1,
|
|
||||||
readFromGCSIngress: -24,
|
|
||||||
- writeToAWSCount: -1,
|
|
||||||
- writeToAWSEgress: -28,
|
|
||||||
writeToGCSCount: -1,
|
|
||||||
writeToGCSEgress: -24,
|
|
||||||
})
|
|
||||||
@@ -1269,13 +1115,14 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
before('run script with hashed files', async function () {
|
|
||||||
output2 = await runScript(['--processHashedFiles=true'], {})
|
|
||||||
})
|
|
||||||
- it('should print stats', function () {
|
|
||||||
+ it('should print stats for the first run without hashed files', function () {
|
|
||||||
expect(output1.stats).deep.equal(STATS_ALL)
|
|
||||||
+ })
|
|
||||||
+ it('should print stats for the hashed files run', function () {
|
|
||||||
expect(output2.stats).deep.equal({
|
|
||||||
...STATS_FILES_HASHED_EXTRA,
|
|
||||||
projects: 10,
|
|
||||||
blobs: 10,
|
|
||||||
- backedUpBlobs: 10,
|
|
||||||
badFileTrees: 4,
|
|
||||||
})
|
|
||||||
})
|
|
||||||
@@ -1322,9 +1169,7 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
...STATS_FILES_HASHED_EXTRA,
|
|
||||||
readFromGCSCount: 3,
|
|
||||||
readFromGCSIngress: 72,
|
|
||||||
- deduplicatedWriteToAWSLocalCount: 1,
|
|
||||||
- deduplicatedWriteToAWSLocalEgress: 30,
|
|
||||||
- mongoUpdates: 1,
|
|
||||||
+ mongoUpdates: 0,
|
|
||||||
filesWithHash: 3,
|
|
||||||
})
|
|
||||||
)
|
|
||||||
@@ -1354,48 +1199,6 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
expect(output.stats).deep.equal(
|
|
||||||
sumStats(STATS_ALL, {
|
|
||||||
...STATS_ALL_ZERO,
|
|
||||||
- // one remote deduplicate
|
|
||||||
- deduplicatedWriteToAWSRemoteCount: 1,
|
|
||||||
- deduplicatedWriteToAWSRemoteEgress: 28,
|
|
||||||
- writeToAWSEgress: -28, // subtract skipped egress
|
|
||||||
- })
|
|
||||||
- )
|
|
||||||
- })
|
|
||||||
- commonAssertions()
|
|
||||||
- })
|
|
||||||
-
|
|
||||||
- describe('with something in the bucket and marked as processed', function () {
|
|
||||||
- before('prepare environment', prepareEnvironment)
|
|
||||||
- before('create a file in s3', async function () {
|
|
||||||
- await backupPersistor.sendStream(
|
|
||||||
- projectBlobsBucket,
|
|
||||||
- makeProjectKey(historyId0, hashTextBlob0),
|
|
||||||
- Stream.Readable.from([contentTextBlob0]),
|
|
||||||
- { contentLength: contentTextBlob0.byteLength }
|
|
||||||
- )
|
|
||||||
- await backedUpBlobs.insertMany([
|
|
||||||
- {
|
|
||||||
- _id: projectId0,
|
|
||||||
- blobs: [binaryForGitBlobHash(hashTextBlob0)],
|
|
||||||
- },
|
|
||||||
- ])
|
|
||||||
- })
|
|
||||||
- let output
|
|
||||||
- before('run script', async function () {
|
|
||||||
- output = await runScript([], {
|
|
||||||
- CONCURRENCY: '1',
|
|
||||||
- })
|
|
||||||
- })
|
|
||||||
-
|
|
||||||
- it('should print stats', function () {
|
|
||||||
- expect(output.stats).deep.equal(
|
|
||||||
- sumStats(STATS_ALL, {
|
|
||||||
- ...STATS_ALL_ZERO,
|
|
||||||
- backedUpBlobs: 1,
|
|
||||||
- writeToAWSCount: -1,
|
|
||||||
- writeToAWSEgress: -27,
|
|
||||||
- readFromGCSCount: -1,
|
|
||||||
- readFromGCSIngress: -7,
|
|
||||||
})
|
|
||||||
)
|
|
||||||
})
|
|
||||||
@@ -1418,8 +1221,10 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
- it('should print stats', function () {
|
|
||||||
+ it('should print stats for part 0', function () {
|
|
||||||
expect(outputPart0.stats).to.deep.equal(STATS_UP_TO_PROJECT1)
|
|
||||||
+ })
|
|
||||||
+ it('should print stats for part 1', function () {
|
|
||||||
expect(outputPart1.stats).to.deep.equal(STATS_UP_FROM_PROJECT1_ONWARD)
|
|
||||||
})
|
|
||||||
commonAssertions()
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
index 3be1c8a5407..c9ed13c6cb4 100644
|
|
||||||
--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
+++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
@@ -388,12 +388,6 @@ async function processFileOnce(entry, filePath) {
|
|
||||||
fileId,
|
|
||||||
} = entry
|
|
||||||
const blobStore = new BlobStore(historyId)
|
|
||||||
- if (entry.blob) {
|
|
||||||
- const { blob } = entry
|
|
||||||
- const hash = blob.getHash()
|
|
||||||
- return hash
|
|
||||||
- }
|
|
||||||
-
|
|
||||||
STATS.readFromGCSCount++
|
|
||||||
// make a fetch request to filestore itself
|
|
||||||
const src = await fetchFromFilestore(projectId, fileId)
|
|
||||||
@@ -784,16 +778,6 @@ function* findFileInBatch(projects, prefix, blobs) {
|
|
||||||
const historyIdS = project.overleaf.history.id.toString()
|
|
||||||
const projectBlobs = blobs.get(historyIdS) || []
|
|
||||||
const ctx = new ProjectContext(project._id, historyIdS, projectBlobs)
|
|
||||||
- for (const blob of projectBlobs) {
|
|
||||||
- ctx.remainingQueueEntries++
|
|
||||||
- yield {
|
|
||||||
- ctx,
|
|
||||||
- cacheKey: blob.getHash(),
|
|
||||||
- path: MONGO_PATH_SKIP_WRITE_HASH_TO_FILE_TREE,
|
|
||||||
- blob,
|
|
||||||
- hash: blob.getHash(),
|
|
||||||
- }
|
|
||||||
- }
|
|
||||||
try {
|
|
||||||
yield* findFiles(ctx, project.rootFolder?.[0], prefix, true)
|
|
||||||
} catch (err) {
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
index c9ed13c6cb4..f24ce4a6605 100644
|
|
||||||
--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
+++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
@@ -387,6 +387,13 @@ async function processFileOnce(entry, filePath) {
|
|
||||||
ctx: { projectId, historyId },
|
|
||||||
fileId,
|
|
||||||
} = entry
|
|
||||||
+ if (entry.hash && entry.ctx.hasCompletedBlob(entry.hash)) {
|
|
||||||
+ // We can enter this case for two identical files in the same project,
|
|
||||||
+ // one with hash, the other without. When the one without hash gets
|
|
||||||
+ // processed first, we can skip downloading the other one we already
|
|
||||||
+ // know the hash of.
|
|
||||||
+ return entry.hash
|
|
||||||
+ }
|
|
||||||
const blobStore = new BlobStore(historyId)
|
|
||||||
STATS.readFromGCSCount++
|
|
||||||
// make a fetch request to filestore itself
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
index f24ce4a6605..0ccadaf5a95 100644
|
|
||||||
--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
+++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
@@ -559,8 +559,9 @@ async function processBatch(batch, prefix = 'rootFolder.0') {
|
|
||||||
blobs.clear()
|
|
||||||
|
|
||||||
// The files are currently ordered by project-id.
|
|
||||||
- // Order them by file-id ASC then blobs ASC to
|
|
||||||
- // - process files before blobs
|
|
||||||
+ // Order them by file-id ASC then hash ASC to
|
|
||||||
+ // increase the hit rate on the "already processed
|
|
||||||
+ // hash for project" checks.
|
|
||||||
files.sort(
|
|
||||||
/**
|
|
||||||
* @param {QueueEntry} a
|
|
||||||
|
|
|
@@ -1,191 +0,0 @@
diff --git a/services/web/app.mjs b/services/web/app.mjs
index b7c723da3d77..3f54cc36a8c3 100644
--- a/services/web/app.mjs
+++ b/services/web/app.mjs
@@ -56,14 +56,8 @@ if (Settings.catchErrors) {
// Create ./data/dumpFolder if needed
FileWriter.ensureDumpFolderExists()

-if (
- !Features.hasFeature('project-history-blobs') &&
- !Features.hasFeature('filestore')
-) {
- throw new Error(
- 'invalid config: must enable either project-history-blobs (Settings.enableProjectHistoryBlobs=true) or enable filestore (Settings.disableFilestore=false)'
- )
-}
+// Validate combination of feature flags.
+Features.validateSettings()

// handle SIGTERM for graceful shutdown in kubernetes
process.on('SIGTERM', function (signal) {
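The `app.mjs` hunk above replaces the hard-coded flag check with a call into the Features module. A minimal sketch of what that boot-time validation amounts to, assuming the simplified `validateSettings()` from the later `Features.js` commit in this set; the standalone `Settings` object here is only for illustration:

```js
// Sketch only: what Features.validateSettings() guards against at startup.
const Settings = { filestoreMigrationLevel: 3 } // e.g. OVERLEAF_FILESTORE_MIGRATION_LEVEL=3

const Features = {
  validateSettings() {
    if (![0, 1, 2].includes(Settings.filestoreMigrationLevel)) {
      throw new Error(
        `invalid OVERLEAF_FILESTORE_MIGRATION_LEVEL=${Settings.filestoreMigrationLevel}, expected 0, 1 or 2`
      )
    }
  },
}

Features.validateSettings() // throws here, so the web service refuses to start with a broken config
```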
diff --git a/services/web/app/src/Features/History/HistoryURLHelper.js b/services/web/app/src/Features/History/HistoryURLHelper.js
index 8b8d8cbdd730..acb43ced68e0 100644
--- a/services/web/app/src/Features/History/HistoryURLHelper.js
+++ b/services/web/app/src/Features/History/HistoryURLHelper.js
@@ -8,7 +8,7 @@ function projectHistoryURLWithFilestoreFallback(
) {
const filestoreURL = `${Settings.apis.filestore.url}/project/${projectId}/file/${fileRef._id}?from=${origin}`
// TODO: When this file is converted to ES modules we will be able to use Features.hasFeature('project-history-blobs'). Currently we can't stub the feature return value in tests.
- if (fileRef.hash && Settings.enableProjectHistoryBlobs) {
+ if (fileRef.hash && Settings.filestoreMigrationLevel >= 1) {
return {
url: `${Settings.apis.project_history.url}/project/${historyId}/blob/${fileRef.hash}`,
fallbackURL: filestoreURL,
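`projectHistoryURLWithFilestoreFallback` now keys off `filestoreMigrationLevel` and returns a history-v1 blob URL plus a filestore fallback. A hedged sketch of how a caller could consume that pair; `fetchWithFallback` and its error handling are assumptions for illustration, only the `{ url, fallbackURL }` shape comes from the diff above:

```js
// Illustrative consumer of the { url, fallbackURL } pair returned above.
async function fetchWithFallback({ url, fallbackURL }) {
  const primary = await fetch(url)
  if (primary.ok) return primary
  if (!fallbackURL) throw new Error(`download failed with status ${primary.status}`)
  return fetch(fallbackURL) // fall back to filestore when the history blob is unavailable
}
```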
diff --git a/services/web/app/src/infrastructure/Features.js b/services/web/app/src/infrastructure/Features.js
index aaf51103b9b8..89c8e6b841d0 100644
--- a/services/web/app/src/infrastructure/Features.js
+++ b/services/web/app/src/infrastructure/Features.js
@@ -19,8 +19,7 @@ const trackChangesModuleAvailable =
* @property {boolean | undefined} enableGithubSync
* @property {boolean | undefined} enableGitBridge
* @property {boolean | undefined} enableHomepage
- * @property {boolean | undefined} enableProjectHistoryBlobs
- * @property {boolean | undefined} disableFilestore
+ * @property {number} filestoreMigrationLevel
* @property {boolean | undefined} enableSaml
* @property {boolean | undefined} ldap
* @property {boolean | undefined} oauth
@@ -29,7 +28,39 @@ const trackChangesModuleAvailable =
* @property {boolean | undefined} saml
*/

+/**
+ * @return {{'project-history-blobs': boolean, filestore: boolean}}
+ */
+function getFilestoreMigrationOptions() {
+ switch (Settings.filestoreMigrationLevel) {
+ case 0:
+ return {
+ 'project-history-blobs': false,
+ filestore: true,
+ }
+ case 1:
+ return {
+ 'project-history-blobs': true,
+ filestore: true,
+ }
+
+ case 2:
+ return {
+ 'project-history-blobs': true,
+ filestore: false,
+ }
+ default:
+ throw new Error(
+ `invalid OVERLEAF_FILESTORE_MIGRATION_LEVEL=${Settings.filestoreMigrationLevel}, expected 0, 1 or 2`
+ )
+ }
+}
+
const Features = {
+ validateSettings() {
+ getFilestoreMigrationOptions() // throws for invalid settings
+ },
+
/**
* @returns {boolean}
*/
@@ -89,9 +120,9 @@ const Features = {
Settings.enabledLinkedFileTypes.includes('url')
)
case 'project-history-blobs':
- return Boolean(Settings.enableProjectHistoryBlobs)
+ return getFilestoreMigrationOptions()['project-history-blobs']
case 'filestore':
- return Boolean(Settings.disableFilestore) === false
+ return getFilestoreMigrationOptions().filestore
case 'support':
return supportModuleAvailable
case 'symbol-palette':
diff --git a/services/web/config/settings.defaults.js b/services/web/config/settings.defaults.js
index bd0730d5d00c..4df63ebd7c6c 100644
--- a/services/web/config/settings.defaults.js
+++ b/services/web/config/settings.defaults.js
@@ -440,6 +440,9 @@ module.exports = {
','
),

+ filestoreMigrationLevel:
+ parseInt(process.env.OVERLEAF_FILESTORE_MIGRATION_LEVEL, 10) || 0,
+
// i18n
// ------
//

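The new default uses `parseInt(...) || 0`, so an unset or unparsable `OVERLEAF_FILESTORE_MIGRATION_LEVEL` silently falls back to level 0. A small sketch of that parsing behaviour (the sample values are assumptions for illustration):

```js
// parseInt yields NaN for undefined or garbage input, and NaN || 0 evaluates to 0.
const parseLevel = value => parseInt(value, 10) || 0

console.log(parseLevel(undefined)) // 0 (env var not set)
console.log(parseLevel('abc'))     // 0 (unparsable)
console.log(parseLevel('2'))       // 2 (history blobs only, filestore disabled)
```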
diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
index 0ccadaf5a955..2e12328e5c49 100644
--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
@@ -150,10 +150,6 @@ const CONCURRENT_BATCHES = parseInt(process.env.CONCURRENT_BATCHES || '2', 10)
const RETRIES = parseInt(process.env.RETRIES || '10', 10)
const RETRY_DELAY_MS = parseInt(process.env.RETRY_DELAY_MS || '100', 10)

-const USER_FILES_BUCKET_NAME = process.env.USER_FILES_BUCKET_NAME || ''
-if (!USER_FILES_BUCKET_NAME) {
- throw new Error('env var USER_FILES_BUCKET_NAME is missing')
-}
const RETRY_FILESTORE_404 = process.env.RETRY_FILESTORE_404 === 'true'
const BUFFER_DIR = fs.mkdtempSync(
process.env.BUFFER_DIR_PREFIX || '/tmp/back_fill_file_hash-'

diff --git a/services/web/app/src/infrastructure/Features.js b/services/web/app/src/infrastructure/Features.js
index 89c8e6b841d0..6147e70e0faf 100644
--- a/services/web/app/src/infrastructure/Features.js
+++ b/services/web/app/src/infrastructure/Features.js
@@ -28,37 +28,13 @@ const trackChangesModuleAvailable =
* @property {boolean | undefined} saml
*/

-/**
- * @return {{'project-history-blobs': boolean, filestore: boolean}}
- */
-function getFilestoreMigrationOptions() {
- switch (Settings.filestoreMigrationLevel) {
- case 0:
- return {
- 'project-history-blobs': false,
- filestore: true,
- }
- case 1:
- return {
- 'project-history-blobs': true,
- filestore: true,
- }
-
- case 2:
- return {
- 'project-history-blobs': true,
- filestore: false,
- }
- default:
+const Features = {
+ validateSettings() {
+ if (![0, 1, 2].includes(Settings.filestoreMigrationLevel)) {
throw new Error(
`invalid OVERLEAF_FILESTORE_MIGRATION_LEVEL=${Settings.filestoreMigrationLevel}, expected 0, 1 or 2`
)
- }
-}
-
-const Features = {
- validateSettings() {
- getFilestoreMigrationOptions() // throws for invalid settings
+ }
},

/**
@@ -120,9 +96,9 @@ const Features = {
Settings.enabledLinkedFileTypes.includes('url')
)
case 'project-history-blobs':
- return getFilestoreMigrationOptions()['project-history-blobs']
+ return Settings.filestoreMigrationLevel > 0
case 'filestore':
- return getFilestoreMigrationOptions().filestore
+ return Settings.filestoreMigrationLevel < 2
case 'support':
return supportModuleAvailable
case 'symbol-palette':
@@ -1,84 +0,0 @@
diff --git a/cron/deactivate-projects.sh b/cron/deactivate-projects.sh
index fab0fbfbf667..a391f99a5bd8 100755
--- a/cron/deactivate-projects.sh
+++ b/cron/deactivate-projects.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash

-set -eux
+set -eu

echo "-------------------------"
echo "Deactivating old projects"
diff --git a/cron/delete-projects.sh b/cron/delete-projects.sh
index e1ea5ac5e621..7cd45771716a 100755
--- a/cron/delete-projects.sh
+++ b/cron/delete-projects.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash

-set -eux
+set -eu

echo "-------------------------"
echo "Expiring deleted projects"
diff --git a/cron/delete-users.sh b/cron/delete-users.sh
index fe97bffeea0b..30872ac55657 100755
--- a/cron/delete-users.sh
+++ b/cron/delete-users.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash

-set -eux
+set -eu

echo "----------------------"
echo "Expiring deleted users"
diff --git a/cron/project-history-flush-all.sh b/cron/project-history-flush-all.sh
index d8bbb184aa37..8fe9eea5fc55 100755
--- a/cron/project-history-flush-all.sh
+++ b/cron/project-history-flush-all.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash

-set -eux
+set -eu

echo "---------------------------------"
echo "Flush all project-history changes"
diff --git a/cron/project-history-periodic-flush.sh b/cron/project-history-periodic-flush.sh
index 76feae410e26..1b8efff6cc7c 100755
--- a/cron/project-history-periodic-flush.sh
+++ b/cron/project-history-periodic-flush.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash

-set -eux
+set -eu

echo "--------------------------"
echo "Flush project-history queue"
diff --git a/cron/project-history-retry-hard.sh b/cron/project-history-retry-hard.sh
index 651a6615f22d..df9b4703a58e 100755
--- a/cron/project-history-retry-hard.sh
+++ b/cron/project-history-retry-hard.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash

-set -eux
+set -eu

echo "-----------------------------------"
echo "Retry project-history errors (hard)"
diff --git a/cron/project-history-retry-soft.sh b/cron/project-history-retry-soft.sh
index 70c597021b28..cbb6e714cae7 100755
--- a/cron/project-history-retry-soft.sh
+++ b/cron/project-history-retry-soft.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash

-set -eux
+set -eu

echo "-----------------------------------"
echo "Retry project-history errors (soft)"
@@ -1,76 +0,0 @@
diff --git a/package-lock.json b/package-lock.json
index 2b3a5868a20..d9d8285618d 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -35581,6 +35581,7 @@
"resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz",
"integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==",
"deprecated": "request has been deprecated, see https://github.com/request/request/issues/3142",
+ "license": "Apache-2.0",
"dependencies": {
"aws-sign2": "~0.7.0",
"aws4": "^1.8.0",
@@ -35638,15 +35639,15 @@
}
},
"node_modules/request/node_modules/tough-cookie": {
- "version": "2.5.0",
- "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz",
- "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==",
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-5.1.2.tgz",
+ "integrity": "sha512-FVDYdxtnj0G6Qm/DhNPSb8Ju59ULcup3tuJxkFb5K8Bv2pUXILbf0xZWU8PX8Ov19OXljbUyveOFwRMwkXzO+A==",
+ "license": "BSD-3-Clause",
"dependencies": {
- "psl": "^1.1.28",
- "punycode": "^2.1.1"
+ "tldts": "^6.1.32"
},
"engines": {
- "node": ">=0.8"
+ "node": ">=16"
}
},
"node_modules/requestretry": {
@@ -39612,6 +39613,24 @@
"tlds": "bin.js"
}
},
+ "node_modules/tldts": {
+ "version": "6.1.86",
+ "resolved": "https://registry.npmjs.org/tldts/-/tldts-6.1.86.tgz",
+ "integrity": "sha512-WMi/OQ2axVTf/ykqCQgXiIct+mSQDFdH2fkwhPwgEwvJ1kSzZRiinb0zF2Xb8u4+OqPChmyI6MEu4EezNJz+FQ==",
+ "license": "MIT",
+ "dependencies": {
+ "tldts-core": "^6.1.86"
+ },
+ "bin": {
+ "tldts": "bin/cli.js"
+ }
+ },
+ "node_modules/tldts-core": {
+ "version": "6.1.86",
+ "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-6.1.86.tgz",
+ "integrity": "sha512-Je6p7pkk+KMzMv2XXKmAE3McmolOQFdxkKw0R8EYNr7sELW46JqnNeTX8ybPiQgvg1ymCoF8LXs5fzFaZvJPTA==",
+ "license": "MIT"
+ },
"node_modules/tmp": {
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.3.tgz",
diff --git a/package.json b/package.json
index 388b750c3d2..44fffc4664a 100644
--- a/package.json
+++ b/package.json
@@ -33,6 +33,9 @@
"multer": "2.0.1",
"path-to-regexp": "3.3.0",
"qs": "6.13.0"
+ },
+ "request@2.88.2": {
+ "tough-cookie": "5.1.2"
}
},
"scripts": {
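The `package.json` hunk extends the npm `overrides` block so the deprecated `request@2.88.2` resolves `tough-cookie` 5.1.2 instead of 2.5.0, matching the lockfile change above. One hedged way to confirm the pin after an install; this simply shells out to `npm ls`, whose output format may differ across npm versions:

```js
// Ask npm which tough-cookie versions ended up in the dependency tree.
const { execSync } = require('node:child_process')
console.log(execSync('npm ls tough-cookie', { encoding: 'utf8' }))
// Expect only 5.1.2 once the override and lockfile change are installed.
```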
File diff suppressed because it is too large
@@ -1,82 +0,0 @@
diff --git a/services/web/frontend/js/features/review-panel-new/components/review-tooltip-menu.tsx b/services/web/frontend/js/features/review-panel-new/components/review-tooltip-menu.tsx
index f26542ebe909..fb6b68460bdc 100644
--- a/services/web/frontend/js/features/review-panel-new/components/review-tooltip-menu.tsx
+++ b/services/web/frontend/js/features/review-panel-new/components/review-tooltip-menu.tsx
@@ -18,7 +18,6 @@ import {
reviewTooltipStateField,
} from '@/features/source-editor/extensions/review-tooltip'
import { EditorView, getTooltip } from '@codemirror/view'
-import useViewerPermissions from '@/shared/hooks/use-viewer-permissions'
import usePreviousValue from '@/shared/hooks/use-previous-value'
import { useLayoutContext } from '@/shared/context/layout-context'
import { useReviewPanelViewActionsContext } from '../context/review-panel-view-context'
@@ -35,6 +34,7 @@ import { useEditorPropertiesContext } from '@/features/ide-react/context/editor-
import classNames from 'classnames'
import useEventListener from '@/shared/hooks/use-event-listener'
import useReviewPanelLayout from '../hooks/use-review-panel-layout'
+import { usePermissionsContext } from '@/features/ide-react/context/permissions-context'

const EDIT_MODE_SWITCH_WIDGET_HEIGHT = 40
const CM_LINE_RIGHT_PADDING = 8
@@ -43,7 +43,7 @@ const TOOLTIP_SHOW_DELAY = 120
const ReviewTooltipMenu: FC = () => {
const state = useCodeMirrorStateContext()
const view = useCodeMirrorViewContext()
- const isViewer = useViewerPermissions()
+ const permissions = usePermissionsContext()
const [show, setShow] = useState(true)
const { setView } = useReviewPanelViewActionsContext()
const { openReviewPanel } = useReviewPanelLayout()
@@ -58,7 +58,7 @@ const ReviewTooltipMenu: FC = () => {

const addComment = useCallback(() => {
const { main } = view.state.selection
- if (main.empty) {
+ if (main.empty || !permissions.comment) {
return
}

@@ -74,11 +74,11 @@ const ReviewTooltipMenu: FC = () => {

view.dispatch({ effects })
setShow(false)
- }, [openReviewPanel, setView, setShow, view])
+ }, [view, permissions.comment, openReviewPanel, setView])

useEventListener('add-new-review-comment', addComment)

- if (isViewer || !show || !tooltipState) {
+ if (!permissions.comment || !show || !tooltipState) {
return null
}

diff --git a/services/web/frontend/js/features/source-editor/components/toolbar/toolbar-items.tsx b/services/web/frontend/js/features/source-editor/components/toolbar/toolbar-items.tsx
index 3404976d4462..1811ccc99950 100644
--- a/services/web/frontend/js/features/source-editor/components/toolbar/toolbar-items.tsx
+++ b/services/web/frontend/js/features/source-editor/components/toolbar/toolbar-items.tsx
@@ -16,5 +16,6 @@ import { isSplitTestEnabled } from '@/utils/splitTestUtils'
import { isMac } from '@/shared/utils/os'
import { useProjectContext } from '@/shared/context/project-context'
+import { usePermissionsContext } from '@/features/ide-react/context/permissions-context'

export const ToolbarItems: FC<{
state: EditorState
@@ -35,6 +36,7 @@ export const ToolbarItems: FC<{
useEditorPropertiesContext()
const { writefullInstance } = useEditorContext()
const { features } = useProjectContext()
+ const permissions = usePermissionsContext()
const isActive = withinFormattingCommand(state)

const symbolPaletteAvailable = getMeta('ol-symbolPaletteAvailable')
@@ -131,7 +133,7 @@ export const ToolbarItems: FC<{
command={commands.wrapInHref}
icon="add_link"
/>
- {features.trackChangesVisible && (
+ {features.trackChangesVisible && permissions.comment && (
<ToolbarButton
id="toolbar-add-comment"
label={t('add_comment')}
@@ -1,673 +0,0 @@
|
||||||
|
|
||||||
|
|
||||||
diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
index c0fdda35d8f..09212d426e3 100644
|
|
||||||
--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
+++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
@@ -83,7 +83,7 @@ ObjectId.cacheHexString = true
|
|
||||||
*/
|
|
||||||
function parseArgs() {
|
|
||||||
const PUBLIC_LAUNCH_DATE = new Date('2012-01-01T00:00:00Z')
|
|
||||||
- const DEFAULT_OUTPUT_FILE = `file-migration-${new Date()
|
|
||||||
+ const DEFAULT_OUTPUT_FILE = `/var/log/overleaf/file-migration-${new Date()
|
|
||||||
.toISOString()
|
|
||||||
.replace(/[:.]/g, '_')}.log`
|
|
||||||
|
|
||||||
@@ -208,7 +208,7 @@ is equivalent to
|
|
||||||
PROCESS_HASHED_FILES: !args['skip-hashed-files'],
|
|
||||||
PROCESS_BLOBS: !args['skip-existing-blobs'],
|
|
||||||
DRY_RUN: args['dry-run'],
|
|
||||||
- OUTPUT_FILE: args.output,
|
|
||||||
+ OUTPUT_FILE: args.report ? '-' : args.output,
|
|
||||||
BATCH_RANGE_START,
|
|
||||||
BATCH_RANGE_END,
|
|
||||||
LOGGING_IDENTIFIER: args['logging-id'] || BATCH_RANGE_START,
|
|
||||||
@@ -256,6 +256,9 @@ const LOGGING_INTERVAL = parseInt(process.env.LOGGING_INTERVAL || '60000', 10)
|
|
||||||
const SLEEP_BEFORE_EXIT = parseInt(process.env.SLEEP_BEFORE_EXIT || '1000', 10)
|
|
||||||
|
|
||||||
// Log output to a file
|
|
||||||
+if (OUTPUT_FILE !== '-') {
|
|
||||||
+ console.warn(`Writing logs into ${OUTPUT_FILE}`)
|
|
||||||
+}
|
|
||||||
logger.initialize('file-migration', {
|
|
||||||
streams: [
|
|
||||||
{
|
|
||||||
diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
|
|
||||||
index f6f4a6fb76d..c661ae9bc3f 100644
|
|
||||||
--- a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
|
|
||||||
+++ b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
|
|
||||||
@@ -501,6 +501,7 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
timeout: TIMEOUT - 500,
|
|
||||||
env: {
|
|
||||||
...process.env,
|
|
||||||
+ AWS_SDK_JS_SUPPRESS_MAINTENANCE_MODE_MESSAGE: '1',
|
|
||||||
USER_FILES_BUCKET_NAME,
|
|
||||||
SLEEP_BEFORE_EXIT: '1',
|
|
||||||
...env,
|
|
||||||
@@ -516,6 +517,7 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
}
|
|
||||||
result = { stdout, stderr, status: code }
|
|
||||||
}
|
|
||||||
+ // Ensure no tmp folder is left behind.
|
|
||||||
expect((await fs.promises.readdir('/tmp')).join(';')).to.not.match(
|
|
||||||
/back_fill_file_hash/
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
index 09212d426e3..de4fca51db4 100644
|
|
||||||
--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
+++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
@@ -1373,7 +1373,18 @@ async function main() {
|
|
||||||
console.warn('Done.')
|
|
||||||
}
|
|
||||||
|
|
||||||
+async function cleanupBufferDir() {
|
|
||||||
+ try {
|
|
||||||
+ // Perform non-recursive removal of the BUFFER_DIR. Individual files
|
|
||||||
+ // should get removed in parallel as part of batch processing.
|
|
||||||
+ await fs.promises.rmdir(BUFFER_DIR)
|
|
||||||
+ } catch (err) {
|
|
||||||
+ console.error(`cleanup of BUFFER_DIR=${BUFFER_DIR} failed`, err)
|
|
||||||
+ }
|
|
||||||
+}
|
|
||||||
+
|
|
||||||
if (DISPLAY_REPORT) {
|
|
||||||
+ await cleanupBufferDir()
|
|
||||||
console.warn('Displaying report...')
|
|
||||||
await displayReport()
|
|
||||||
process.exit(0)
|
|
||||||
@@ -1384,13 +1395,7 @@ try {
|
|
||||||
await main()
|
|
||||||
} finally {
|
|
||||||
printStats(true)
|
|
||||||
- try {
|
|
||||||
- // Perform non-recursive removal of the BUFFER_DIR. Individual files
|
|
||||||
- // should get removed in parallel as part of batch processing.
|
|
||||||
- await fs.promises.rmdir(BUFFER_DIR)
|
|
||||||
- } catch (err) {
|
|
||||||
- console.error(`cleanup of BUFFER_DIR=${BUFFER_DIR} failed`, err)
|
|
||||||
- }
|
|
||||||
+ await cleanupBufferDir()
|
|
||||||
}
|
|
||||||
|
|
||||||
let code = 0
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
index de4fca51db4..e9a7721944c 100644
|
|
||||||
--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
+++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
@@ -316,6 +316,7 @@ async function getStatsForCollection(
|
|
||||||
projectsWithAllHashes: 0,
|
|
||||||
fileCount: 0,
|
|
||||||
fileWithHashCount: 0,
|
|
||||||
+ fileMissingInHistoryCount: 0,
|
|
||||||
}
|
|
||||||
// Pick a random sample of projects and estimate the number of files without hashes
|
|
||||||
const result = await collection
|
|
||||||
@@ -342,25 +343,43 @@ async function getStatsForCollection(
|
|
||||||
const filesWithoutHash = fileTree.match(/\{"_id":"[0-9a-f]{24}"\}/g) || []
|
|
||||||
// count the number of files with a hash, these are uniquely identified
|
|
||||||
// by the number of "hash" strings due to the filtering
|
|
||||||
- const filesWithHash = fileTree.match(/"hash"/g) || []
|
|
||||||
+ const filesWithHash = fileTree.match(/"hash":"[0-9a-f]{40}"/g) || []
|
|
||||||
stats.fileCount += filesWithoutHash.length + filesWithHash.length
|
|
||||||
stats.fileWithHashCount += filesWithHash.length
|
|
||||||
stats.projectCount++
|
|
||||||
stats.projectsWithAllHashes += filesWithoutHash.length === 0 ? 1 : 0
|
|
||||||
+ const projectId = project._id.toString()
|
|
||||||
+ const { blobs: perProjectBlobs } = await getProjectBlobsBatch([projectId])
|
|
||||||
+ const blobs = new Set(
|
|
||||||
+ (perProjectBlobs.get(projectId) || []).map(b => b.getHash())
|
|
||||||
+ )
|
|
||||||
+ const uniqueHashes = new Set(filesWithHash.map(m => m.slice(8, 48)))
|
|
||||||
+ for (const hash of uniqueHashes) {
|
|
||||||
+ if (blobs.has(hash) || GLOBAL_BLOBS.has(hash)) continue
|
|
||||||
+ stats.fileMissingInHistoryCount++
|
|
||||||
+ }
|
|
||||||
}
|
|
||||||
console.log(`Sampled stats for ${name}:`)
|
|
||||||
const fractionSampled = stats.projectCount / collectionCount
|
|
||||||
- const percentageSampled = (fractionSampled * 100).toFixed(1)
|
|
||||||
+ const percentageSampled = (fractionSampled * 100).toFixed(0)
|
|
||||||
const fractionConverted = stats.projectsWithAllHashes / stats.projectCount
|
|
||||||
- const percentageConverted = (fractionConverted * 100).toFixed(1)
|
|
||||||
+ const percentageConverted = (fractionConverted * 100).toFixed(0)
|
|
||||||
+ const fractionMissing = stats.fileMissingInHistoryCount / stats.fileCount
|
|
||||||
+ const percentageMissing = (fractionMissing * 100).toFixed(0)
|
|
||||||
console.log(
|
|
||||||
- `- Sampled ${name}: ${stats.projectCount} (${percentageSampled}%)`
|
|
||||||
+ `- Sampled ${name}: ${stats.projectCount} (${percentageSampled}% of all ${name})`
|
|
||||||
)
|
|
||||||
console.log(
|
|
||||||
`- Sampled ${name} with all hashes present: ${stats.projectsWithAllHashes}`
|
|
||||||
)
|
|
||||||
console.log(
|
|
||||||
- `- Percentage of ${name} converted: ${percentageConverted}% (estimated)`
|
|
||||||
+ `- Percentage of ${name} that need back-filling hashes: ${percentageConverted}% (estimated)`
|
|
||||||
+ )
|
|
||||||
+ console.log(
|
|
||||||
+ `- Sampled ${name} have ${stats.fileCount} files that need to be checked against the full project history system.`
|
|
||||||
+ )
|
|
||||||
+ console.log(
|
|
||||||
+ `- Sampled ${name} have ${stats.fileMissingInHistoryCount} files that need to be uploaded to the full project history system (estimating ${percentageMissing}% of all files).`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -369,13 +388,15 @@ async function getStatsForCollection(
|
|
||||||
* including counts and estimated progress based on a sample.
|
|
||||||
*/
|
|
||||||
async function displayReport() {
|
|
||||||
- const projectsCountResult = await projectsCollection.countDocuments()
|
|
||||||
+ const projectsCountResult = await projectsCollection.estimatedDocumentCount()
|
|
||||||
const deletedProjectsCountResult =
|
|
||||||
- await deletedProjectsCollection.countDocuments()
|
|
||||||
+ await deletedProjectsCollection.estimatedDocumentCount()
|
|
||||||
const sampleSize = 1000
|
|
||||||
console.log('Current status:')
|
|
||||||
- console.log(`- Projects: ${projectsCountResult}`)
|
|
||||||
- console.log(`- Deleted projects: ${deletedProjectsCountResult}`)
|
|
||||||
+ console.log(`- Total number of projects: ${projectsCountResult}`)
|
|
||||||
+ console.log(
|
|
||||||
+ `- Total number of deleted projects: ${deletedProjectsCountResult}`
|
|
||||||
+ )
|
|
||||||
console.log(`Sampling ${sampleSize} projects to estimate progress...`)
|
|
||||||
await getStatsForCollection(
|
|
||||||
sampleSize,
|
|
||||||
diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
|
|
||||||
index c661ae9bc3f..7248e74cb3f 100644
|
|
||||||
--- a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
|
|
||||||
+++ b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
|
|
||||||
@@ -481,21 +481,14 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
/**
|
|
||||||
* @param {Array<string>} args
|
|
||||||
* @param {Record<string, string>} env
|
|
||||||
- * @param {boolean} shouldHaveWritten
|
|
||||||
- * @return {Promise<{result, stats: any}>}
|
|
||||||
+ * @return {Promise<{result: { stdout: string, stderr: string, status: number }, stats: any}>}
|
|
||||||
*/
|
|
||||||
- async function tryRunScript(args = [], env = {}, shouldHaveWritten) {
|
|
||||||
+ async function rawRunScript(args = [], env = {}) {
|
|
||||||
let result
|
|
||||||
try {
|
|
||||||
result = await promisify(execFile)(
|
|
||||||
process.argv0,
|
|
||||||
- [
|
|
||||||
- 'storage/scripts/back_fill_file_hash.mjs',
|
|
||||||
- '--output=-',
|
|
||||||
- '--projects',
|
|
||||||
- '--deleted-projects',
|
|
||||||
- ...args,
|
|
||||||
- ],
|
|
||||||
+ ['storage/scripts/back_fill_file_hash.mjs', ...args],
|
|
||||||
{
|
|
||||||
encoding: 'utf-8',
|
|
||||||
timeout: TIMEOUT - 500,
|
|
||||||
@@ -521,6 +514,20 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
expect((await fs.promises.readdir('/tmp')).join(';')).to.not.match(
|
|
||||||
/back_fill_file_hash/
|
|
||||||
)
|
|
||||||
+ return result
|
|
||||||
+ }
|
|
||||||
+
|
|
||||||
+ /**
|
|
||||||
+ * @param {Array<string>} args
|
|
||||||
+ * @param {Record<string, string>} env
|
|
||||||
+ * @param {boolean} shouldHaveWritten
|
|
||||||
+ * @return {Promise<{result, stats: any}>}
|
|
||||||
+ */
|
|
||||||
+ async function tryRunScript(args = [], env = {}, shouldHaveWritten) {
|
|
||||||
+ const result = await rawRunScript(
|
|
||||||
+ ['--output=-', '--projects', '--deleted-projects', ...args],
|
|
||||||
+ env
|
|
||||||
+ )
|
|
||||||
const extraStatsKeys = ['eventLoop', 'readFromGCSThroughputMiBPerSecond']
|
|
||||||
const stats = JSON.parse(
|
|
||||||
result.stderr
|
|
||||||
@@ -1078,6 +1085,35 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
})
|
|
||||||
commonAssertions(true)
|
|
||||||
})
|
|
||||||
+ describe('report mode', function () {
|
|
||||||
+ let output
|
|
||||||
+ before('prepare environment', prepareEnvironment)
|
|
||||||
+ before('run script', async function () {
|
|
||||||
+ output = await rawRunScript(['--report'], {})
|
|
||||||
+ })
|
|
||||||
+ it('should print the report', () => {
|
|
||||||
+ expect(output.status).to.equal(0)
|
|
||||||
+ console.log(output.stdout)
|
|
||||||
+ expect(output.stdout).to.equal(`\
|
|
||||||
+Current status:
|
|
||||||
+- Total number of projects: 10
|
|
||||||
+- Total number of deleted projects: 5
|
|
||||||
+Sampling 1000 projects to estimate progress...
|
|
||||||
+Sampled stats for projects:
|
|
||||||
+- Sampled projects: 9 (90% of all projects)
|
|
||||||
+- Sampled projects with all hashes present: 5
|
|
||||||
+- Percentage of projects that need back-filling hashes: 56% (estimated)
|
|
||||||
+- Sampled projects have 11 files that need to be checked against the full project history system.
|
|
||||||
+- Sampled projects have 3 files that need to be uploaded to the full project history system (estimating 27% of all files).
|
|
||||||
+Sampled stats for deleted projects:
|
|
||||||
+- Sampled deleted projects: 4 (80% of all deleted projects)
|
|
||||||
+- Sampled deleted projects with all hashes present: 3
|
|
||||||
+- Percentage of deleted projects that need back-filling hashes: 75% (estimated)
|
|
||||||
+- Sampled deleted projects have 2 files that need to be checked against the full project history system.
|
|
||||||
+- Sampled deleted projects have 1 files that need to be uploaded to the full project history system (estimating 50% of all files).
|
|
||||||
+`)
|
|
||||||
+ })
|
|
||||||
+ })
|
|
||||||
|
|
||||||
describe('full run in dry-run mode', function () {
|
|
||||||
let output
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
index e9a7721944c..9c2a9818680 100644
|
|
||||||
--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
+++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
|
|
||||||
@@ -79,7 +79,7 @@ ObjectId.cacheHexString = true
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
- * @return {{PROJECT_IDS_FROM: string, PROCESS_HASHED_FILES: boolean, LOGGING_IDENTIFIER: string, BATCH_RANGE_START: string, BATCH_RANGE_END: string, PROCESS_NON_DELETED_PROJECTS: boolean, PROCESS_DELETED_PROJECTS: boolean, PROCESS_BLOBS: boolean, DRY_RUN: boolean, OUTPUT_FILE: string, DISPLAY_REPORT: boolean}}
|
|
||||||
+ * @return {{PROJECT_IDS_FROM: string, PROCESS_HASHED_FILES: boolean, LOGGING_IDENTIFIER: string, BATCH_RANGE_START: string, BATCH_RANGE_END: string, PROCESS_NON_DELETED_PROJECTS: boolean, PROCESS_DELETED_PROJECTS: boolean, PROCESS_BLOBS: boolean, DRY_RUN: boolean, OUTPUT_FILE: string, DISPLAY_REPORT: boolean, CONCURRENCY: number, CONCURRENT_BATCHES: number, RETRIES: number, RETRY_DELAY_MS: number, RETRY_FILESTORE_404: boolean, BUFFER_DIR_PREFIX: string, STREAM_HIGH_WATER_MARK: number, LOGGING_INTERVAL: number, SLEEP_BEFORE_EXIT: number }}
|
|
||||||
*/
|
|
||||||
function parseArgs() {
|
|
||||||
const PUBLIC_LAUNCH_DATE = new Date('2012-01-01T00:00:00Z')
|
|
||||||
@@ -95,6 +95,12 @@ function parseArgs() {
|
|
||||||
{ name: 'skip-hashed-files', type: Boolean },
|
|
||||||
{ name: 'skip-existing-blobs', type: Boolean },
|
|
||||||
{ name: 'from-file', type: String, defaultValue: '' },
|
|
||||||
+ { name: 'concurrency', type: Number, defaultValue: 10 },
|
|
||||||
+ { name: 'concurrent-batches', type: Number, defaultValue: 1 },
|
|
||||||
+ { name: 'stream-high-water-mark', type: Number, defaultValue: 1024 * 1024 },
|
|
||||||
+ { name: 'retries', type: Number, defaultValue: 10 },
|
|
||||||
+ { name: 'retry-delay-ms', type: Number, defaultValue: 100 },
|
|
||||||
+ { name: 'retry-filestore-404', type: Boolean },
|
|
||||||
{ name: 'dry-run', alias: 'n', type: Boolean },
|
|
||||||
{
|
|
||||||
name: 'output',
|
|
||||||
@@ -114,6 +120,13 @@ function parseArgs() {
|
|
||||||
defaultValue: new Date().toISOString(),
|
|
||||||
},
|
|
||||||
{ name: 'logging-id', type: String, defaultValue: '' },
|
|
||||||
+ { name: 'logging-interval-ms', type: Number, defaultValue: 60_000 },
|
|
||||||
+ {
|
|
||||||
+ name: 'buffer-dir-prefix',
|
|
||||||
+ type: String,
|
|
||||||
+ defaultValue: '/tmp/back_fill_file_hash-',
|
|
||||||
+ },
|
|
||||||
+ { name: 'sleep-before-exit-ms', type: Number, defaultValue: 1_000 },
|
|
||||||
])
|
|
||||||
|
|
||||||
// If no arguments are provided, display a usage message
|
|
||||||
@@ -143,6 +156,8 @@ Logging options:
|
|
||||||
(default: file-migration-<timestamp>.log)
|
|
||||||
--logging-id <id> Identifier for logging
|
|
||||||
(default: BATCH_RANGE_START)
|
|
||||||
+ --logging-interval-ms <ms> Interval for logging progres stats
|
|
||||||
+ (default: 60000, 1min)
|
|
||||||
|
|
||||||
Batch range options:
|
|
||||||
--BATCH_RANGE_START <date> Start date for processing
|
|
||||||
@@ -150,10 +165,30 @@ Batch range options:
|
|
||||||
--BATCH_RANGE_END <date> End date for processing
|
|
||||||
(default: ${args.BATCH_RANGE_END})
|
|
||||||
|
|
||||||
+Concurrency:
|
|
||||||
+ --concurrency <n> Number of files to process concurrently
|
|
||||||
+ (default: 10)
|
|
||||||
+ --concurrent-batches <n> Number of project batches to process concurrently
|
|
||||||
+ (default: 1)
|
|
||||||
+ --stream-high-water-mark n In-Memory buffering threshold
|
|
||||||
+ (default: 1MiB)
|
|
||||||
+
|
|
||||||
+Retries:
|
|
||||||
+ --retries <n> Number of times to retry processing a file
|
|
||||||
+ (default: 10)
|
|
||||||
+ --retry-delay-ms <ms> How long to wait before processing a file again
|
|
||||||
+ (default: 100, 100ms)
|
|
||||||
+ --retry-filestore-404 Retry downloading a file when receiving a 404
|
|
||||||
+ (default: false)
|
|
||||||
+
|
|
||||||
Other options:
|
|
||||||
--report Display a report of the current status
|
|
||||||
--dry-run, -n Perform a dry run without making changes
|
|
||||||
--help, -h Show this help message
|
|
||||||
+ --buffer-dir-prefix <p> Folder/prefix for buffering files on disk
|
|
||||||
+ (default: ${args['buffer-dir-prefix']})
|
|
||||||
+ --sleep-before-exit-ms <n> Defer exiting from the script
|
|
||||||
+ (default: 1000, 1s)
|
|
||||||
|
|
||||||
Typical usage:
|
|
||||||
|
|
||||||
@@ -212,8 +247,17 @@ is equivalent to
|
|
||||||
BATCH_RANGE_START,
|
|
||||||
BATCH_RANGE_END,
|
|
||||||
LOGGING_IDENTIFIER: args['logging-id'] || BATCH_RANGE_START,
|
|
||||||
+ LOGGING_INTERVAL: args['logging-interval-ms'],
|
|
||||||
PROJECT_IDS_FROM: args['from-file'],
|
|
||||||
DISPLAY_REPORT: args.report,
|
|
||||||
+ CONCURRENCY: args.concurrency,
|
|
||||||
+ CONCURRENT_BATCHES: args['concurrent-batches'],
|
|
||||||
+ STREAM_HIGH_WATER_MARK: args['stream-high-water-mark'],
|
|
||||||
+ RETRIES: args.retries,
|
|
||||||
+ RETRY_DELAY_MS: args['retry-delay-ms'],
|
|
||||||
+ RETRY_FILESTORE_404: args['retry-filestore-404'],
|
|
||||||
+ BUFFER_DIR_PREFIX: args['buffer-dir-prefix'],
|
|
||||||
+ SLEEP_BEFORE_EXIT: args['sleep-before-exit-ms'],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -229,6 +273,15 @@ const {
|
|
||||||
LOGGING_IDENTIFIER,
|
|
||||||
PROJECT_IDS_FROM,
|
|
||||||
DISPLAY_REPORT,
|
|
||||||
+ CONCURRENCY,
|
|
||||||
+ CONCURRENT_BATCHES,
|
|
||||||
+ RETRIES,
|
|
||||||
+ RETRY_DELAY_MS,
|
|
||||||
+ RETRY_FILESTORE_404,
|
|
||||||
+ BUFFER_DIR_PREFIX,
|
|
||||||
+ STREAM_HIGH_WATER_MARK,
|
|
||||||
+ LOGGING_INTERVAL,
|
|
||||||
+ SLEEP_BEFORE_EXIT,
|
|
||||||
} = parseArgs()
|
|
||||||
|
|
||||||
// We need to handle the start and end differently as ids of deleted projects are created at time of deletion.
|
|
||||||
@@ -236,24 +289,7 @@ if (process.env.BATCH_RANGE_START || process.env.BATCH_RANGE_END) {
|
|
||||||
throw new Error('use --BATCH_RANGE_START and --BATCH_RANGE_END')
|
|
||||||
}
|
|
||||||
|
|
||||||
-// Concurrency for downloading from GCS and updating hashes in mongo
|
|
||||||
-const CONCURRENCY = parseInt(process.env.CONCURRENCY || '100', 10)
|
|
||||||
-const CONCURRENT_BATCHES = parseInt(process.env.CONCURRENT_BATCHES || '2', 10)
|
|
||||||
-// Retries for processing a given file
|
|
||||||
-const RETRIES = parseInt(process.env.RETRIES || '10', 10)
|
|
||||||
-const RETRY_DELAY_MS = parseInt(process.env.RETRY_DELAY_MS || '100', 10)
|
|
||||||
-
|
|
||||||
-const RETRY_FILESTORE_404 = process.env.RETRY_FILESTORE_404 === 'true'
|
|
||||||
-const BUFFER_DIR = fs.mkdtempSync(
|
|
||||||
- process.env.BUFFER_DIR_PREFIX || '/tmp/back_fill_file_hash-'
|
|
||||||
-)
|
|
||||||
-// https://nodejs.org/api/stream.html#streamgetdefaulthighwatermarkobjectmode
|
|
||||||
-const STREAM_HIGH_WATER_MARK = parseInt(
|
|
||||||
- process.env.STREAM_HIGH_WATER_MARK || (64 * 1024).toString(),
|
|
||||||
- 10
|
|
||||||
-)
|
|
||||||
-const LOGGING_INTERVAL = parseInt(process.env.LOGGING_INTERVAL || '60000', 10)
|
|
||||||
-const SLEEP_BEFORE_EXIT = parseInt(process.env.SLEEP_BEFORE_EXIT || '1000', 10)
|
|
||||||
+const BUFFER_DIR = fs.mkdtempSync(BUFFER_DIR_PREFIX)
|
|
||||||
|
|
||||||
// Log output to a file
|
|
||||||
if (OUTPUT_FILE !== '-') {
|
|
||||||
@@ -416,7 +452,7 @@ async function displayReport() {
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
-// Filestore endpoint location
|
|
||||||
+// Filestore endpoint location (configured by /etc/overleaf/env.sh)
|
|
||||||
const FILESTORE_HOST = process.env.FILESTORE_HOST || '127.0.0.1'
|
|
||||||
const FILESTORE_PORT = process.env.FILESTORE_PORT || '3009'
|
|
||||||
|
|
||||||
diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
|
|
||||||
index 7248e74cb3f..601cea13b6a 100644
|
|
||||||
--- a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
|
|
||||||
+++ b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
|
|
||||||
@@ -61,9 +61,8 @@ function objectIdFromTime(timestamp) {
|
|
||||||
|
|
||||||
const PRINT_IDS_AND_HASHES_FOR_DEBUGGING = false
|
|
||||||
|
|
||||||
-describe('back_fill_file_hash script', function () {
|
|
||||||
+describe.only('back_fill_file_hash script', function () {
|
|
||||||
this.timeout(TIMEOUT)
|
|
||||||
- const USER_FILES_BUCKET_NAME = 'fake-user-files-gcs'
|
|
||||||
|
|
||||||
const projectId0 = objectIdFromTime('2017-01-01T00:00:00Z')
|
|
||||||
const projectId1 = objectIdFromTime('2017-01-01T00:01:00Z')
|
|
||||||
@@ -480,24 +479,24 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Array<string>} args
|
|
||||||
- * @param {Record<string, string>} env
|
|
||||||
* @return {Promise<{result: { stdout: string, stderr: string, status: number }, stats: any}>}
|
|
||||||
*/
|
|
||||||
- async function rawRunScript(args = [], env = {}) {
|
|
||||||
+ async function rawRunScript(args = []) {
|
|
||||||
let result
|
|
||||||
try {
|
|
||||||
result = await promisify(execFile)(
|
|
||||||
process.argv0,
|
|
||||||
- ['storage/scripts/back_fill_file_hash.mjs', ...args],
|
|
||||||
+ [
|
|
||||||
+ 'storage/scripts/back_fill_file_hash.mjs',
|
|
||||||
+ '--sleep-before-exit-ms=1',
|
|
||||||
+ ...args,
|
|
||||||
+ ],
|
|
||||||
{
|
|
||||||
encoding: 'utf-8',
|
|
||||||
timeout: TIMEOUT - 500,
|
|
||||||
env: {
|
|
||||||
...process.env,
|
|
||||||
AWS_SDK_JS_SUPPRESS_MAINTENANCE_MODE_MESSAGE: '1',
|
|
||||||
- USER_FILES_BUCKET_NAME,
|
|
||||||
- SLEEP_BEFORE_EXIT: '1',
|
|
||||||
- ...env,
|
|
||||||
LOG_LEVEL: 'warn', // Override LOG_LEVEL of acceptance tests
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@@ -519,15 +518,16 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Array<string>} args
|
|
||||||
- * @param {Record<string, string>} env
|
|
||||||
* @param {boolean} shouldHaveWritten
|
|
||||||
* @return {Promise<{result, stats: any}>}
|
|
||||||
*/
|
|
||||||
- async function tryRunScript(args = [], env = {}, shouldHaveWritten) {
|
|
||||||
- const result = await rawRunScript(
|
|
||||||
- ['--output=-', '--projects', '--deleted-projects', ...args],
|
|
||||||
- env
|
|
||||||
- )
|
|
||||||
+ async function tryRunScript(args = [], shouldHaveWritten) {
|
|
||||||
+ const result = await rawRunScript([
|
|
||||||
+ '--output=-',
|
|
||||||
+ '--projects',
|
|
||||||
+ '--deleted-projects',
|
|
||||||
+ ...args,
|
|
||||||
+ ])
|
|
||||||
const extraStatsKeys = ['eventLoop', 'readFromGCSThroughputMiBPerSecond']
|
|
||||||
const stats = JSON.parse(
|
|
||||||
result.stderr
|
|
||||||
@@ -558,12 +558,11 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Array<string>} args
|
|
||||||
- * @param {Record<string, string>} env
|
|
||||||
* @param {boolean} shouldHaveWritten
|
|
||||||
* @return {Promise<{result, stats: any}>}
|
|
||||||
*/
|
|
||||||
- async function runScript(args = [], env = {}, shouldHaveWritten = true) {
|
|
||||||
- const { stats, result } = await tryRunScript(args, env, shouldHaveWritten)
|
|
||||||
+ async function runScript(args = [], shouldHaveWritten = true) {
|
|
||||||
+ const { stats, result } = await tryRunScript(args, shouldHaveWritten)
|
|
||||||
if (result.status !== 0) {
|
|
||||||
console.log(result)
|
|
||||||
expect(result).to.have.property('status', 0)
|
|
||||||
@@ -812,7 +811,6 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
it('should process nothing on re-run', async function () {
|
|
||||||
const rerun = await runScript(
|
|
||||||
!processHashedFiles ? ['--skip-hashed-files'] : [],
|
|
||||||
- {},
|
|
||||||
false
|
|
||||||
)
|
|
||||||
let stats = {
|
|
||||||
@@ -937,10 +935,11 @@ describe('back_fill_file_hash script', function () {
|
|
||||||
it('should gracefully handle fatal errors', async function () {
|
|
||||||
      mockFilestore.deleteObject(projectId0, fileId0)
      const t0 = Date.now()
-      const { stats, result } = await tryRunScript(['--skip-hashed-files'], {
-        RETRIES: '10',
-        RETRY_DELAY_MS: '1000',
-      })
+      const { stats, result } = await tryRunScript([
+        '--skip-hashed-files',
+        '--retries=10',
+        '--retry-delay-ms=1000',
+      ])
      const t1 = Date.now()
      expectNotFoundError(result, 'failed to process file')
      expect(result.status).to.equal(1)
@@ -972,11 +971,12 @@ describe('back_fill_file_hash script', function () {
          value: { stats, result },
        },
      ] = await Promise.allSettled([
-        tryRunScript(['--skip-hashed-files'], {
-          RETRY_DELAY_MS: '100',
-          RETRIES: '60',
-          RETRY_FILESTORE_404: 'true', // 404s are the easiest to simulate in tests
-        }),
+        tryRunScript([
+          '--skip-hashed-files',
+          '--retries=60',
+          '--retry-delay-ms=1000',
+          '--retry-filestore-404',
+        ]),
        restoreFileAfter5s(),
      ])
      expectNotFoundError(result, 'failed to process file, trying again')
@@ -998,9 +998,7 @@ describe('back_fill_file_hash script', function () {
    let output
    before('prepare environment', prepareEnvironment)
    before('run script', async function () {
-      output = await runScript(['--skip-hashed-files'], {
-        CONCURRENCY: '1',
-      })
+      output = await runScript(['--skip-hashed-files', '--concurrency=1'])
    })

    /**
@@ -1067,10 +1065,10 @@ describe('back_fill_file_hash script', function () {
    let output1, output2
    before('prepare environment', prepareEnvironment)
    before('run script without hashed files', async function () {
-      output1 = await runScript(['--skip-hashed-files'], {})
+      output1 = await runScript(['--skip-hashed-files'])
    })
    before('run script with hashed files', async function () {
-      output2 = await runScript([], {})
+      output2 = await runScript([])
    })
    it('should print stats for the first run without hashed files', function () {
      expect(output1.stats).deep.equal(STATS_ALL)
@@ -1089,7 +1087,7 @@ describe('back_fill_file_hash script', function () {
    let output
    before('prepare environment', prepareEnvironment)
    before('run script', async function () {
-      output = await rawRunScript(['--report'], {})
+      output = await rawRunScript(['--report'])
    })
    it('should print the report', () => {
      expect(output.status).to.equal(0)
@@ -1127,13 +1125,7 @@ Sampled stats for deleted projects:
        .toArray()
    })
    before('run script', async function () {
-      output = await runScript(
-        ['--dry-run'],
-        {
-          CONCURRENCY: '1',
-        },
-        false
-      )
+      output = await runScript(['--dry-run', '--concurrency=1'], false)
    })

    it('should print stats for dry-run mode', function () {
@@ -1174,9 +1166,7 @@ Sampled stats for deleted projects:
    let output
    before('prepare environment', prepareEnvironment)
    before('run script', async function () {
-      output = await runScript(['--skip-hashed-files'], {
-        CONCURRENCY: '10',
-      })
+      output = await runScript(['--skip-hashed-files', '--concurrency=10'])
    })
    it('should print stats', function () {
      expect(output.stats).deep.equal(STATS_ALL)
@@ -1184,13 +1174,14 @@ Sampled stats for deleted projects:
      commonAssertions()
    })

-  describe('full run STREAM_HIGH_WATER_MARK=1MB', function () {
+  describe('full run STREAM_HIGH_WATER_MARK=64kiB', function () {
    let output
    before('prepare environment', prepareEnvironment)
    before('run script', async function () {
-      output = await runScript(['--skip-hashed-files'], {
-        STREAM_HIGH_WATER_MARK: (1024 * 1024).toString(),
-      })
+      output = await runScript([
+        '--skip-hashed-files',
+        `--stream-high-water-mark=${64 * 1024}`,
+      ])
    })
    it('should print stats', function () {
      expect(output.stats).deep.equal(STATS_ALL)
@@ -1202,7 +1193,7 @@ Sampled stats for deleted projects:
    let output
    before('prepare environment', prepareEnvironment)
    before('run script', async function () {
-      output = await runScript([], {})
+      output = await runScript([])
    })
    it('should print stats', function () {
      expect(output.stats).deep.equal(
@@ -1231,9 +1222,7 @@ Sampled stats for deleted projects:
    })
    let output
    before('run script', async function () {
-      output = await runScript(['--skip-hashed-files'], {
-        CONCURRENCY: '1',
-      })
+      output = await runScript(['--skip-hashed-files', '--concurrency=1'])
    })

    it('should print stats', function () {
@@ -1252,20 +1241,18 @@ Sampled stats for deleted projects:
    let outputPart0, outputPart1
    before('prepare environment', prepareEnvironment)
    before('run script on part 0', async function () {
-      outputPart0 = await runScript(
-        ['--skip-hashed-files', `--BATCH_RANGE_END=${edge}`],
-        {
-          CONCURRENCY: '1',
-        }
-      )
+      outputPart0 = await runScript([
+        '--skip-hashed-files',
+        `--BATCH_RANGE_END=${edge}`,
+        '--concurrency=1',
+      ])
    })
    before('run script on part 1', async function () {
-      outputPart1 = await runScript(
-        ['--skip-hashed-files', `--BATCH_RANGE_START=${edge}`],
-        {
-          CONCURRENCY: '1',
-        }
-      )
+      outputPart1 = await runScript([
+        '--skip-hashed-files',
+        `--BATCH_RANGE_START=${edge}`,
+        '--concurrency=1',
+      ])
    })

    it('should print stats for part 0', function () {
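The hunks above all make the same change: tuning options that the acceptance tests previously injected as environment variables (CONCURRENCY, RETRIES, RETRY_DELAY_MS, RETRY_FILESTORE_404, STREAM_HIGH_WATER_MARK) are now passed to the back-fill script as command-line flags (`--concurrency`, `--retries`, `--retry-delay-ms`, `--retry-filestore-404`, `--stream-high-water-mark`). The sketch below shows one way such flags could be parsed with minimist, which this repository already uses in other scripts; it is illustrative only, since the script's actual option handling is not part of this diff, and the default values are assumptions.

```js
// Illustrative sketch only: parsing the flags exercised by the tests above
// with minimist. Not the real back_fill_file_hash.mjs option handling;
// defaults are placeholders.
import minimist from 'minimist'

const args = minimist(process.argv.slice(2), {
  boolean: ['skip-hashed-files', 'dry-run', 'retry-filestore-404'],
  default: {
    concurrency: 10,
    retries: 10,
    'retry-delay-ms': 1000,
    'stream-high-water-mark': 64 * 1024,
  },
})

const options = {
  skipHashedFiles: args['skip-hashed-files'],
  dryRun: args['dry-run'],
  retryFilestore404: args['retry-filestore-404'],
  concurrency: Number(args.concurrency),
  retries: Number(args.retries),
  retryDelayMs: Number(args['retry-delay-ms']),
  streamHighWaterMark: Number(args['stream-high-water-mark']),
}

// e.g. node back_fill_file_hash.mjs --skip-hashed-files --retries=10 --retry-delay-ms=1000
console.log(options)
```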
@@ -1,79 +0,0 @@

diff --git a/services/web/scripts/find_malformed_filetrees.mjs b/services/web/scripts/find_malformed_filetrees.mjs
index 2614c7d6223c..9f0f6323ab8d 100644
--- a/services/web/scripts/find_malformed_filetrees.mjs
+++ b/services/web/scripts/find_malformed_filetrees.mjs
@@ -49,7 +49,7 @@ async function main(trackProgress) {
  for (const project of projects) {
    projectsProcessed += 1
    if (projectsProcessed % 100000 === 0) {
-      console.log(projectsProcessed, 'projects processed')
+      console.warn(projectsProcessed, 'projects processed')
    }
    const projectId = project._id.toString()
    for (const { reason, path, _id } of processProject(project)) {

diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
index 9c2a9818680a..33962c5da7d4 100644
--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
@@ -399,7 +399,8 @@ async function getStatsForCollection(
  const fractionSampled = stats.projectCount / collectionCount
  const percentageSampled = (fractionSampled * 100).toFixed(0)
  const fractionConverted = stats.projectsWithAllHashes / stats.projectCount
-  const percentageConverted = (fractionConverted * 100).toFixed(0)
+  const fractionToBackFill = 1 - fractionConverted
+  const percentageToBackFill = (fractionToBackFill * 100).toFixed(0)
  const fractionMissing = stats.fileMissingInHistoryCount / stats.fileCount
  const percentageMissing = (fractionMissing * 100).toFixed(0)
  console.log(
@@ -409,7 +410,7 @@ async function getStatsForCollection(
    `- Sampled ${name} with all hashes present: ${stats.projectsWithAllHashes}`
  )
  console.log(
-    `- Percentage of ${name} that need back-filling hashes: ${percentageConverted}% (estimated)`
+    `- Percentage of ${name} that need back-filling hashes: ${percentageToBackFill}% (estimated)`
  )
  console.log(
    `- Sampled ${name} have ${stats.fileCount} files that need to be checked against the full project history system.`
diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
index 601cea13b6a6..bfcf93228d9e 100644
--- a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
+++ b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
@@ -61,7 +61,7 @@ function objectIdFromTime(timestamp) {

const PRINT_IDS_AND_HASHES_FOR_DEBUGGING = false

-describe.only('back_fill_file_hash script', function () {
+describe('back_fill_file_hash script', function () {
  this.timeout(TIMEOUT)

  const projectId0 = objectIdFromTime('2017-01-01T00:00:00Z')
@@ -1091,7 +1091,6 @@ describe.only('back_fill_file_hash script', function () {
    })
    it('should print the report', () => {
      expect(output.status).to.equal(0)
-      console.log(output.stdout)
      expect(output.stdout).to.equal(`\
Current status:
- Total number of projects: 10
@@ -1100,13 +1099,13 @@ Sampling 1000 projects to estimate progress...
Sampled stats for projects:
- Sampled projects: 9 (90% of all projects)
- Sampled projects with all hashes present: 5
-- Percentage of projects that need back-filling hashes: 56% (estimated)
+- Percentage of projects that need back-filling hashes: 44% (estimated)
- Sampled projects have 11 files that need to be checked against the full project history system.
- Sampled projects have 3 files that need to be uploaded to the full project history system (estimating 27% of all files).
Sampled stats for deleted projects:
- Sampled deleted projects: 4 (80% of all deleted projects)
- Sampled deleted projects with all hashes present: 3
-- Percentage of deleted projects that need back-filling hashes: 75% (estimated)
+- Percentage of deleted projects that need back-filling hashes: 25% (estimated)
- Sampled deleted projects have 2 files that need to be checked against the full project history system.
- Sampled deleted projects have 1 files that need to be uploaded to the full project history system (estimating 50% of all files).
`)
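The fix in `getStatsForCollection` corrects an inverted percentage: the report previously printed the share of sampled projects that already have all hashes as if it were the share still needing back-filling. A worked check with the figures from the updated test expectations (9 sampled projects, 5 of them fully hashed):

```js
// Worked example using the numbers from the updated test output above:
// 9 sampled projects, 5 of which already have all hashes present.
const projectCount = 9
const projectsWithAllHashes = 5

const fractionConverted = projectsWithAllHashes / projectCount // ~0.556
const fractionToBackFill = 1 - fractionConverted // ~0.444

console.log((fractionConverted * 100).toFixed(0)) // "56", what the report printed before the fix
console.log((fractionToBackFill * 100).toFixed(0)) // "44", what the fixed report prints
```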
@@ -1,165 +0,0 @@
diff --git a/services/web/app/src/Features/Collaborators/OwnershipTransferHandler.js b/services/web/app/src/Features/Collaborators/OwnershipTransferHandler.js
index e22818ebb880..81ec5ccb0aa5 100644
--- a/services/web/app/src/Features/Collaborators/OwnershipTransferHandler.js
+++ b/services/web/app/src/Features/Collaborators/OwnershipTransferHandler.js
@@ -9,9 +9,75 @@ const PrivilegeLevels = require('../Authorization/PrivilegeLevels')
const TpdsProjectFlusher = require('../ThirdPartyDataStore/TpdsProjectFlusher')
const ProjectAuditLogHandler = require('../Project/ProjectAuditLogHandler')
const AnalyticsManager = require('../Analytics/AnalyticsManager')
+const OError = require('@overleaf/o-error')
+const TagsHandler = require('../Tags/TagsHandler')
+const { promiseMapWithLimit } = require('@overleaf/promise-utils')

module.exports = {
-  promises: { transferOwnership },
+  promises: {
+    transferOwnership,
+    transferAllProjectsToUser,
+  },
+}
+
+const TAG_COLOR_BLUE = '#434AF0'
+
+/**
+ * @param {string} fromUserId
+ * @param {string} toUserId
+ * @param {string} ipAddress
+ * @return {Promise<{projectCount: number, newTagName: string}>}
+ */
+async function transferAllProjectsToUser({ fromUserId, toUserId, ipAddress }) {
+  // - Verify that both users exist
+  const fromUser = await UserGetter.promises.getUser(fromUserId, {
+    _id: 1,
+    email: 1,
+  })
+  const toUser = await UserGetter.promises.getUser(toUserId, { _id: 1 })
+  if (!fromUser) throw new OError('missing source user', { fromUserId })
+  if (!toUser) throw new OError('missing destination user', { toUserId })
+  if (fromUser._id.equals(toUser._id))
+    throw new OError('rejecting transfer between identical users', {
+      fromUserId,
+      toUserId,
+    })
+  logger.debug(
+    { fromUserId, toUserId },
+    'started bulk transfer of all projects from one user to another'
+  )
+  // - Get all owned projects for fromUserId
+  const projects = await Project.find({ owner_ref: fromUserId }, { _id: 1 })
+
+  // - Create new tag on toUserId
+  const newTag = await TagsHandler.promises.createTag(
+    toUserId,
+    `transferred-from-${fromUser.email}`,
+    TAG_COLOR_BLUE,
+    { truncate: true }
+  )
+
+  // - Add tag to projects (can happen before ownership is transferred)
+  await TagsHandler.promises.addProjectsToTag(
+    toUserId,
+    newTag._id,
+    projects.map(p => p._id)
+  )
+
+  // - Transfer all projects
+  await promiseMapWithLimit(5, projects, async project => {
+    await transferOwnership(project._id, toUserId, {
+      allowTransferToNonCollaborators: true,
+      skipEmails: true,
+      ipAddress,
+    })
+  })
+
+  logger.debug(
+    { fromUserId, toUserId },
+    'finished bulk transfer of all projects from one user to another'
+  )
+  return { projectCount: projects.length, newTagName: newTag.name }
}

async function transferOwnership(projectId, newOwnerId, options = {}) {
@@ -74,8 +140,8 @@ async function transferOwnership(projectId, newOwnerId, options = {}) {
  await TpdsProjectFlusher.promises.flushProjectToTpds(projectId)

  // Send confirmation emails
-  const previousOwner = await UserGetter.promises.getUser(previousOwnerId)
  if (!skipEmails) {
+    const previousOwner = await UserGetter.promises.getUser(previousOwnerId)
    await _sendEmails(project, previousOwner, newOwner)
  }
}
diff --git a/services/web/modules/server-ce-scripts/scripts/transfer-all-projects-to-user.mjs b/services/web/modules/server-ce-scripts/scripts/transfer-all-projects-to-user.mjs
new file mode 100644
index 000000000000..6ff1215de53b
--- /dev/null
+++ b/services/web/modules/server-ce-scripts/scripts/transfer-all-projects-to-user.mjs
@@ -0,0 +1,46 @@
+import { ObjectId } from 'mongodb'
+import minimist from 'minimist'
+import OwnershipTransferHandler from '../../../app/src/Features/Collaborators/OwnershipTransferHandler.js'
+import UserGetter from '../../../app/src/Features/User/UserGetter.js'
+import EmailHelper from '../../../app/src/Features/Helpers/EmailHelper.js'
+
+const args = minimist(process.argv.slice(2), {
+  string: ['from-user', 'to-user'],
+})
+
+/**
+ * @param {string} flag
+ * @return {Promise<string>}
+ */
+async function resolveUser(flag) {
+  const raw = args[flag]
+  if (!raw) throw new Error(`missing parameter --${flag}`)
+  if (ObjectId.isValid(raw)) return raw
+  const email = EmailHelper.parseEmail(raw)
+  if (!email) throw new Error(`invalid email --${flag}=${raw}`)
+  const user = await UserGetter.promises.getUser({ email: email }, { _id: 1 })
+  if (!user)
+    throw new Error(`user with email --${flag}=${email} does not exist`)
+  return user._id.toString()
+}
+
+async function main() {
+  const fromUserId = await resolveUser('from-user')
+  const toUserId = await resolveUser('to-user')
+  await OwnershipTransferHandler.promises.transferAllProjectsToUser({
+    fromUserId,
+    toUserId,
+    ipAddress: '0.0.0.0',
+  })
+}
+
+main()
+  .then(() => {
+    console.error('Done.')
+    process.exit(0)
+  })
+  .catch(err => {
+    console.error('---')
+    console.error(err)
+    process.exit(1)
+  })

diff --git a/services/web/modules/server-ce-scripts/scripts/transfer-all-projects-to-user.mjs b/services/web/modules/server-ce-scripts/scripts/transfer-all-projects-to-user.mjs
index 6ff1215de53b..8c5951334403 100644
--- a/services/web/modules/server-ce-scripts/scripts/transfer-all-projects-to-user.mjs
+++ b/services/web/modules/server-ce-scripts/scripts/transfer-all-projects-to-user.mjs
@@ -1,4 +1,4 @@
-import { ObjectId } from 'mongodb'
+import { ObjectId } from '../../../app/src/infrastructure/mongodb.js'
import minimist from 'minimist'
import OwnershipTransferHandler from '../../../app/src/Features/Collaborators/OwnershipTransferHandler.js'
import UserGetter from '../../../app/src/Features/User/UserGetter.js'
@@ -18,7 +18,7 @@ async function resolveUser(flag) {
  if (ObjectId.isValid(raw)) return raw
  const email = EmailHelper.parseEmail(raw)
  if (!email) throw new Error(`invalid email --${flag}=${raw}`)
-  const user = await UserGetter.promises.getUser({ email: email }, { _id: 1 })
+  const user = await UserGetter.promises.getUser({ email }, { _id: 1 })
  if (!user)
    throw new Error(`user with email --${flag}=${email} does not exist`)
  return user._id.toString()
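The new `transferAllProjectsToUser` helper caps itself at five concurrent `transferOwnership` calls by using `promiseMapWithLimit` from `@overleaf/promise-utils`. The sketch below illustrates what a concurrency-limited map of that kind does; it is not the library's actual implementation.

```js
// Illustrative only, not the @overleaf/promise-utils implementation:
// run `worker` over `items` with at most `limit` calls in flight at once,
// preserving result order.
async function mapWithLimit(limit, items, worker) {
  const results = new Array(items.length)
  let next = 0
  async function lane() {
    while (next < items.length) {
      const i = next++ // claims are race-free between awaits in single-threaded JS
      results[i] = await worker(items[i])
    }
  }
  const lanes = Array.from({ length: Math.min(limit, items.length) }, lane)
  await Promise.all(lanes)
  return results
}

// Example: double four numbers, two at a time.
mapWithLimit(2, [1, 2, 3, 4], async n => n * 2).then(console.log) // [2, 4, 6, 8]
```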
@@ -1,13 +0,0 @@
diff --git a/services/web/modules/authentication/saml/app/src/SAMLModuleManager.mjs b/services/web/modules/authentication/saml/app/src/SAMLModuleManager.mjs
index 29e9ae52cd..335ace8209 100644
--- a/services/web/modules/authentication/saml/app/src/SAMLModuleManager.mjs
+++ b/services/web/modules/authentication/saml/app/src/SAMLModuleManager.mjs
@@ -26,7 +26,7 @@ const SAMLModuleManager = {
  callbackUrl: `${Settings.siteUrl.replace(/\/+$/, '')}/saml/login/callback`,
  issuer: process.env.OVERLEAF_SAML_ISSUER,
  audience: process.env.OVERLEAF_SAML_AUDIENCE,
-  cert: readFilesContentFromEnv(process.env.OVERLEAF_SAML_IDP_CERT),
+  idpCert: readFilesContentFromEnv(process.env.OVERLEAF_SAML_IDP_CERT),
  privateKey: readFilesContentFromEnv(process.env.OVERLEAF_SAML_PRIVATE_KEY),
  decryptionPvk: readFilesContentFromEnv(process.env.OVERLEAF_SAML_DECRYPTION_PVK),
  signatureAlgorithm: process.env.OVERLEAF_SAML_SIGNATURE_ALGORITHM,
Some files were not shown because too many files have changed in this diff.