Mirror of https://github.com/yu-i-i/overleaf-cep.git, synced 2025-07-31 11:00:06 +02:00

Compare commits: 170 commits, 1741e48d59 ... ffbb09e1d4
Commit SHAs:

ffbb09e1d4, 057f4b4bb5, d9587e8b06, 50c9de6178, 7a072164a2, 930401541d, 081fced4bd, 22ad3a86a9, dd3ae65bd2, 2a88d7d9c9,
6f05a43f32, 79f9957b68, 32a8142f9c, 32b30606e5, 7abafb01ea, 4464320757, 4bbd5f32b9, 4077486b86, 666481d8b2, 61db35ac8f,
707e197625, 958e05a001, 5b499efd23, d7d60f9d4c, 7256c99e29, a8d6055b4e, c51d6f46d4, 73476180d4, b3cc1fa582, 19a804d5bf,
478e264817, df3d9099b6, 2731ffaf10, 14c82ac94d, 52e6a216f4, 1a8c549389, 62760a9bf5, 5d78229e1e, d7bd665bee, 447be67f78,
5fec16153b, 53c34b5726, 2b49653f21, a0aa6b9cc7, f5a89cc38f, 2c3eed8d96, 2ad9f36706, 850da34778, 23c1a0ba4d, 71094cb283,
35722acb3d, 8870aa6e63, 5a4cf8a003, c732a02b38, c6ac06b51c, c378f0961c, a9780ccf96, 247b4e274d, 9d290ae234, ed9844b2ec,
87bca3601d, bb3a123b8d, 86d310c741, 8ed650f57a, 13270dee2d, cf36767f03, 247f04557c, 3c154955b2, 612981bedb, a7466a7291,
fb50d429b4, 8ec9cd21b4, 5861e4160c, ec763c69a7, 1cfd5ca948, 12b96e40a5, 626416ed02, 6166a51552, 6f1f1ba744, dd3956f5f4,
52898ac83b, fe1129c2cf, 7f086b21c8, b225b55e8d, f95bf41824, d492512d9e, 1c672e55f5, 9d858dcf0f, 584db6c301, a29280a1fe,
315bde6f1b, 2256697323, ee2338a33b, 94e12ec404, 26032d6b77, adb9723d62, b901bb6c75, fa62529d82, cde7ff5d2f, 82c95dd82d,
778221c0af, f8f2585164, 04d36122bd, dfc00ed8c1, 262a1d09c6, 5e76a97bc4, aa367bcd1d, fe68930e9a, 3a0c71175b, 8fc206073b,
34d5564abc, c1fc5b88b3, a1098a921c, a1a3019d1e, 34be8b75ad, cedc96bdd7, 457d61fa9a, 35902407b3, 1f7bfb4737, 6ac5142b41,
39110d9da9, ef958f97a1, 832028e92d, f0edc7ba00, d6c2188f2d, bc95219bf6, 3e49fd6967, 846ccd3aac, 9babb6283b, 62c8af2a93,
3850e97446, 397a546095, d8c5160349, 3a5d24eb7a, a3b908e255, 0e49a5d9b0, 958ff0f3bf, 773cbc92eb, b9f1013f37, 30c5495b21,
835e14b8b2, fb03fe4d26, 4a17a1e713, c60ceaf932, 8ad335cf47, 14308f4fba, fe8d6392d5, dd526693f5, 42aea53307, 3aa579f232,
9cd7e49daf, 9f22564ca3, af46bcdace, c27c7bbe83, 814a55809b, 29b0dd0725, f7f4a03abb, f11a6a6b87, 6207c853ef, c183176fd3,
15663796ad, e670024f5c, bdf0194fc8, 4ba0e97b95, d85dbe429d, d99ba08d01, b831a0b3f7, b538d56591, 7920cd9d3d, 28468e134c
882 changed files with 27325 additions and 18025 deletions
@@ -29,6 +29,7 @@ services:
- DOCKER_RUNNER=true
- TEXLIVE_IMAGE=texlive-full # docker build texlive -t texlive-full
- COMPILES_HOST_DIR=${PWD}/compiles
- OUTPUT_HOST_DIR=${PWD}/output
user: root
volumes:
- ${PWD}/compiles:/overleaf/services/clsi/compiles

@@ -75,9 +75,13 @@ services:
## Sandboxed Compiles: https://github.com/overleaf/overleaf/wiki/Server-Pro:-Sandboxed-Compiles
SANDBOXED_COMPILES: 'true'
SANDBOXED_COMPILES_SIBLING_CONTAINERS: 'true'
### Bind-mount source for /var/lib/overleaf/data/compiles inside the container.
SANDBOXED_COMPILES_HOST_DIR: '/home/user/sharelatex_data/data/compiles'
SANDBOXED_COMPILES_HOST_DIR_COMPILES: '/home/user/sharelatex_data/data/compiles'
### Bind-mount source for /var/lib/overleaf/data/output inside the container.
SANDBOXED_COMPILES_HOST_DIR_OUTPUT: '/home/user/sharelatex_data/data/output'
### Backwards compatibility (before Server Pro 5.5)
DOCKER_RUNNER: 'true'
SANDBOXED_COMPILES_SIBLING_CONTAINERS: 'true'

## Works with test LDAP server shown at bottom of docker compose
# OVERLEAF_LDAP_URL: 'ldap://ldap:389'

@@ -7,4 +7,4 @@ access-token-encryptor
--is-library=True
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0

@@ -7,4 +7,4 @@ fetch-utils
--is-library=True
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0

@ -23,11 +23,11 @@ async function fetchJson(url, opts = {}) {
|
|||
}
|
||||
|
||||
async function fetchJsonWithResponse(url, opts = {}) {
|
||||
const { fetchOpts } = parseOpts(opts)
|
||||
const { fetchOpts, detachSignal } = parseOpts(opts)
|
||||
fetchOpts.headers = fetchOpts.headers ?? {}
|
||||
fetchOpts.headers.Accept = fetchOpts.headers.Accept ?? 'application/json'
|
||||
|
||||
const response = await performRequest(url, fetchOpts)
|
||||
const response = await performRequest(url, fetchOpts, detachSignal)
|
||||
if (!response.ok) {
|
||||
const body = await maybeGetResponseBody(response)
|
||||
throw new RequestFailedError(url, opts, response, body)
|
||||
|
@ -53,8 +53,8 @@ async function fetchStream(url, opts = {}) {
|
|||
}
|
||||
|
||||
async function fetchStreamWithResponse(url, opts = {}) {
|
||||
const { fetchOpts, abortController } = parseOpts(opts)
|
||||
const response = await performRequest(url, fetchOpts)
|
||||
const { fetchOpts, abortController, detachSignal } = parseOpts(opts)
|
||||
const response = await performRequest(url, fetchOpts, detachSignal)
|
||||
|
||||
if (!response.ok) {
|
||||
const body = await maybeGetResponseBody(response)
|
||||
|
@ -76,8 +76,8 @@ async function fetchStreamWithResponse(url, opts = {}) {
|
|||
* @throws {RequestFailedError} if the response has a failure status code
|
||||
*/
|
||||
async function fetchNothing(url, opts = {}) {
|
||||
const { fetchOpts } = parseOpts(opts)
|
||||
const response = await performRequest(url, fetchOpts)
|
||||
const { fetchOpts, detachSignal } = parseOpts(opts)
|
||||
const response = await performRequest(url, fetchOpts, detachSignal)
|
||||
if (!response.ok) {
|
||||
const body = await maybeGetResponseBody(response)
|
||||
throw new RequestFailedError(url, opts, response, body)
|
||||
|
@ -108,9 +108,9 @@ async function fetchRedirect(url, opts = {}) {
|
|||
* @throws {RequestFailedError} if the response has a non redirect status code or missing Location header
|
||||
*/
|
||||
async function fetchRedirectWithResponse(url, opts = {}) {
|
||||
const { fetchOpts } = parseOpts(opts)
|
||||
const { fetchOpts, detachSignal } = parseOpts(opts)
|
||||
fetchOpts.redirect = 'manual'
|
||||
const response = await performRequest(url, fetchOpts)
|
||||
const response = await performRequest(url, fetchOpts, detachSignal)
|
||||
if (response.status < 300 || response.status >= 400) {
|
||||
const body = await maybeGetResponseBody(response)
|
||||
throw new RequestFailedError(url, opts, response, body)
|
||||
|
@ -142,8 +142,8 @@ async function fetchString(url, opts = {}) {
|
|||
}
|
||||
|
||||
async function fetchStringWithResponse(url, opts = {}) {
|
||||
const { fetchOpts } = parseOpts(opts)
|
||||
const response = await performRequest(url, fetchOpts)
|
||||
const { fetchOpts, detachSignal } = parseOpts(opts)
|
||||
const response = await performRequest(url, fetchOpts, detachSignal)
|
||||
if (!response.ok) {
|
||||
const body = await maybeGetResponseBody(response)
|
||||
throw new RequestFailedError(url, opts, response, body)
|
||||
|
@ -178,13 +178,14 @@ function parseOpts(opts) {
|
|||
|
||||
const abortController = new AbortController()
|
||||
fetchOpts.signal = abortController.signal
|
||||
let detachSignal = () => {}
|
||||
if (opts.signal) {
|
||||
abortOnSignal(abortController, opts.signal)
|
||||
detachSignal = abortOnSignal(abortController, opts.signal)
|
||||
}
|
||||
if (opts.body instanceof Readable) {
|
||||
abortOnDestroyedRequest(abortController, fetchOpts.body)
|
||||
}
|
||||
return { fetchOpts, abortController }
|
||||
return { fetchOpts, abortController, detachSignal }
|
||||
}
|
||||
|
||||
function setupJsonBody(fetchOpts, json) {
|
||||
|
@ -208,6 +209,9 @@ function abortOnSignal(abortController, signal) {
|
|||
abortController.abort(signal.reason)
|
||||
}
|
||||
signal.addEventListener('abort', listener)
|
||||
return () => {
|
||||
signal.removeEventListener('abort', listener)
|
||||
}
|
||||
}
|
||||
|
||||
function abortOnDestroyedRequest(abortController, stream) {
|
||||
|
@ -226,11 +230,12 @@ function abortOnDestroyedResponse(abortController, response) {
|
|||
})
|
||||
}
|
||||
|
||||
async function performRequest(url, fetchOpts) {
|
||||
async function performRequest(url, fetchOpts, detachSignal) {
|
||||
let response
|
||||
try {
|
||||
response = await fetch(url, fetchOpts)
|
||||
} catch (err) {
|
||||
detachSignal()
|
||||
if (fetchOpts.body instanceof Readable) {
|
||||
fetchOpts.body.destroy()
|
||||
}
|
||||
|
@ -239,6 +244,7 @@ async function performRequest(url, fetchOpts) {
|
|||
method: fetchOpts.method ?? 'GET',
|
||||
})
|
||||
}
|
||||
response.body.on('close', detachSignal)
|
||||
if (fetchOpts.body instanceof Readable) {
|
||||
response.body.on('close', () => {
|
||||
if (!fetchOpts.body.readableEnded) {
|
||||
|
|
|
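The fetch-utils changes above make `parseOpts` hand back a `detachSignal` callback, and `performRequest` now calls it once the request fails or the response body closes. The point is to remove the 'abort' listener that was added to a caller-supplied `AbortSignal`, so reusing one long-lived signal across many requests does not accumulate listeners. A minimal sketch of the same pattern outside the library; the function names are illustrative, and the body 'close' event assumes a node-fetch style stream:

```js
const fetch = require('node-fetch')

// Attach an abort listener to a shared signal and return a function that removes it.
function linkAbort(abortController, sharedSignal) {
  const listener = () => abortController.abort(sharedSignal.reason)
  sharedSignal.addEventListener('abort', listener)
  return () => sharedSignal.removeEventListener('abort', listener)
}

async function requestOnce(url, sharedSignal) {
  const abortController = new AbortController()
  const detach = linkAbort(abortController, sharedSignal)
  try {
    const response = await fetch(url, { signal: abortController.signal })
    // Stop listening on the shared signal once this response body is done.
    response.body.on('close', detach)
    return response
  } catch (err) {
    detach() // request is over; do not leave a dangling listener on the shared signal
    throw err
  }
}
```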
@ -1,6 +1,9 @@
|
|||
const { expect } = require('chai')
|
||||
const fs = require('node:fs')
|
||||
const events = require('node:events')
|
||||
const { FetchError, AbortError } = require('node-fetch')
|
||||
const { Readable } = require('node:stream')
|
||||
const { pipeline } = require('node:stream/promises')
|
||||
const { once } = require('node:events')
|
||||
const { TestServer } = require('./helpers/TestServer')
|
||||
const selfsigned = require('selfsigned')
|
||||
|
@ -203,6 +206,31 @@ describe('fetch-utils', function () {
|
|||
).to.be.rejectedWith(AbortError)
|
||||
expect(stream.destroyed).to.be.true
|
||||
})
|
||||
|
||||
it('detaches from signal on success', async function () {
|
||||
const signal = AbortSignal.timeout(10_000)
|
||||
for (let i = 0; i < 20; i++) {
|
||||
const s = await fetchStream(this.url('/hello'), { signal })
|
||||
expect(events.getEventListeners(signal, 'abort')).to.have.length(1)
|
||||
await pipeline(s, fs.createWriteStream('/dev/null'))
|
||||
expect(events.getEventListeners(signal, 'abort')).to.have.length(0)
|
||||
}
|
||||
})
|
||||
|
||||
it('detaches from signal on error', async function () {
|
||||
const signal = AbortSignal.timeout(10_000)
|
||||
for (let i = 0; i < 20; i++) {
|
||||
try {
|
||||
await fetchStream(this.url('/500'), { signal })
|
||||
} catch (err) {
|
||||
if (err instanceof RequestFailedError && err.response.status === 500)
|
||||
continue
|
||||
throw err
|
||||
} finally {
|
||||
expect(events.getEventListeners(signal, 'abort')).to.have.length(0)
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe('fetchNothing', function () {
|
||||
|
@ -391,9 +419,16 @@ async function* infiniteIterator() {
|
|||
async function abortOnceReceived(func, server) {
|
||||
const controller = new AbortController()
|
||||
const promise = func(controller.signal)
|
||||
expect(events.getEventListeners(controller.signal, 'abort')).to.have.length(1)
|
||||
await once(server.events, 'request-received')
|
||||
controller.abort()
|
||||
return await promise
|
||||
try {
|
||||
return await promise
|
||||
} finally {
|
||||
expect(events.getEventListeners(controller.signal, 'abort')).to.have.length(
|
||||
0
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
async function expectRequestAborted(req) {
|
||||
|
|
|
@@ -7,4 +7,4 @@ logger
--is-library=True
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0

@@ -7,4 +7,4 @@ metrics
--is-library=True
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0

@@ -7,4 +7,4 @@ mongo-utils
--is-library=True
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0

@@ -7,4 +7,4 @@ o-error
--is-library=True
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0

@@ -7,4 +7,4 @@ object-persistor
--is-library=True
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0

@@ -7,4 +7,4 @@ overleaf-editor-core
--is-library=True
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0

@@ -7,4 +7,4 @@ promise-utils
--is-library=True
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0

@@ -7,4 +7,4 @@ ranges-tracker
--is-library=True
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0

@@ -7,4 +7,4 @@ redis-wrapper
--is-library=True
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0

@@ -7,4 +7,4 @@ settings
--is-library=True
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0

@@ -7,4 +7,4 @@ stream-utils
--is-library=True
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0

package-lock.json (generated), 5695 lines changed. File diff suppressed because it is too large.

@@ -55,6 +55,7 @@
"services/analytics",
"services/chat",
"services/clsi",
"services/clsi-cache",
"services/clsi-perf",
"services/contacts",
"services/docstore",

patches/pdfjs-dist+5.1.91.patch (new file, 22 lines)

@@ -0,0 +1,22 @@
diff --git a/node_modules/pdfjs-dist/build/pdf.worker.mjs b/node_modules/pdfjs-dist/build/pdf.worker.mjs
index 6c5c6f1..bb6b7d1 100644
--- a/node_modules/pdfjs-dist/build/pdf.worker.mjs
+++ b/node_modules/pdfjs-dist/build/pdf.worker.mjs
@@ -1830,7 +1830,7 @@ async function __wbg_init(module_or_path) {
}
}
if (typeof module_or_path === 'undefined') {
- module_or_path = new URL('qcms_bg.wasm', import.meta.url);
+ module_or_path = new URL(/* webpackIgnore: true */ 'qcms_bg.wasm', import.meta.url);
}
const imports = __wbg_get_imports();
if (typeof module_or_path === 'string' || typeof Request === 'function' && module_or_path instanceof Request || typeof URL === 'function' && module_or_path instanceof URL) {
@@ -5358,7 +5358,7 @@ var OpenJPEG = (() => {
if (Module["locateFile"]) {
return locateFile("openjpeg.wasm");
}
- return new URL("openjpeg.wasm", import.meta.url).href;
+ return new URL(/* webpackIgnore: true */ "openjpeg.wasm", import.meta.url).href;
}
function getBinarySync(file) {
if (file == wasmBinaryFile && wasmBinary) {

@@ -45,5 +45,17 @@
  "clusterWorkers": "CLUSTER_WORKERS",
  "maxFileUploadSize": "MAX_FILE_UPLOAD_SIZE",
  "httpsOnly": "HTTPS_ONLY",
  "httpRequestTimeout": "OVERLEAF_HISTORY_V1_HTTP_REQUEST_TIMEOUT"
  "httpRequestTimeout": "OVERLEAF_HISTORY_V1_HTTP_REQUEST_TIMEOUT",
  "redis": {
    "history": {
      "host": "OVERLEAF_REDIS_HOST",
      "password": "OVERLEAF_REDIS_PASS",
      "port": "OVERLEAF_REDIS_PORT"
    },
    "lock": {
      "host": "OVERLEAF_REDIS_HOST",
      "password": "OVERLEAF_REDIS_PASS",
      "port": "OVERLEAF_REDIS_PORT"
    }
  }
}

@ -9,7 +9,7 @@ describe('Accounts', function () {
|
|||
it('can log in and out', function () {
|
||||
login('user@example.com')
|
||||
cy.visit('/project')
|
||||
cy.findByText('Account').click()
|
||||
cy.findByRole('menuitem', { name: 'Account' }).click()
|
||||
cy.findByText('Log Out').click()
|
||||
cy.url().should('include', '/login')
|
||||
cy.visit('/project')
|
||||
|
|
|
@ -293,7 +293,7 @@ describe('admin panel', function () {
|
|||
cy.findByText(deletedProjectName).should('not.exist')
|
||||
|
||||
cy.log('navigate to thrashed projects and delete the project')
|
||||
cy.get('.project-list-sidebar-react').within(() => {
|
||||
cy.get('.project-list-sidebar-scroll').within(() => {
|
||||
cy.findByText('Trashed Projects').click()
|
||||
})
|
||||
findProjectRow(deletedProjectName).within(() =>
|
||||
|
@ -318,7 +318,7 @@ describe('admin panel', function () {
|
|||
cy.log('login as the user and verify the project is restored')
|
||||
login(user1)
|
||||
cy.visit('/project')
|
||||
cy.get('.project-list-sidebar-react').within(() => {
|
||||
cy.get('.project-list-sidebar-scroll').within(() => {
|
||||
cy.findByText('Trashed Projects').click()
|
||||
})
|
||||
cy.findByText(`${deletedProjectName} (Restored)`)
|
||||
|
|
|
@ -102,10 +102,6 @@ describe('Project creation and compilation', function () {
|
|||
cy.findByText('Invite not yet accepted.')
|
||||
})
|
||||
|
||||
cy.visit('/project')
|
||||
cy.findByText('Account').click()
|
||||
cy.findByText('Log Out').click()
|
||||
|
||||
login('collaborator@example.com')
|
||||
openProjectViaInviteNotification(targetProjectName)
|
||||
cy.get('@targetProjectId').then(targetProjectId => {
|
||||
|
|
|
@ -131,9 +131,7 @@ const allowedVars = Joi.object(
|
|||
'GIT_BRIDGE_HOST',
|
||||
'GIT_BRIDGE_PORT',
|
||||
'V1_HISTORY_URL',
|
||||
'DOCKER_RUNNER',
|
||||
'SANDBOXED_COMPILES',
|
||||
'SANDBOXED_COMPILES_SIBLING_CONTAINERS',
|
||||
'ALL_TEX_LIVE_DOCKER_IMAGE_NAMES',
|
||||
'OVERLEAF_TEMPLATES_USER_ID',
|
||||
'OVERLEAF_NEW_PROJECT_TEMPLATE_LINKS',
|
||||
|
@ -196,10 +194,7 @@ function setVarsDockerCompose({ pro, vars, version, withDataDir }) {
|
|||
)
|
||||
}
|
||||
|
||||
if (
|
||||
cfg.services.sharelatex.environment
|
||||
.SANDBOXED_COMPILES_SIBLING_CONTAINERS === 'true'
|
||||
) {
|
||||
if (cfg.services.sharelatex.environment.SANDBOXED_COMPILES === 'true') {
|
||||
cfg.services.sharelatex.environment.SANDBOXED_COMPILES_HOST_DIR =
|
||||
PATHS.SANDBOXED_COMPILES_HOST_DIR
|
||||
cfg.services.sharelatex.environment.TEX_LIVE_DOCKER_IMAGE =
|
||||
|
|
|
@ -10,9 +10,7 @@ const LABEL_TEX_LIVE_VERSION = 'TeX Live version'
|
|||
|
||||
describe('SandboxedCompiles', function () {
|
||||
const enabledVars = {
|
||||
DOCKER_RUNNER: 'true',
|
||||
SANDBOXED_COMPILES: 'true',
|
||||
SANDBOXED_COMPILES_SIBLING_CONTAINERS: 'true',
|
||||
ALL_TEX_LIVE_DOCKER_IMAGE_NAMES: '2023,2022',
|
||||
}
|
||||
|
||||
|
|
|
@ -96,12 +96,12 @@ describe('Templates', () => {
|
|||
.parent()
|
||||
.parent()
|
||||
.within(() => cy.get('input[type="checkbox"]').first().check())
|
||||
cy.get('.project-list-sidebar-react').within(() => {
|
||||
cy.get('.project-list-sidebar-scroll').within(() => {
|
||||
cy.findAllByText('New Tag').first().click()
|
||||
})
|
||||
cy.focused().type(tagName)
|
||||
cy.findByText('Create').click()
|
||||
cy.get('.project-list-sidebar-react').within(() => {
|
||||
cy.get('.project-list-sidebar-scroll').within(() => {
|
||||
cy.findByText(tagName)
|
||||
.parent()
|
||||
.within(() => cy.get('.name').should('have.text', `${tagName} (1)`))
|
||||
|
|
|
@@ -6,4 +6,4 @@ chat
--esmock-loader=False
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0

@@ -20,6 +20,7 @@ The CLSI can be configured through the following environment variables:
* `CATCH_ERRORS` - Set to `true` to log uncaught exceptions
* `COMPILE_GROUP_DOCKER_CONFIGS` - JSON string of Docker configs for compile groups
* `COMPILES_HOST_DIR` - Working directory for LaTeX compiles
* `OUTPUT_HOST_DIR` - Output directory for LaTeX compiles
* `COMPILE_SIZE_LIMIT` - Sets the body-parser [limit](https://github.com/expressjs/body-parser#limit)
* `DOCKER_RUNNER` - Set to true to use sibling containers
* `DOCKER_RUNTIME` -

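A rough sketch of how `COMPILES_HOST_DIR` and `OUTPUT_HOST_DIR` plausibly surface in the CLSI settings. The `Settings.path.sandboxedCompilesHostDirCompiles` and `Settings.path.sandboxedCompilesHostDirOutput` keys are the ones the updated `DockerRunner` reads when it rewrites bind-mount paths for sibling containers, but the env-var wiring below is an assumption, not a quote from the settings file:

```js
// Assumed settings fragment (illustrative): host-side directories used when the
// compile and synctex commands run in sibling Docker containers.
module.exports = {
  path: {
    outputDir: '/overleaf/services/clsi/output',
    // Bind-mount sources on the Docker host:
    sandboxedCompilesHostDirCompiles: process.env.COMPILES_HOST_DIR, // e.g. $PWD/compiles
    sandboxedCompilesHostDirOutput: process.env.OUTPUT_HOST_DIR, // e.g. $PWD/output
  },
}
```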
@ -258,6 +258,8 @@ app.use(function (error, req, res, next) {
|
|||
if (error instanceof Errors.NotFoundError) {
|
||||
logger.debug({ err: error, url: req.url }, 'not found error')
|
||||
res.sendStatus(404)
|
||||
} else if (error instanceof Errors.InvalidParameter) {
|
||||
res.status(400).send(error.message)
|
||||
} else if (error.code === 'EPIPE') {
|
||||
// inspect container returns EPIPE when shutting down
|
||||
res.sendStatus(503) // send 503 Unavailable response
|
||||
|
|
276
services/clsi/app/js/CLSICacheHandler.js
Normal file
276
services/clsi/app/js/CLSICacheHandler.js
Normal file
|
@ -0,0 +1,276 @@
|
|||
const crypto = require('node:crypto')
|
||||
const fs = require('node:fs')
|
||||
const Path = require('node:path')
|
||||
const { pipeline } = require('node:stream/promises')
|
||||
const { createGzip, createGunzip } = require('node:zlib')
|
||||
const tarFs = require('tar-fs')
|
||||
const _ = require('lodash')
|
||||
const {
|
||||
fetchNothing,
|
||||
fetchStream,
|
||||
RequestFailedError,
|
||||
} = require('@overleaf/fetch-utils')
|
||||
const logger = require('@overleaf/logger')
|
||||
const Metrics = require('@overleaf/metrics')
|
||||
const Settings = require('@overleaf/settings')
|
||||
const { CACHE_SUBDIR } = require('./OutputCacheManager')
|
||||
const { isExtraneousFile } = require('./ResourceWriter')
|
||||
|
||||
const TIMING_BUCKETS = [
|
||||
0, 10, 100, 1000, 2000, 5000, 10000, 15000, 20000, 30000,
|
||||
]
|
||||
const MAX_ENTRIES_IN_OUTPUT_TAR = 100
|
||||
|
||||
/**
|
||||
* @param {string} projectId
|
||||
* @param {string} userId
|
||||
* @param {string} buildId
|
||||
* @param {string} editorId
|
||||
* @param {[{path: string}]} outputFiles
|
||||
* @param {string} compileGroup
|
||||
* @param {Record<string, any>} options
|
||||
*/
|
||||
function notifyCLSICacheAboutBuild({
|
||||
projectId,
|
||||
userId,
|
||||
buildId,
|
||||
editorId,
|
||||
outputFiles,
|
||||
compileGroup,
|
||||
options,
|
||||
}) {
|
||||
if (!Settings.apis.clsiCache.enabled) return
|
||||
|
||||
/**
|
||||
* @param {[{path: string}]} files
|
||||
*/
|
||||
const enqueue = files => {
|
||||
Metrics.count('clsi_cache_enqueue_files', files.length)
|
||||
fetchNothing(`${Settings.apis.clsiCache.url}/enqueue`, {
|
||||
method: 'POST',
|
||||
json: {
|
||||
projectId,
|
||||
userId,
|
||||
buildId,
|
||||
editorId,
|
||||
files,
|
||||
downloadHost: Settings.apis.clsi.downloadHost,
|
||||
clsiServerId: Settings.apis.clsi.clsiServerId,
|
||||
compileGroup,
|
||||
options,
|
||||
},
|
||||
signal: AbortSignal.timeout(15_000),
|
||||
}).catch(err => {
|
||||
logger.warn(
|
||||
{ err, projectId, userId, buildId },
|
||||
'enqueue for clsi cache failed'
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
// PDF preview
|
||||
enqueue(
|
||||
outputFiles
|
||||
.filter(
|
||||
f =>
|
||||
f.path === 'output.pdf' ||
|
||||
f.path === 'output.log' ||
|
||||
f.path === 'output.synctex.gz' ||
|
||||
f.path.endsWith('.blg')
|
||||
)
|
||||
.map(f => {
|
||||
if (f.path === 'output.pdf') {
|
||||
return _.pick(f, 'path', 'size', 'contentId', 'ranges')
|
||||
}
|
||||
return _.pick(f, 'path')
|
||||
})
|
||||
)
|
||||
|
||||
// Compile Cache
|
||||
buildTarball({ projectId, userId, buildId, outputFiles })
|
||||
.then(() => {
|
||||
enqueue([{ path: 'output.tar.gz' }])
|
||||
})
|
||||
.catch(err => {
|
||||
logger.warn(
|
||||
{ err, projectId, userId, buildId },
|
||||
'build output.tar.gz for clsi cache failed'
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} projectId
|
||||
* @param {string} userId
|
||||
* @param {string} buildId
|
||||
* @param {[{path: string}]} outputFiles
|
||||
* @return {Promise<void>}
|
||||
*/
|
||||
async function buildTarball({ projectId, userId, buildId, outputFiles }) {
|
||||
const timer = new Metrics.Timer('clsi_cache_build', 1, {}, TIMING_BUCKETS)
|
||||
const outputDir = Path.join(
|
||||
Settings.path.outputDir,
|
||||
userId ? `${projectId}-${userId}` : projectId,
|
||||
CACHE_SUBDIR,
|
||||
buildId
|
||||
)
|
||||
|
||||
const files = outputFiles.filter(f => !isExtraneousFile(f.path))
|
||||
if (files.length > MAX_ENTRIES_IN_OUTPUT_TAR) {
|
||||
Metrics.inc('clsi_cache_build_too_many_entries')
|
||||
throw new Error('too many output files for output.tar.gz')
|
||||
}
|
||||
Metrics.count('clsi_cache_build_files', files.length)
|
||||
|
||||
const path = Path.join(outputDir, 'output.tar.gz')
|
||||
try {
|
||||
await pipeline(
|
||||
tarFs.pack(outputDir, { entries: files.map(f => f.path) }),
|
||||
createGzip(),
|
||||
fs.createWriteStream(path)
|
||||
)
|
||||
} catch (err) {
|
||||
try {
|
||||
await fs.promises.unlink(path)
|
||||
} catch (e) {}
|
||||
throw err
|
||||
} finally {
|
||||
timer.done()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} projectId
|
||||
* @param {string} userId
|
||||
* @param {string} editorId
|
||||
* @param {string} buildId
|
||||
* @param {string} outputDir
|
||||
* @return {Promise<boolean>}
|
||||
*/
|
||||
async function downloadOutputDotSynctexFromCompileCache(
|
||||
projectId,
|
||||
userId,
|
||||
editorId,
|
||||
buildId,
|
||||
outputDir
|
||||
) {
|
||||
if (!Settings.apis.clsiCache.enabled) return false
|
||||
|
||||
const timer = new Metrics.Timer(
|
||||
'clsi_cache_download',
|
||||
1,
|
||||
{ method: 'synctex' },
|
||||
TIMING_BUCKETS
|
||||
)
|
||||
let stream
|
||||
try {
|
||||
stream = await fetchStream(
|
||||
`${Settings.apis.clsiCache.url}/project/${projectId}/${
|
||||
userId ? `user/${userId}/` : ''
|
||||
}build/${editorId}-${buildId}/search/output/output.synctex.gz`,
|
||||
{
|
||||
method: 'GET',
|
||||
signal: AbortSignal.timeout(10_000),
|
||||
}
|
||||
)
|
||||
} catch (err) {
|
||||
if (err instanceof RequestFailedError && err.response.status === 404) {
|
||||
timer.done({ status: 'not-found' })
|
||||
return false
|
||||
}
|
||||
timer.done({ status: 'error' })
|
||||
throw err
|
||||
}
|
||||
await fs.promises.mkdir(outputDir, { recursive: true })
|
||||
const dst = Path.join(outputDir, 'output.synctex.gz')
|
||||
const tmp = dst + crypto.randomUUID()
|
||||
try {
|
||||
await pipeline(stream, fs.createWriteStream(tmp))
|
||||
await fs.promises.rename(tmp, dst)
|
||||
} catch (err) {
|
||||
try {
|
||||
await fs.promises.unlink(tmp)
|
||||
} catch {}
|
||||
throw err
|
||||
}
|
||||
timer.done({ status: 'success' })
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} projectId
|
||||
* @param {string} userId
|
||||
* @param {string} compileDir
|
||||
* @return {Promise<boolean>}
|
||||
*/
|
||||
async function downloadLatestCompileCache(projectId, userId, compileDir) {
|
||||
if (!Settings.apis.clsiCache.enabled) return false
|
||||
|
||||
const url = `${Settings.apis.clsiCache.url}/project/${projectId}/${
|
||||
userId ? `user/${userId}/` : ''
|
||||
}latest/output/output.tar.gz`
|
||||
const timer = new Metrics.Timer(
|
||||
'clsi_cache_download',
|
||||
1,
|
||||
{ method: 'tar' },
|
||||
TIMING_BUCKETS
|
||||
)
|
||||
let stream
|
||||
try {
|
||||
stream = await fetchStream(url, {
|
||||
method: 'GET',
|
||||
signal: AbortSignal.timeout(10_000),
|
||||
})
|
||||
} catch (err) {
|
||||
if (err instanceof RequestFailedError && err.response.status === 404) {
|
||||
timer.done({ status: 'not-found' })
|
||||
return false
|
||||
}
|
||||
timer.done({ status: 'error' })
|
||||
throw err
|
||||
}
|
||||
let n = 0
|
||||
let abort = false
|
||||
await pipeline(
|
||||
stream,
|
||||
createGunzip(),
|
||||
tarFs.extract(compileDir, {
|
||||
// use ignore hook for counting entries (files+folders) and validation.
|
||||
// Include folders as they incur mkdir calls.
|
||||
ignore(_, header) {
|
||||
if (abort) return true // log once
|
||||
n++
|
||||
if (n > MAX_ENTRIES_IN_OUTPUT_TAR) {
|
||||
abort = true
|
||||
logger.warn(
|
||||
{
|
||||
url,
|
||||
compileDir,
|
||||
},
|
||||
'too many entries in tar-ball from clsi-cache'
|
||||
)
|
||||
} else if (header.type !== 'file' && header.type !== 'directory') {
|
||||
abort = true
|
||||
logger.warn(
|
||||
{
|
||||
url,
|
||||
compileDir,
|
||||
entryType: header.type,
|
||||
},
|
||||
'unexpected entry in tar-ball from clsi-cache'
|
||||
)
|
||||
}
|
||||
return abort
|
||||
},
|
||||
})
|
||||
)
|
||||
Metrics.count('clsi_cache_download_entries', n)
|
||||
timer.done({ status: 'success' })
|
||||
return !abort
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
notifyCLSICacheAboutBuild,
|
||||
downloadLatestCompileCache,
|
||||
downloadOutputDotSynctexFromCompileCache,
|
||||
}
|
|
@ -1,3 +1,4 @@
|
|||
const Path = require('node:path')
|
||||
const RequestParser = require('./RequestParser')
|
||||
const CompileManager = require('./CompileManager')
|
||||
const Settings = require('@overleaf/settings')
|
||||
|
@ -5,6 +6,7 @@ const Metrics = require('./Metrics')
|
|||
const ProjectPersistenceManager = require('./ProjectPersistenceManager')
|
||||
const logger = require('@overleaf/logger')
|
||||
const Errors = require('./Errors')
|
||||
const { notifyCLSICacheAboutBuild } = require('./CLSICacheHandler')
|
||||
|
||||
let lastSuccessfulCompileTimestamp = 0
|
||||
|
||||
|
@ -29,100 +31,133 @@ function compile(req, res, next) {
|
|||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
CompileManager.doCompileWithLock(request, (error, result) => {
|
||||
let { buildId, outputFiles, stats, timings } = result || {}
|
||||
let code, status
|
||||
if (outputFiles == null) {
|
||||
outputFiles = []
|
||||
}
|
||||
if (error instanceof Errors.AlreadyCompilingError) {
|
||||
code = 423 // Http 423 Locked
|
||||
status = 'compile-in-progress'
|
||||
} else if (error instanceof Errors.FilesOutOfSyncError) {
|
||||
code = 409 // Http 409 Conflict
|
||||
status = 'retry'
|
||||
logger.warn(
|
||||
{
|
||||
const stats = {}
|
||||
const timings = {}
|
||||
CompileManager.doCompileWithLock(
|
||||
request,
|
||||
stats,
|
||||
timings,
|
||||
(error, result) => {
|
||||
let { buildId, outputFiles } = result || {}
|
||||
let code, status
|
||||
if (outputFiles == null) {
|
||||
outputFiles = []
|
||||
}
|
||||
if (error instanceof Errors.AlreadyCompilingError) {
|
||||
code = 423 // Http 423 Locked
|
||||
status = 'compile-in-progress'
|
||||
} else if (error instanceof Errors.FilesOutOfSyncError) {
|
||||
code = 409 // Http 409 Conflict
|
||||
status = 'retry'
|
||||
logger.warn(
|
||||
{
|
||||
projectId: request.project_id,
|
||||
userId: request.user_id,
|
||||
},
|
||||
'files out of sync, please retry'
|
||||
)
|
||||
} else if (
|
||||
error?.code === 'EPIPE' ||
|
||||
error instanceof Errors.TooManyCompileRequestsError
|
||||
) {
|
||||
// docker returns EPIPE when shutting down
|
||||
code = 503 // send 503 Unavailable response
|
||||
status = 'unavailable'
|
||||
} else if (error?.terminated) {
|
||||
status = 'terminated'
|
||||
} else if (error?.validate) {
|
||||
status = `validation-${error.validate}`
|
||||
} else if (error?.timedout) {
|
||||
status = 'timedout'
|
||||
logger.debug(
|
||||
{ err: error, projectId: request.project_id },
|
||||
'timeout running compile'
|
||||
)
|
||||
} else if (error) {
|
||||
status = 'error'
|
||||
code = 500
|
||||
logger.error(
|
||||
{ err: error, projectId: request.project_id },
|
||||
'error running compile'
|
||||
)
|
||||
} else {
|
||||
if (
|
||||
outputFiles.some(
|
||||
file => file.path === 'output.pdf' && file.size > 0
|
||||
)
|
||||
) {
|
||||
status = 'success'
|
||||
lastSuccessfulCompileTimestamp = Date.now()
|
||||
} else if (request.stopOnFirstError) {
|
||||
status = 'stopped-on-first-error'
|
||||
} else {
|
||||
status = 'failure'
|
||||
logger.warn(
|
||||
{ projectId: request.project_id, outputFiles },
|
||||
'project failed to compile successfully, no output.pdf generated'
|
||||
)
|
||||
}
|
||||
|
||||
// log an error if any core files are found
|
||||
if (outputFiles.some(file => file.path === 'core')) {
|
||||
logger.error(
|
||||
{ projectId: request.project_id, req, outputFiles },
|
||||
'core file found in output'
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if (error) {
|
||||
outputFiles = error.outputFiles || []
|
||||
buildId = error.buildId
|
||||
}
|
||||
|
||||
if (
|
||||
status === 'success' &&
|
||||
request.editorId &&
|
||||
request.populateClsiCache
|
||||
) {
|
||||
notifyCLSICacheAboutBuild({
|
||||
projectId: request.project_id,
|
||||
userId: request.user_id,
|
||||
buildId: outputFiles[0].build,
|
||||
editorId: request.editorId,
|
||||
outputFiles,
|
||||
compileGroup: request.compileGroup,
|
||||
options: {
|
||||
compiler: request.compiler,
|
||||
draft: request.draft,
|
||||
imageName: request.imageName
|
||||
? Path.basename(request.imageName)
|
||||
: undefined,
|
||||
rootResourcePath: request.rootResourcePath,
|
||||
stopOnFirstError: request.stopOnFirstError,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
timer.done()
|
||||
res.status(code || 200).send({
|
||||
compile: {
|
||||
status,
|
||||
error: error?.message || error,
|
||||
stats,
|
||||
timings,
|
||||
buildId,
|
||||
outputUrlPrefix: Settings.apis.clsi.outputUrlPrefix,
|
||||
outputFiles: outputFiles.map(file => ({
|
||||
url:
|
||||
`${Settings.apis.clsi.url}/project/${request.project_id}` +
|
||||
(request.user_id != null
|
||||
? `/user/${request.user_id}`
|
||||
: '') +
|
||||
`/build/${file.build}/output/${file.path}`,
|
||||
...file,
|
||||
})),
|
||||
},
|
||||
'files out of sync, please retry'
|
||||
)
|
||||
} else if (
|
||||
error?.code === 'EPIPE' ||
|
||||
error instanceof Errors.TooManyCompileRequestsError
|
||||
) {
|
||||
// docker returns EPIPE when shutting down
|
||||
code = 503 // send 503 Unavailable response
|
||||
status = 'unavailable'
|
||||
} else if (error?.terminated) {
|
||||
status = 'terminated'
|
||||
} else if (error?.validate) {
|
||||
status = `validation-${error.validate}`
|
||||
} else if (error?.timedout) {
|
||||
status = 'timedout'
|
||||
logger.debug(
|
||||
{ err: error, projectId: request.project_id },
|
||||
'timeout running compile'
|
||||
)
|
||||
} else if (error) {
|
||||
status = 'error'
|
||||
code = 500
|
||||
logger.error(
|
||||
{ err: error, projectId: request.project_id },
|
||||
'error running compile'
|
||||
)
|
||||
} else {
|
||||
if (
|
||||
outputFiles.some(
|
||||
file => file.path === 'output.pdf' && file.size > 0
|
||||
)
|
||||
) {
|
||||
status = 'success'
|
||||
lastSuccessfulCompileTimestamp = Date.now()
|
||||
} else if (request.stopOnFirstError) {
|
||||
status = 'stopped-on-first-error'
|
||||
} else {
|
||||
status = 'failure'
|
||||
logger.warn(
|
||||
{ projectId: request.project_id, outputFiles },
|
||||
'project failed to compile successfully, no output.pdf generated'
|
||||
)
|
||||
}
|
||||
|
||||
// log an error if any core files are found
|
||||
if (outputFiles.some(file => file.path === 'core')) {
|
||||
logger.error(
|
||||
{ projectId: request.project_id, req, outputFiles },
|
||||
'core file found in output'
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
if (error) {
|
||||
outputFiles = error.outputFiles || []
|
||||
buildId = error.buildId
|
||||
}
|
||||
|
||||
timer.done()
|
||||
res.status(code || 200).send({
|
||||
compile: {
|
||||
status,
|
||||
error: error?.message || error,
|
||||
stats,
|
||||
timings,
|
||||
buildId,
|
||||
outputUrlPrefix: Settings.apis.clsi.outputUrlPrefix,
|
||||
outputFiles: outputFiles.map(file => ({
|
||||
url:
|
||||
`${Settings.apis.clsi.url}/project/${request.project_id}` +
|
||||
(request.user_id != null ? `/user/${request.user_id}` : '') +
|
||||
`/build/${file.build}/output/${file.path}`,
|
||||
...file,
|
||||
})),
|
||||
},
|
||||
})
|
||||
})
|
||||
)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
@ -153,24 +188,19 @@ function clearCache(req, res, next) {
|
|||
}
|
||||
|
||||
function syncFromCode(req, res, next) {
|
||||
const { file } = req.query
|
||||
const { file, editorId, buildId, compileFromClsiCache } = req.query
|
||||
const line = parseInt(req.query.line, 10)
|
||||
const column = parseInt(req.query.column, 10)
|
||||
const { imageName } = req.query
|
||||
const projectId = req.params.project_id
|
||||
const userId = req.params.user_id
|
||||
|
||||
if (imageName && !_isImageNameAllowed(imageName)) {
|
||||
return res.status(400).send('invalid image')
|
||||
}
|
||||
|
||||
CompileManager.syncFromCode(
|
||||
projectId,
|
||||
userId,
|
||||
file,
|
||||
line,
|
||||
column,
|
||||
imageName,
|
||||
{ imageName, editorId, buildId, compileFromClsiCache },
|
||||
function (error, pdfPositions) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
|
@ -186,20 +216,16 @@ function syncFromPdf(req, res, next) {
|
|||
const page = parseInt(req.query.page, 10)
|
||||
const h = parseFloat(req.query.h)
|
||||
const v = parseFloat(req.query.v)
|
||||
const { imageName } = req.query
|
||||
const { imageName, editorId, buildId, compileFromClsiCache } = req.query
|
||||
const projectId = req.params.project_id
|
||||
const userId = req.params.user_id
|
||||
|
||||
if (imageName && !_isImageNameAllowed(imageName)) {
|
||||
return res.status(400).send('invalid image')
|
||||
}
|
||||
CompileManager.syncFromPdf(
|
||||
projectId,
|
||||
userId,
|
||||
page,
|
||||
h,
|
||||
v,
|
||||
imageName,
|
||||
{ imageName, editorId, buildId, compileFromClsiCache },
|
||||
function (error, codePositions) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
|
@ -216,9 +242,6 @@ function wordcount(req, res, next) {
|
|||
const projectId = req.params.project_id
|
||||
const userId = req.params.user_id
|
||||
const { image } = req.query
|
||||
if (image && !_isImageNameAllowed(image)) {
|
||||
return res.status(400).send('invalid image')
|
||||
}
|
||||
logger.debug({ image, file, projectId }, 'word count request')
|
||||
|
||||
CompileManager.wordcount(
|
||||
|
@ -241,12 +264,6 @@ function status(req, res, next) {
|
|||
res.send('OK')
|
||||
}
|
||||
|
||||
function _isImageNameAllowed(imageName) {
|
||||
const ALLOWED_IMAGES =
|
||||
Settings.clsi && Settings.clsi.docker && Settings.clsi.docker.allowedImages
|
||||
return !ALLOWED_IMAGES || ALLOWED_IMAGES.includes(imageName)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
compile,
|
||||
stopCompile,
|
||||
|
|
|
@ -19,6 +19,10 @@ const Errors = require('./Errors')
|
|||
const CommandRunner = require('./CommandRunner')
|
||||
const { emitPdfStats } = require('./ContentCacheMetrics')
|
||||
const SynctexOutputParser = require('./SynctexOutputParser')
|
||||
const {
|
||||
downloadLatestCompileCache,
|
||||
downloadOutputDotSynctexFromCompileCache,
|
||||
} = require('./CLSICacheHandler')
|
||||
|
||||
const COMPILE_TIME_BUCKETS = [
|
||||
// NOTE: These buckets are locked in per metric name.
|
||||
|
@ -42,22 +46,22 @@ function getOutputDir(projectId, userId) {
|
|||
return Path.join(Settings.path.outputDir, getCompileName(projectId, userId))
|
||||
}
|
||||
|
||||
async function doCompileWithLock(request) {
|
||||
async function doCompileWithLock(request, stats, timings) {
|
||||
const compileDir = getCompileDir(request.project_id, request.user_id)
|
||||
await fsPromises.mkdir(compileDir, { recursive: true })
|
||||
request.isInitialCompile =
|
||||
(await fsPromises.mkdir(compileDir, { recursive: true })) === compileDir
|
||||
// prevent simultaneous compiles
|
||||
const lock = LockManager.acquire(compileDir)
|
||||
try {
|
||||
return await doCompile(request)
|
||||
return await doCompile(request, stats, timings)
|
||||
} finally {
|
||||
lock.release()
|
||||
}
|
||||
}
|
||||
|
||||
async function doCompile(request) {
|
||||
async function doCompile(request, stats, timings) {
|
||||
const { project_id: projectId, user_id: userId } = request
|
||||
const compileDir = getCompileDir(request.project_id, request.user_id)
|
||||
const stats = {}
|
||||
const timings = {}
|
||||
|
||||
const timerE2E = new Metrics.Timer(
|
||||
'compile-e2e-v2',
|
||||
|
@ -65,6 +69,25 @@ async function doCompile(request) {
|
|||
request.metricsOpts,
|
||||
COMPILE_TIME_BUCKETS
|
||||
)
|
||||
if (request.isInitialCompile) {
|
||||
stats.isInitialCompile = 1
|
||||
request.metricsOpts.compile = 'initial'
|
||||
if (request.compileFromClsiCache) {
|
||||
try {
|
||||
if (await downloadLatestCompileCache(projectId, userId, compileDir)) {
|
||||
stats.restoredClsiCache = 1
|
||||
request.metricsOpts.compile = 'from-clsi-cache'
|
||||
}
|
||||
} catch (err) {
|
||||
logger.warn(
|
||||
{ err, projectId, userId },
|
||||
'failed to populate compile dir from cache'
|
||||
)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
request.metricsOpts.compile = 'recompile'
|
||||
}
|
||||
const writeToDiskTimer = new Metrics.Timer(
|
||||
'write-to-disk',
|
||||
1,
|
||||
|
@ -296,7 +319,7 @@ async function doCompile(request) {
|
|||
emitPdfStats(stats, timings, request)
|
||||
}
|
||||
|
||||
return { outputFiles, stats, timings, buildId }
|
||||
return { outputFiles, buildId }
|
||||
}
|
||||
|
||||
async function _saveOutputFiles({
|
||||
|
@ -408,14 +431,7 @@ async function _checkDirectory(compileDir) {
|
|||
return true
|
||||
}
|
||||
|
||||
async function syncFromCode(
|
||||
projectId,
|
||||
userId,
|
||||
filename,
|
||||
line,
|
||||
column,
|
||||
imageName
|
||||
) {
|
||||
async function syncFromCode(projectId, userId, filename, line, column, opts) {
|
||||
// If LaTeX was run in a virtual environment, the file path that synctex expects
|
||||
// might not match the file path on the host. The .synctex.gz file however, will be accessed
|
||||
// wherever it is on the host.
|
||||
|
@ -431,7 +447,7 @@ async function syncFromCode(
|
|||
'-o',
|
||||
outputFilePath,
|
||||
]
|
||||
const stdout = await _runSynctex(projectId, userId, command, imageName)
|
||||
const stdout = await _runSynctex(projectId, userId, command, opts)
|
||||
logger.debug(
|
||||
{ projectId, userId, filename, line, column, command, stdout },
|
||||
'synctex code output'
|
||||
|
@ -439,7 +455,7 @@ async function syncFromCode(
|
|||
return SynctexOutputParser.parseViewOutput(stdout)
|
||||
}
|
||||
|
||||
async function syncFromPdf(projectId, userId, page, h, v, imageName) {
|
||||
async function syncFromPdf(projectId, userId, page, h, v, opts) {
|
||||
const compileName = getCompileName(projectId, userId)
|
||||
const baseDir = Settings.path.synctexBaseDir(compileName)
|
||||
const outputFilePath = `${baseDir}/output.pdf`
|
||||
|
@ -449,7 +465,7 @@ async function syncFromPdf(projectId, userId, page, h, v, imageName) {
|
|||
'-o',
|
||||
`${page}:${h}:${v}:${outputFilePath}`,
|
||||
]
|
||||
const stdout = await _runSynctex(projectId, userId, command, imageName)
|
||||
const stdout = await _runSynctex(projectId, userId, command, opts)
|
||||
logger.debug({ projectId, userId, page, h, v, stdout }, 'synctex pdf output')
|
||||
return SynctexOutputParser.parseEditOutput(stdout, baseDir)
|
||||
}
|
||||
|
@ -478,32 +494,85 @@ async function _checkFileExists(dir, filename) {
|
|||
}
|
||||
}
|
||||
|
||||
async function _runSynctex(projectId, userId, command, imageName) {
|
||||
const directory = getCompileDir(projectId, userId)
|
||||
async function _runSynctex(projectId, userId, command, opts) {
|
||||
const { imageName, editorId, buildId, compileFromClsiCache } = opts
|
||||
|
||||
if (imageName && !_isImageNameAllowed(imageName)) {
|
||||
throw new Errors.InvalidParameter('invalid image')
|
||||
}
|
||||
if (editorId && !/^[a-f0-9-]+$/.test(editorId)) {
|
||||
throw new Errors.InvalidParameter('invalid editorId')
|
||||
}
|
||||
if (buildId && !OutputCacheManager.BUILD_REGEX.test(buildId)) {
|
||||
throw new Errors.InvalidParameter('invalid buildId')
|
||||
}
|
||||
|
||||
const outputDir = getOutputDir(projectId, userId)
|
||||
const runInOutputDir = buildId && CommandRunner.canRunSyncTeXInOutputDir()
|
||||
|
||||
const directory = runInOutputDir
|
||||
? Path.join(outputDir, OutputCacheManager.CACHE_SUBDIR, buildId)
|
||||
: getCompileDir(projectId, userId)
|
||||
const timeout = 60 * 1000 // increased to allow for large projects
|
||||
const compileName = getCompileName(projectId, userId)
|
||||
const compileGroup = 'synctex'
|
||||
const compileGroup = runInOutputDir ? 'synctex-output' : 'synctex'
|
||||
const defaultImageName =
|
||||
Settings.clsi && Settings.clsi.docker && Settings.clsi.docker.image
|
||||
await _checkFileExists(directory, 'output.synctex.gz')
|
||||
try {
|
||||
const output = await CommandRunner.promises.run(
|
||||
compileName,
|
||||
command,
|
||||
directory,
|
||||
imageName || defaultImageName,
|
||||
timeout,
|
||||
{},
|
||||
compileGroup
|
||||
)
|
||||
return output.stdout
|
||||
} catch (error) {
|
||||
throw OError.tag(error, 'error running synctex', {
|
||||
command,
|
||||
projectId,
|
||||
userId,
|
||||
})
|
||||
}
|
||||
// eslint-disable-next-line @typescript-eslint/return-await
|
||||
return await OutputCacheManager.promises.queueDirOperation(
|
||||
outputDir,
|
||||
/**
|
||||
* @return {Promise<string>}
|
||||
*/
|
||||
async () => {
|
||||
try {
|
||||
await _checkFileExists(directory, 'output.synctex.gz')
|
||||
} catch (err) {
|
||||
if (
|
||||
err instanceof Errors.NotFoundError &&
|
||||
compileFromClsiCache &&
|
||||
editorId &&
|
||||
buildId
|
||||
) {
|
||||
try {
|
||||
await downloadOutputDotSynctexFromCompileCache(
|
||||
projectId,
|
||||
userId,
|
||||
editorId,
|
||||
buildId,
|
||||
directory
|
||||
)
|
||||
} catch (err) {
|
||||
logger.warn(
|
||||
{ err, projectId, userId, editorId, buildId },
|
||||
'failed to download output.synctex.gz from clsi-cache'
|
||||
)
|
||||
}
|
||||
await _checkFileExists(directory, 'output.synctex.gz')
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
try {
|
||||
const output = await CommandRunner.promises.run(
|
||||
compileName,
|
||||
command,
|
||||
directory,
|
||||
imageName || defaultImageName,
|
||||
timeout,
|
||||
{},
|
||||
compileGroup
|
||||
)
|
||||
return output.stdout
|
||||
} catch (error) {
|
||||
throw OError.tag(error, 'error running synctex', {
|
||||
command,
|
||||
projectId,
|
||||
userId,
|
||||
})
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
async function wordcount(projectId, userId, filename, image) {
|
||||
|
@ -515,6 +584,10 @@ async function wordcount(projectId, userId, filename, image) {
|
|||
const compileName = getCompileName(projectId, userId)
|
||||
const compileGroup = 'wordcount'
|
||||
|
||||
if (image && !_isImageNameAllowed(image)) {
|
||||
throw new Errors.InvalidParameter('invalid image')
|
||||
}
|
||||
|
||||
try {
|
||||
await fsPromises.mkdir(compileDir, { recursive: true })
|
||||
} catch (err) {
|
||||
|
@ -602,6 +675,12 @@ function _parseWordcountFromOutput(output) {
|
|||
return results
|
||||
}
|
||||
|
||||
function _isImageNameAllowed(imageName) {
|
||||
const ALLOWED_IMAGES =
|
||||
Settings.clsi && Settings.clsi.docker && Settings.clsi.docker.allowedImages
|
||||
return !ALLOWED_IMAGES || ALLOWED_IMAGES.includes(imageName)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
doCompileWithLock: callbackify(doCompileWithLock),
|
||||
stopCompile: callbackify(stopCompile),
|
||||
|
|
|
@ -6,21 +6,12 @@ const dockerode = new Docker()
|
|||
const crypto = require('node:crypto')
|
||||
const async = require('async')
|
||||
const LockManager = require('./DockerLockManager')
|
||||
const fs = require('node:fs')
|
||||
const Path = require('node:path')
|
||||
const _ = require('lodash')
|
||||
|
||||
const ONE_HOUR_IN_MS = 60 * 60 * 1000
|
||||
logger.debug('using docker runner')
|
||||
|
||||
function usingSiblingContainers() {
|
||||
return (
|
||||
Settings != null &&
|
||||
Settings.path != null &&
|
||||
Settings.path.sandboxedCompilesHostDir != null
|
||||
)
|
||||
}
|
||||
|
||||
let containerMonitorTimeout
|
||||
let containerMonitorInterval
|
||||
|
||||
|
@ -35,24 +26,6 @@ const DockerRunner = {
|
|||
compileGroup,
|
||||
callback
|
||||
) {
|
||||
if (usingSiblingContainers()) {
|
||||
const _newPath = Settings.path.sandboxedCompilesHostDir
|
||||
logger.debug(
|
||||
{ path: _newPath },
|
||||
'altering bind path for sibling containers'
|
||||
)
|
||||
// Server Pro, example:
|
||||
// '/var/lib/overleaf/data/compiles/<project-id>'
|
||||
// ... becomes ...
|
||||
// '/opt/overleaf_data/data/compiles/<project-id>'
|
||||
directory = Path.join(
|
||||
Settings.path.sandboxedCompilesHostDir,
|
||||
Path.basename(directory)
|
||||
)
|
||||
}
|
||||
|
||||
const volumes = { [directory]: '/compile' }
|
||||
|
||||
command = command.map(arg =>
|
||||
arg.toString().replace('$COMPILE_DIR', '/compile')
|
||||
)
|
||||
|
@ -72,7 +45,32 @@ const DockerRunner = {
|
|||
image = `${Settings.texliveImageNameOveride}/${img[2]}`
|
||||
}
|
||||
|
||||
if (compileGroup === 'synctex' || compileGroup === 'wordcount') {
|
||||
if (compileGroup === 'synctex-output') {
|
||||
// In: directory = '/overleaf/services/clsi/output/projectId-userId/generated-files/buildId'
|
||||
// directory.split('/').slice(-3) === 'projectId-userId/generated-files/buildId'
|
||||
// sandboxedCompilesHostDirOutput = '/host/output'
|
||||
// Out: directory = '/host/output/projectId-userId/generated-files/buildId'
|
||||
directory = Path.join(
|
||||
Settings.path.sandboxedCompilesHostDirOutput,
|
||||
...directory.split('/').slice(-3)
|
||||
)
|
||||
} else {
|
||||
// In: directory = '/overleaf/services/clsi/compiles/projectId-userId'
|
||||
// Path.basename(directory) === 'projectId-userId'
|
||||
// sandboxedCompilesHostDirCompiles = '/host/compiles'
|
||||
// Out: directory = '/host/compiles/projectId-userId'
|
||||
directory = Path.join(
|
||||
Settings.path.sandboxedCompilesHostDirCompiles,
|
||||
Path.basename(directory)
|
||||
)
|
||||
}
|
||||
|
||||
const volumes = { [directory]: '/compile' }
|
||||
if (
|
||||
compileGroup === 'synctex' ||
|
||||
compileGroup === 'synctex-output' ||
|
||||
compileGroup === 'wordcount'
|
||||
) {
|
||||
volumes[directory] += ':ro'
|
||||
}
|
||||
|
||||
|
@ -309,50 +307,17 @@ const DockerRunner = {
|
|||
LockManager.runWithLock(
|
||||
options.name,
|
||||
releaseLock =>
|
||||
// Check that volumes exist before starting the container.
|
||||
// When a container is started with volume pointing to a
|
||||
// non-existent directory then docker creates the directory but
|
||||
// with root ownership.
|
||||
DockerRunner._checkVolumes(options, volumes, err => {
|
||||
if (err != null) {
|
||||
return releaseLock(err)
|
||||
}
|
||||
DockerRunner._startContainer(
|
||||
options,
|
||||
volumes,
|
||||
attachStreamHandler,
|
||||
releaseLock
|
||||
)
|
||||
}),
|
||||
|
||||
DockerRunner._startContainer(
|
||||
options,
|
||||
volumes,
|
||||
attachStreamHandler,
|
||||
releaseLock
|
||||
),
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
// Check that volumes exist and are directories
|
||||
_checkVolumes(options, volumes, callback) {
|
||||
if (usingSiblingContainers()) {
|
||||
// Server Pro, with sibling-containers active, skip checks
|
||||
return callback(null)
|
||||
}
|
||||
|
||||
const checkVolume = (path, cb) =>
|
||||
fs.stat(path, (err, stats) => {
|
||||
if (err != null) {
|
||||
return cb(err)
|
||||
}
|
||||
if (!stats.isDirectory()) {
|
||||
return cb(new Error('not a directory'))
|
||||
}
|
||||
cb()
|
||||
})
|
||||
const jobs = []
|
||||
for (const vol in volumes) {
|
||||
jobs.push(cb => checkVolume(vol, cb))
|
||||
}
|
||||
async.series(jobs, callback)
|
||||
},
|
||||
|
||||
_startContainer(options, volumes, attachStreamHandler, callback) {
|
||||
callback = _.once(callback)
|
||||
const { name } = options
|
||||
|
@ -617,6 +582,10 @@ const DockerRunner = {
|
|||
containerMonitorInterval = undefined
|
||||
}
|
||||
},
|
||||
|
||||
canRunSyncTeXInOutputDir() {
|
||||
return Boolean(Settings.path.sandboxedCompilesHostDirOutput)
|
||||
},
|
||||
}
|
||||
|
||||
DockerRunner.startContainerMonitor()
|
||||
|
|
|
@ -35,6 +35,7 @@ class QueueLimitReachedError extends OError {}
|
|||
class TimedOutError extends OError {}
|
||||
class NoXrefTableError extends OError {}
|
||||
class TooManyCompileRequestsError extends OError {}
|
||||
class InvalidParameter extends OError {}
|
||||
|
||||
module.exports = Errors = {
|
||||
QueueLimitReachedError,
|
||||
|
@ -44,4 +45,5 @@ module.exports = Errors = {
|
|||
AlreadyCompilingError,
|
||||
NoXrefTableError,
|
||||
TooManyCompileRequestsError,
|
||||
InvalidParameter,
|
||||
}
|
||||
|
|
|
@ -99,6 +99,10 @@ module.exports = CommandRunner = {
|
|||
}
|
||||
return callback()
|
||||
},
|
||||
|
||||
canRunSyncTeXInOutputDir() {
|
||||
return true
|
||||
},
|
||||
}
|
||||
|
||||
module.exports.promises = {
|
||||
|
|
|
@ -83,6 +83,13 @@ async function cleanupDirectory(dir, options) {
|
|||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @template T
|
||||
*
|
||||
* @param {string} dir
|
||||
* @param {() => Promise<T>} fn
|
||||
* @return {Promise<T>}
|
||||
*/
|
||||
async function queueDirOperation(dir, fn) {
|
||||
const pending = PENDING_PROJECT_ACTIONS.get(dir) || Promise.resolve()
|
||||
const p = pending.then(fn, fn).finally(() => {
|
||||
|
@ -677,4 +684,5 @@ OutputCacheManager.promises = {
|
|||
saveOutputFilesInBuildDir: promisify(
|
||||
OutputCacheManager.saveOutputFilesInBuildDir
|
||||
),
|
||||
queueDirOperation,
|
||||
}
|
||||
|
|
|
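`queueDirOperation` keeps one pending promise per directory and chains each new operation onto it, so work against the same output directory is serialized even when the callers are unrelated (the synctex-from-cache path above depends on this). A small usage sketch; the directory value is made up:

```js
const OutputCacheManager = require('./OutputCacheManager')

async function example() {
  const outputDir = '/overleaf/services/clsi/output/project-user' // illustrative path

  // Both operations are started immediately, but they run strictly one after the
  // other: the second waits for the first to settle (fulfil or reject).
  const extract = OutputCacheManager.promises.queueDirOperation(outputDir, async () => {
    // e.g. unpack a cached build into the directory
    return 'extracted'
  })
  const synctex = OutputCacheManager.promises.queueDirOperation(outputDir, async () => {
    // e.g. run synctex against the files the first operation produced
    return 'synctex done'
  })
  return Promise.all([extract, synctex])
}
```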
@ -93,8 +93,11 @@ module.exports = {
|
|||
)
|
||||
|
||||
return outputFiles.filter(
|
||||
// Ignore the pdf and also ignore the files ignored by the frontend.
|
||||
({ path }) => path !== 'output.pdf' && !ignoreFiles.includes(path)
|
||||
// Ignore the pdf, clsi-cache tar-ball and also ignore the files ignored by the frontend.
|
||||
({ path }) =>
|
||||
path !== 'output.pdf' &&
|
||||
path !== 'output.tar.gz' &&
|
||||
!ignoreFiles.includes(path)
|
||||
)
|
||||
} catch (error) {
|
||||
if (
|
||||
|
|
|
@ -15,7 +15,6 @@ const logger = require('@overleaf/logger')
|
|||
const oneDay = 24 * 60 * 60 * 1000
|
||||
const Metrics = require('@overleaf/metrics')
|
||||
const Settings = require('@overleaf/settings')
|
||||
const diskusage = require('diskusage')
|
||||
const { callbackify } = require('node:util')
|
||||
const Path = require('node:path')
|
||||
const fs = require('node:fs')
|
||||
|
@ -33,7 +32,13 @@ async function collectDiskStats() {
|
|||
const diskStats = {}
|
||||
for (const path of paths) {
|
||||
try {
|
||||
const stats = await diskusage.check(path)
|
||||
const { blocks, bavail, bsize } = await fs.promises.statfs(path)
|
||||
const stats = {
|
||||
// Warning: these values will be wrong by a factor in Docker-for-Mac.
|
||||
// See https://github.com/docker/for-mac/issues/2136
|
||||
total: blocks * bsize, // Total size of the file system in bytes
|
||||
available: bavail * bsize, // Free space available to unprivileged users.
|
||||
}
|
||||
const diskAvailablePercent = (stats.available / stats.total) * 100
|
||||
Metrics.gauge('disk_available_percent', diskAvailablePercent, 1, {
|
||||
path,
|
||||
|
|
|
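The change above drops the `diskusage` dependency and derives the same numbers from `fs.promises.statfs`: total bytes are `blocks * bsize` and the bytes available to unprivileged users are `bavail * bsize`. A small self-contained sketch of that arithmetic; the figures in the comments are made up:

```js
const fs = require('node:fs')

async function diskAvailablePercent(path) {
  const { blocks, bavail, bsize } = await fs.promises.statfs(path)
  const total = blocks * bsize // e.g. 2,621,440 blocks * 4096 B = 10 GiB
  const available = bavail * bsize // e.g. 655,360 blocks * 4096 B = 2.5 GiB
  return (available / total) * 100 // 25% in this example
}
```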
@@ -3,6 +3,7 @@ const OutputCacheManager = require('./OutputCacheManager')

const VALID_COMPILERS = ['pdflatex', 'latex', 'xelatex', 'lualatex']
const MAX_TIMEOUT = 600
const EDITOR_ID_REGEX = /^[a-f0-9-]{36}$/ // UUID

function parse(body, callback) {
const response = {}

@@ -28,12 +29,24 @@ function parse(body, callback) {
default: '',
type: 'string',
}),
// Will be populated later. Must always be populated for prom library.
compile: 'initial',
}
response.compiler = _parseAttribute('compiler', compile.options.compiler, {
validValues: VALID_COMPILERS,
default: 'pdflatex',
type: 'string',
})
response.compileFromClsiCache = _parseAttribute(
'compileFromClsiCache',
compile.options.compileFromClsiCache,
{ default: false, type: 'boolean' }
)
response.populateClsiCache = _parseAttribute(
'populateClsiCache',
compile.options.populateClsiCache,
{ default: false, type: 'boolean' }
)
response.enablePdfCaching = _parseAttribute(
'enablePdfCaching',
compile.options.enablePdfCaching,

@@ -137,6 +150,10 @@ function parse(body, callback) {
)
response.rootResourcePath = _checkPath(rootResourcePath)

response.editorId = _parseAttribute('editorId', compile.options.editorId, {
type: 'string',
regex: EDITOR_ID_REGEX,
})
response.buildId = _parseAttribute('buildId', compile.options.buildId, {
type: 'string',
regex: OutputCacheManager.BUILD_REGEX,

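For context, the options parsed above arrive in the body of a compile request. A hedged example of such a body exercising the new fields; the field names come from the hunk, while the values, IDs and the buildId format are illustrative only:

// Shape of the request body as seen by RequestParser (illustrative values).
const compileRequest = {
  compile: {
    options: {
      compiler: 'pdflatex',
      compileFromClsiCache: true, // try to reuse a cached build
      populateClsiCache: true, // push this build into clsi-cache
      editorId: '123e4567-e89b-12d3-a456-426614174000', // must match EDITOR_ID_REGEX
      buildId: '18a0c3f2d4b-0123456789abcdef', // must match OutputCacheManager.BUILD_REGEX (format assumed)
    },
    resources: [],
  },
}
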
@@ -262,6 +262,7 @@ module.exports = ResourceWriter = {
shouldDelete = false
}
if (
path === 'output.tar.gz' ||
path === 'output.synctex.gz' ||
path === 'output.pdfxref' ||
path === 'output.pdf' ||

@@ -2,10 +2,10 @@ clsi
--data-dirs=cache,compiles,output
--dependencies=
--docker-repos=gcr.io/overleaf-ops,us-east1-docker.pkg.dev/overleaf-ops/ol-docker
--env-add=ENABLE_PDF_CACHING="true",PDF_CACHING_ENABLE_WORKER_POOL="true",ALLOWED_IMAGES=quay.io/sharelatex/texlive-full:2017.1,TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1,TEX_LIVE_IMAGE_NAME_OVERRIDE=gcr.io/overleaf-ops,TEXLIVE_IMAGE_USER="tex",DOCKER_RUNNER="true",COMPILES_HOST_DIR=$PWD/compiles
--env-add=ENABLE_PDF_CACHING="true",PDF_CACHING_ENABLE_WORKER_POOL="true",ALLOWED_IMAGES=quay.io/sharelatex/texlive-full:2017.1,TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1,TEX_LIVE_IMAGE_NAME_OVERRIDE=gcr.io/overleaf-ops,TEXLIVE_IMAGE_USER="tex",DOCKER_RUNNER="true",COMPILES_HOST_DIR=$PWD/compiles,OUTPUT_HOST_DIR=$PWD/output
--env-pass-through=
--esmock-loader=False
--node-version=20.18.2
--public-repo=True
--script-version=4.5.0
--script-version=4.7.0
--use-large-ci-runner=True

@@ -1,10 +1,12 @@
const Path = require('node:path')
const http = require('node:http')
const https = require('node:https')
const os = require('node:os')

http.globalAgent.keepAlive = false
https.globalAgent.keepAlive = false
const isPreEmptible = process.env.PREEMPTIBLE === 'TRUE'
const CLSI_SERVER_ID = os.hostname().replace('-ctr', '')

module.exports = {
compileSizeLimit: process.env.COMPILE_SIZE_LIMIT || '7mb',

@@ -48,12 +50,20 @@ module.exports = {
url: `http://${process.env.CLSI_HOST || '127.0.0.1'}:3013`,
// External url prefix for output files, e.g. for requests via load-balancers.
outputUrlPrefix: `${process.env.ZONE ? `/zone/${process.env.ZONE}` : ''}`,
clsiServerId: process.env.CLSI_SERVER_ID || CLSI_SERVER_ID,

downloadHost: process.env.DOWNLOAD_HOST || 'http://localhost:3013',
},
clsiPerf: {
host: `${process.env.CLSI_PERF_HOST || '127.0.0.1'}:${
process.env.CLSI_PERF_PORT || '3043'
}`,
},
clsiCache: {
enabled: !!process.env.CLSI_CACHE_HOST,
url: `http://${process.env.CLSI_CACHE_HOST}:3044`,
downloadURL: `http://${process.env.CLSI_CACHE_NGINX_HOST || process.env.CLSI_CACHE_HOST}:8080`,
},
},

smokeTest: process.env.SMOKE_TEST || false,

@@ -88,14 +98,15 @@ if (process.env.ALLOWED_COMPILE_GROUPS) {
}
}

if (process.env.DOCKER_RUNNER) {
let seccompProfilePath
if ((process.env.DOCKER_RUNNER || process.env.SANDBOXED_COMPILES) === 'true') {
module.exports.clsi = {
dockerRunner: process.env.DOCKER_RUNNER === 'true',
dockerRunner: true,
docker: {
runtime: process.env.DOCKER_RUNTIME,
image:
process.env.TEXLIVE_IMAGE || 'quay.io/sharelatex/texlive-full:2017.1',
process.env.TEXLIVE_IMAGE ||
process.env.TEX_LIVE_DOCKER_IMAGE ||
'quay.io/sharelatex/texlive-full:2017.1',
env: {
HOME: '/tmp',
CLSI: 1,

@@ -121,6 +132,7 @@ if (process.env.DOCKER_RUNNER) {
const defaultCompileGroupConfig = {
wordcount: { 'HostConfig.AutoRemove': true },
synctex: { 'HostConfig.AutoRemove': true },
'synctex-output': { 'HostConfig.AutoRemove': true },
}
module.exports.clsi.docker.compileGroupConfig = Object.assign(
defaultCompileGroupConfig,

@@ -131,6 +143,7 @@ if (process.env.DOCKER_RUNNER) {
process.exit(1)
}

let seccompProfilePath
try {
seccompProfilePath = Path.resolve(__dirname, '../seccomp/clsi-profile.json')
module.exports.clsi.docker.seccomp_profile = JSON.stringify(

@@ -165,5 +178,23 @@ if (process.env.DOCKER_RUNNER) {

module.exports.path.synctexBaseDir = () => '/compile'

module.exports.path.sandboxedCompilesHostDir = process.env.COMPILES_HOST_DIR
module.exports.path.sandboxedCompilesHostDirCompiles =
process.env.SANDBOXED_COMPILES_HOST_DIR_COMPILES ||
process.env.SANDBOXED_COMPILES_HOST_DIR ||
process.env.COMPILES_HOST_DIR
if (!module.exports.path.sandboxedCompilesHostDirCompiles) {
throw new Error(
'SANDBOXED_COMPILES enabled, but SANDBOXED_COMPILES_HOST_DIR_COMPILES not set'
)
}

module.exports.path.sandboxedCompilesHostDirOutput =
process.env.SANDBOXED_COMPILES_HOST_DIR_OUTPUT ||
process.env.OUTPUT_HOST_DIR
if (!module.exports.path.sandboxedCompilesHostDirOutput) {
// TODO(das7pad): Enforce in a future major version of Server Pro.
// throw new Error(
// 'SANDBOXED_COMPILES enabled, but SANDBOXED_COMPILES_HOST_DIR_OUTPUT not set'
// )
}
}

@@ -31,6 +31,7 @@ services:
TEXLIVE_IMAGE_USER: "tex"
DOCKER_RUNNER: "true"
COMPILES_HOST_DIR: $PWD/compiles
OUTPUT_HOST_DIR: $PWD/output
volumes:
- ./compiles:/overleaf/services/clsi/compiles
- /var/run/docker.sock:/var/run/docker.sock

@@ -49,5 +49,6 @@ services:
TEXLIVE_IMAGE_USER: "tex"
DOCKER_RUNNER: "true"
COMPILES_HOST_DIR: $PWD/compiles
OUTPUT_HOST_DIR: $PWD/output
command: npm run --silent test:acceptance

@@ -27,13 +27,13 @@
"async": "^3.2.5",
"body-parser": "^1.20.3",
"bunyan": "^1.8.15",
"diskusage": "^1.1.3",
"dockerode": "^4.0.5",
"express": "^4.21.2",
"lodash": "^4.17.21",
"p-limit": "^3.1.0",
"request": "^2.88.2",
"send": "^0.19.0",
"tar-fs": "^3.0.4",
"workerpool": "^6.1.5"
},
"devDependencies": {

@@ -11,6 +11,7 @@
const Client = require('./helpers/Client')
const request = require('request')
const ClsiApp = require('./helpers/ClsiApp')
const { expect } = require('chai')

describe('Broken LaTeX file', function () {
before(function (done) {

@@ -62,6 +63,10 @@ Hello world
return this.body.compile.status.should.equal('failure')
})

it('should return isInitialCompile flag', function () {
expect(this.body.compile.stats.isInitialCompile).to.equal(1)
})

it('should return output files', function () {
// NOTE: No output.pdf file.
this.body.compile.outputFiles

@@ -98,6 +103,10 @@ Hello world
return this.body.compile.status.should.equal('failure')
})

it('should not return isInitialCompile flag', function () {
expect(this.body.compile.stats.isInitialCompile).to.not.exist
})

it('should return output files', function () {
// NOTE: No output.pdf file.
this.body.compile.outputFiles

@@ -11,6 +11,7 @@
const Client = require('./helpers/Client')
const request = require('request')
const ClsiApp = require('./helpers/ClsiApp')
const { expect } = require('chai')

describe('Timed out compile', function () {
before(function (done) {

@@ -54,6 +55,10 @@ describe('Timed out compile', function () {
return this.body.compile.status.should.equal('timedout')
})

it('should return isInitialCompile flag', function () {
expect(this.body.compile.stats.isInitialCompile).to.equal(1)
})

return it('should return the log output file name', function () {
const outputFilePaths = this.body.compile.outputFiles.map(x => x.path)
return outputFilePaths.should.include('output.log')

@@ -20,7 +20,7 @@ SandboxedModule.configure({
err() {},
},
},
globals: { Buffer, console, process, URL },
globals: { Buffer, console, process, URL, Math },
sourceTransformers: {
removeNodePrefix: function (source) {
return source.replace(/require\(['"]node:/g, "require('")

@ -1,54 +1,11 @@
|
|||
const SandboxedModule = require('sandboxed-module')
|
||||
const sinon = require('sinon')
|
||||
const { expect } = require('chai')
|
||||
const modulePath = require('node:path').join(
|
||||
__dirname,
|
||||
'../../../app/js/CompileController'
|
||||
)
|
||||
const Errors = require('../../../app/js/Errors')
|
||||
|
||||
function tryImageNameValidation(method, imageNameField) {
|
||||
describe('when allowedImages is set', function () {
|
||||
beforeEach(function () {
|
||||
this.Settings.clsi = { docker: {} }
|
||||
this.Settings.clsi.docker.allowedImages = [
|
||||
'repo/image:tag1',
|
||||
'repo/image:tag2',
|
||||
]
|
||||
this.res.send = sinon.stub()
|
||||
this.res.status = sinon.stub().returns({ send: this.res.send })
|
||||
|
||||
this.CompileManager[method].reset()
|
||||
})
|
||||
|
||||
describe('with an invalid image', function () {
|
||||
beforeEach(function () {
|
||||
this.req.query[imageNameField] = 'something/evil:1337'
|
||||
this.CompileController[method](this.req, this.res, this.next)
|
||||
})
|
||||
it('should return a 400', function () {
|
||||
expect(this.res.status.calledWith(400)).to.equal(true)
|
||||
})
|
||||
it('should not run the query', function () {
|
||||
expect(this.CompileManager[method].called).to.equal(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('with a valid image', function () {
|
||||
beforeEach(function () {
|
||||
this.req.query[imageNameField] = 'repo/image:tag1'
|
||||
this.CompileController[method](this.req, this.res, this.next)
|
||||
})
|
||||
it('should not return a 400', function () {
|
||||
expect(this.res.status.calledWith(400)).to.equal(false)
|
||||
})
|
||||
it('should run the query', function () {
|
||||
expect(this.CompileManager[method].called).to.equal(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
describe('CompileController', function () {
|
||||
beforeEach(function () {
|
||||
this.buildId = 'build-id-123'
|
||||
|
@ -61,6 +18,11 @@ describe('CompileController', function () {
|
|||
clsi: {
|
||||
url: 'http://clsi.example.com',
|
||||
outputUrlPrefix: '/zone/b',
|
||||
downloadHost: 'http://localhost:3013',
|
||||
},
|
||||
clsiCache: {
|
||||
enabled: false,
|
||||
url: 'http://localhost:3044',
|
||||
},
|
||||
},
|
||||
}),
|
||||
|
@ -68,6 +30,11 @@ describe('CompileController', function () {
|
|||
Timer: sinon.stub().returns({ done: sinon.stub() }),
|
||||
},
|
||||
'./ProjectPersistenceManager': (this.ProjectPersistenceManager = {}),
|
||||
'./CLSICacheHandler': {
|
||||
notifyCLSICacheAboutBuild: sinon.stub(),
|
||||
downloadLatestCompileCache: sinon.stub().resolves(),
|
||||
downloadOutputDotSynctexFromCompileCache: sinon.stub().resolves(),
|
||||
},
|
||||
'./Errors': (this.Erros = Errors),
|
||||
},
|
||||
})
|
||||
|
@ -113,16 +80,21 @@ describe('CompileController', function () {
|
|||
this.timings = { bar: 2 }
|
||||
this.res.status = sinon.stub().returnsThis()
|
||||
this.res.send = sinon.stub()
|
||||
|
||||
this.CompileManager.doCompileWithLock = sinon
|
||||
.stub()
|
||||
.callsFake((_req, stats, timings, cb) => {
|
||||
Object.assign(stats, this.stats)
|
||||
Object.assign(timings, this.timings)
|
||||
cb(null, {
|
||||
outputFiles: this.output_files,
|
||||
buildId: this.buildId,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('successfully', function () {
|
||||
beforeEach(function () {
|
||||
this.CompileManager.doCompileWithLock = sinon.stub().yields(null, {
|
||||
outputFiles: this.output_files,
|
||||
stats: this.stats,
|
||||
timings: this.timings,
|
||||
buildId: this.buildId,
|
||||
})
|
||||
this.CompileController.compile(this.req, this.res)
|
||||
})
|
||||
|
||||
|
@ -166,12 +138,6 @@ describe('CompileController', function () {
|
|||
describe('without a outputUrlPrefix', function () {
|
||||
beforeEach(function () {
|
||||
this.Settings.apis.clsi.outputUrlPrefix = ''
|
||||
this.CompileManager.doCompileWithLock = sinon.stub().yields(null, {
|
||||
outputFiles: this.output_files,
|
||||
stats: this.stats,
|
||||
timings: this.timings,
|
||||
buildId: this.buildId,
|
||||
})
|
||||
this.CompileController.compile(this.req, this.res)
|
||||
})
|
||||
|
||||
|
@ -210,33 +176,35 @@ describe('CompileController', function () {
|
|||
build: 1234,
|
||||
},
|
||||
]
|
||||
this.CompileManager.doCompileWithLock = sinon.stub().yields(null, {
|
||||
outputFiles: this.output_files,
|
||||
stats: this.stats,
|
||||
timings: this.timings,
|
||||
buildId: this.buildId,
|
||||
})
|
||||
this.CompileManager.doCompileWithLock = sinon
|
||||
.stub()
|
||||
.callsFake((_req, stats, timings, cb) => {
|
||||
Object.assign(stats, this.stats)
|
||||
Object.assign(timings, this.timings)
|
||||
cb(null, {
|
||||
outputFiles: this.output_files,
|
||||
buildId: this.buildId,
|
||||
})
|
||||
})
|
||||
this.CompileController.compile(this.req, this.res)
|
||||
})
|
||||
|
||||
it('should return the JSON response with status failure', function () {
|
||||
this.res.status.calledWith(200).should.equal(true)
|
||||
this.res.send
|
||||
.calledWith({
|
||||
compile: {
|
||||
status: 'failure',
|
||||
error: null,
|
||||
stats: this.stats,
|
||||
timings: this.timings,
|
||||
outputUrlPrefix: '/zone/b',
|
||||
buildId: this.buildId,
|
||||
outputFiles: this.output_files.map(file => ({
|
||||
url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`,
|
||||
...file,
|
||||
})),
|
||||
},
|
||||
})
|
||||
.should.equal(true)
|
||||
this.res.send.should.have.been.calledWith({
|
||||
compile: {
|
||||
status: 'failure',
|
||||
error: null,
|
||||
stats: this.stats,
|
||||
timings: this.timings,
|
||||
outputUrlPrefix: '/zone/b',
|
||||
buildId: this.buildId,
|
||||
outputFiles: this.output_files.map(file => ({
|
||||
url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`,
|
||||
...file,
|
||||
})),
|
||||
},
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -255,33 +223,35 @@ describe('CompileController', function () {
|
|||
build: 1234,
|
||||
},
|
||||
]
|
||||
this.CompileManager.doCompileWithLock = sinon.stub().yields(null, {
|
||||
outputFiles: this.output_files,
|
||||
stats: this.stats,
|
||||
timings: this.timings,
|
||||
buildId: this.buildId,
|
||||
})
|
||||
this.CompileManager.doCompileWithLock = sinon
|
||||
.stub()
|
||||
.callsFake((_req, stats, timings, cb) => {
|
||||
Object.assign(stats, this.stats)
|
||||
Object.assign(timings, this.timings)
|
||||
cb(null, {
|
||||
outputFiles: this.output_files,
|
||||
buildId: this.buildId,
|
||||
})
|
||||
})
|
||||
this.CompileController.compile(this.req, this.res)
|
||||
})
|
||||
|
||||
it('should return the JSON response with status failure', function () {
|
||||
this.res.status.calledWith(200).should.equal(true)
|
||||
this.res.send
|
||||
.calledWith({
|
||||
compile: {
|
||||
status: 'failure',
|
||||
error: null,
|
||||
stats: this.stats,
|
||||
buildId: this.buildId,
|
||||
timings: this.timings,
|
||||
outputUrlPrefix: '/zone/b',
|
||||
outputFiles: this.output_files.map(file => ({
|
||||
url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`,
|
||||
...file,
|
||||
})),
|
||||
},
|
||||
})
|
||||
.should.equal(true)
|
||||
this.res.send.should.have.been.calledWith({
|
||||
compile: {
|
||||
status: 'failure',
|
||||
error: null,
|
||||
stats: this.stats,
|
||||
buildId: this.buildId,
|
||||
timings: this.timings,
|
||||
outputUrlPrefix: '/zone/b',
|
||||
outputFiles: this.output_files.map(file => ({
|
||||
url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`,
|
||||
...file,
|
||||
})),
|
||||
},
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -291,7 +261,11 @@ describe('CompileController', function () {
|
|||
error.buildId = this.buildId
|
||||
this.CompileManager.doCompileWithLock = sinon
|
||||
.stub()
|
||||
.callsArgWith(1, error, null)
|
||||
.callsFake((_req, stats, timings, cb) => {
|
||||
Object.assign(stats, this.stats)
|
||||
Object.assign(timings, this.timings)
|
||||
cb(error)
|
||||
})
|
||||
this.CompileController.compile(this.req, this.res)
|
||||
})
|
||||
|
||||
|
@ -305,9 +279,8 @@ describe('CompileController', function () {
|
|||
outputUrlPrefix: '/zone/b',
|
||||
outputFiles: [],
|
||||
buildId: this.buildId,
|
||||
// JSON.stringify will omit these
|
||||
stats: undefined,
|
||||
timings: undefined,
|
||||
stats: this.stats,
|
||||
timings: this.timings,
|
||||
},
|
||||
})
|
||||
.should.equal(true)
|
||||
|
@ -321,7 +294,11 @@ describe('CompileController', function () {
|
|||
)
|
||||
this.CompileManager.doCompileWithLock = sinon
|
||||
.stub()
|
||||
.callsArgWith(1, error, null)
|
||||
.callsFake((_req, stats, timings, cb) => {
|
||||
Object.assign(stats, this.stats)
|
||||
Object.assign(timings, this.timings)
|
||||
cb(error)
|
||||
})
|
||||
this.CompileController.compile(this.req, this.res)
|
||||
})
|
||||
|
||||
|
@ -334,9 +311,10 @@ describe('CompileController', function () {
|
|||
error: 'too many concurrent compile requests',
|
||||
outputUrlPrefix: '/zone/b',
|
||||
outputFiles: [],
|
||||
stats: this.stats,
|
||||
timings: this.timings,
|
||||
// JSON.stringify will omit these undefined values
|
||||
buildId: undefined,
|
||||
stats: undefined,
|
||||
timings: undefined,
|
||||
},
|
||||
})
|
||||
.should.equal(true)
|
||||
|
@ -349,7 +327,11 @@ describe('CompileController', function () {
|
|||
this.error.timedout = true
|
||||
this.CompileManager.doCompileWithLock = sinon
|
||||
.stub()
|
||||
.callsArgWith(1, this.error, null)
|
||||
.callsFake((_req, stats, timings, cb) => {
|
||||
Object.assign(stats, this.stats)
|
||||
Object.assign(timings, this.timings)
|
||||
cb(this.error)
|
||||
})
|
||||
this.CompileController.compile(this.req, this.res)
|
||||
})
|
||||
|
||||
|
@ -362,10 +344,10 @@ describe('CompileController', function () {
|
|||
error: this.message,
|
||||
outputUrlPrefix: '/zone/b',
|
||||
outputFiles: [],
|
||||
// JSON.stringify will omit these
|
||||
stats: this.stats,
|
||||
timings: this.timings,
|
||||
// JSON.stringify will omit these undefined values
|
||||
buildId: undefined,
|
||||
stats: undefined,
|
||||
timings: undefined,
|
||||
},
|
||||
})
|
||||
.should.equal(true)
|
||||
|
@ -376,7 +358,11 @@ describe('CompileController', function () {
|
|||
beforeEach(function () {
|
||||
this.CompileManager.doCompileWithLock = sinon
|
||||
.stub()
|
||||
.callsArgWith(1, null, [])
|
||||
.callsFake((_req, stats, timings, cb) => {
|
||||
Object.assign(stats, this.stats)
|
||||
Object.assign(timings, this.timings)
|
||||
cb(null, {})
|
||||
})
|
||||
this.CompileController.compile(this.req, this.res)
|
||||
})
|
||||
|
||||
|
@ -389,10 +375,10 @@ describe('CompileController', function () {
|
|||
status: 'failure',
|
||||
outputUrlPrefix: '/zone/b',
|
||||
outputFiles: [],
|
||||
// JSON.stringify will omit these
|
||||
stats: this.stats,
|
||||
timings: this.timings,
|
||||
// JSON.stringify will omit these undefined values
|
||||
buildId: undefined,
|
||||
stats: undefined,
|
||||
timings: undefined,
|
||||
},
|
||||
})
|
||||
.should.equal(true)
|
||||
|
@ -439,8 +425,6 @@ describe('CompileController', function () {
|
|||
})
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
tryImageNameValidation('syncFromCode', 'imageName')
|
||||
})
|
||||
|
||||
describe('syncFromPdf', function () {
|
||||
|
@ -476,8 +460,6 @@ describe('CompileController', function () {
|
|||
})
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
tryImageNameValidation('syncFromPdf', 'imageName')
|
||||
})
|
||||
|
||||
describe('wordcount', function () {
|
||||
|
@ -511,7 +493,5 @@ describe('CompileController', function () {
|
|||
})
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
tryImageNameValidation('wordcount', 'image')
|
||||
})
|
||||
})
|
||||
|
|
|
@ -62,6 +62,7 @@ describe('CompileManager', function () {
|
|||
}
|
||||
this.OutputCacheManager = {
|
||||
promises: {
|
||||
queueDirOperation: sinon.stub().callsArg(1),
|
||||
saveOutputFiles: sinon
|
||||
.stub()
|
||||
.resolves({ outputFiles: this.buildFiles, buildId: this.buildId }),
|
||||
|
@ -160,6 +161,11 @@ describe('CompileManager', function () {
|
|||
'./LockManager': this.LockManager,
|
||||
'./SynctexOutputParser': this.SynctexOutputParser,
|
||||
'fs/promises': this.fsPromises,
|
||||
'./CLSICacheHandler': {
|
||||
notifyCLSICacheAboutBuild: sinon.stub(),
|
||||
downloadLatestCompileCache: sinon.stub().resolves(),
|
||||
downloadOutputDotSynctexFromCompileCache: sinon.stub().resolves(),
|
||||
},
|
||||
},
|
||||
})
|
||||
})
|
||||
|
@ -177,6 +183,11 @@ describe('CompileManager', function () {
|
|||
flags: (this.flags = ['-file-line-error']),
|
||||
compileGroup: (this.compileGroup = 'compile-group'),
|
||||
stopOnFirstError: false,
|
||||
metricsOpts: {
|
||||
path: 'clsi-perf',
|
||||
method: 'minimal',
|
||||
compile: 'initial',
|
||||
},
|
||||
}
|
||||
this.env = {
|
||||
OVERLEAF_PROJECT_ID: this.projectId,
|
||||
|
@ -188,7 +199,7 @@ describe('CompileManager', function () {
|
|||
const error = new Error('locked')
|
||||
this.LockManager.acquire.throws(error)
|
||||
await expect(
|
||||
this.CompileManager.promises.doCompileWithLock(this.request)
|
||||
this.CompileManager.promises.doCompileWithLock(this.request, {}, {})
|
||||
).to.be.rejectedWith(error)
|
||||
})
|
||||
|
||||
|
@ -206,7 +217,9 @@ describe('CompileManager', function () {
|
|||
describe('normally', function () {
|
||||
beforeEach(async function () {
|
||||
this.result = await this.CompileManager.promises.doCompileWithLock(
|
||||
this.request
|
||||
this.request,
|
||||
{},
|
||||
{}
|
||||
)
|
||||
})
|
||||
|
||||
|
@ -260,7 +273,11 @@ describe('CompileManager', function () {
|
|||
describe('with draft mode', function () {
|
||||
beforeEach(async function () {
|
||||
this.request.draft = true
|
||||
await this.CompileManager.promises.doCompileWithLock(this.request)
|
||||
await this.CompileManager.promises.doCompileWithLock(
|
||||
this.request,
|
||||
{},
|
||||
{}
|
||||
)
|
||||
})
|
||||
|
||||
it('should inject the draft mode header', function () {
|
||||
|
@ -273,7 +290,11 @@ describe('CompileManager', function () {
|
|||
describe('with a check option', function () {
|
||||
beforeEach(async function () {
|
||||
this.request.check = 'error'
|
||||
await this.CompileManager.promises.doCompileWithLock(this.request)
|
||||
await this.CompileManager.promises.doCompileWithLock(
|
||||
this.request,
|
||||
{},
|
||||
{}
|
||||
)
|
||||
})
|
||||
|
||||
it('should run chktex', function () {
|
||||
|
@ -305,7 +326,11 @@ describe('CompileManager', function () {
|
|||
beforeEach(async function () {
|
||||
this.request.rootResourcePath = 'main.Rtex'
|
||||
this.request.check = 'error'
|
||||
await this.CompileManager.promises.doCompileWithLock(this.request)
|
||||
await this.CompileManager.promises.doCompileWithLock(
|
||||
this.request,
|
||||
{},
|
||||
{}
|
||||
)
|
||||
})
|
||||
|
||||
it('should not run chktex', function () {
|
||||
|
@ -334,7 +359,7 @@ describe('CompileManager', function () {
|
|||
error.timedout = true
|
||||
this.LatexRunner.promises.runLatex.rejects(error)
|
||||
await expect(
|
||||
this.CompileManager.promises.doCompileWithLock(this.request)
|
||||
this.CompileManager.promises.doCompileWithLock(this.request, {}, {})
|
||||
).to.be.rejected
|
||||
})
|
||||
|
||||
|
@ -357,7 +382,7 @@ describe('CompileManager', function () {
|
|||
error.terminated = true
|
||||
this.LatexRunner.promises.runLatex.rejects(error)
|
||||
await expect(
|
||||
this.CompileManager.promises.doCompileWithLock(this.request)
|
||||
this.CompileManager.promises.doCompileWithLock(this.request, {}, {})
|
||||
).to.be.rejected
|
||||
})
|
||||
|
||||
|
@ -455,7 +480,7 @@ describe('CompileManager', function () {
|
|||
this.filename,
|
||||
this.line,
|
||||
this.column,
|
||||
customImageName
|
||||
{ imageName: customImageName }
|
||||
)
|
||||
})
|
||||
|
||||
|
@ -497,7 +522,7 @@ describe('CompileManager', function () {
|
|||
this.page,
|
||||
this.h,
|
||||
this.v,
|
||||
''
|
||||
{ imageName: '' }
|
||||
)
|
||||
})
|
||||
|
||||
|
@ -532,7 +557,7 @@ describe('CompileManager', function () {
|
|||
this.page,
|
||||
this.h,
|
||||
this.v,
|
||||
customImageName
|
||||
{ imageName: customImageName }
|
||||
)
|
||||
})
|
||||
|
||||
|
|
|
@ -76,8 +76,11 @@ describe('DockerRunner', function () {
|
|||
this.env = {}
|
||||
this.callback = sinon.stub()
|
||||
this.project_id = 'project-id-123'
|
||||
this.volumes = { '/local/compile/directory': '/compile' }
|
||||
this.volumes = { '/some/host/dir/compiles/directory': '/compile' }
|
||||
this.Settings.clsi.docker.image = this.defaultImage = 'default-image'
|
||||
this.Settings.path.sandboxedCompilesHostDirCompiles =
|
||||
'/some/host/dir/compiles'
|
||||
this.Settings.path.sandboxedCompilesHostDirOutput = '/some/host/dir/output'
|
||||
this.compileGroup = 'compile-group'
|
||||
return (this.Settings.clsi.docker.env = { PATH: 'mock-path' })
|
||||
})
|
||||
|
@ -151,9 +154,8 @@ describe('DockerRunner', function () {
|
|||
})
|
||||
})
|
||||
|
||||
describe('when path.sandboxedCompilesHostDir is set', function () {
|
||||
describe('standard compile', function () {
|
||||
beforeEach(function () {
|
||||
this.Settings.path.sandboxedCompilesHostDir = '/some/host/dir/compiles'
|
||||
this.directory = '/var/lib/overleaf/data/compiles/xyz'
|
||||
this.DockerRunner._runAndWaitForContainer = sinon
|
||||
.stub()
|
||||
|
@ -183,6 +185,99 @@ describe('DockerRunner', function () {
|
|||
})
|
||||
})
|
||||
|
||||
describe('synctex-output', function () {
|
||||
beforeEach(function () {
|
||||
this.directory = '/var/lib/overleaf/data/output/xyz/generated-files/id'
|
||||
this.DockerRunner._runAndWaitForContainer = sinon
|
||||
.stub()
|
||||
.callsArgWith(3, null, (this.output = 'mock-output'))
|
||||
this.DockerRunner.run(
|
||||
this.project_id,
|
||||
this.command,
|
||||
this.directory,
|
||||
this.image,
|
||||
this.timeout,
|
||||
this.env,
|
||||
'synctex-output',
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
it('should re-write the bind directory and set ro flag', function () {
|
||||
const volumes =
|
||||
this.DockerRunner._runAndWaitForContainer.lastCall.args[1]
|
||||
expect(volumes).to.deep.equal({
|
||||
'/some/host/dir/output/xyz/generated-files/id': '/compile:ro',
|
||||
})
|
||||
})
|
||||
|
||||
it('should call the callback', function () {
|
||||
this.callback.calledWith(null, this.output).should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('synctex', function () {
|
||||
beforeEach(function () {
|
||||
this.directory = '/var/lib/overleaf/data/compile/xyz'
|
||||
this.DockerRunner._runAndWaitForContainer = sinon
|
||||
.stub()
|
||||
.callsArgWith(3, null, (this.output = 'mock-output'))
|
||||
this.DockerRunner.run(
|
||||
this.project_id,
|
||||
this.command,
|
||||
this.directory,
|
||||
this.image,
|
||||
this.timeout,
|
||||
this.env,
|
||||
'synctex',
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
it('should re-write the bind directory', function () {
|
||||
const volumes =
|
||||
this.DockerRunner._runAndWaitForContainer.lastCall.args[1]
|
||||
expect(volumes).to.deep.equal({
|
||||
'/some/host/dir/compiles/xyz': '/compile:ro',
|
||||
})
|
||||
})
|
||||
|
||||
it('should call the callback', function () {
|
||||
this.callback.calledWith(null, this.output).should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('wordcount', function () {
|
||||
beforeEach(function () {
|
||||
this.directory = '/var/lib/overleaf/data/compile/xyz'
|
||||
this.DockerRunner._runAndWaitForContainer = sinon
|
||||
.stub()
|
||||
.callsArgWith(3, null, (this.output = 'mock-output'))
|
||||
this.DockerRunner.run(
|
||||
this.project_id,
|
||||
this.command,
|
||||
this.directory,
|
||||
this.image,
|
||||
this.timeout,
|
||||
this.env,
|
||||
'wordcount',
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
it('should re-write the bind directory', function () {
|
||||
const volumes =
|
||||
this.DockerRunner._runAndWaitForContainer.lastCall.args[1]
|
||||
expect(volumes).to.deep.equal({
|
||||
'/some/host/dir/compiles/xyz': '/compile:ro',
|
||||
})
|
||||
})
|
||||
|
||||
it('should call the callback', function () {
|
||||
this.callback.calledWith(null, this.output).should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the run throws an error', function () {
|
||||
beforeEach(function () {
|
||||
let firstTime = true
|
||||
|
@ -390,7 +485,7 @@ describe('DockerRunner', function () {
|
|||
const options =
|
||||
this.DockerRunner._runAndWaitForContainer.lastCall.args[0]
|
||||
return expect(options.HostConfig).to.deep.include({
|
||||
Binds: ['/local/compile/directory:/compile:rw'],
|
||||
Binds: ['/some/host/dir/compiles/directory:/compile:rw'],
|
||||
LogConfig: { Type: 'none', Config: {} },
|
||||
CapDrop: 'ALL',
|
||||
SecurityOpt: ['no-new-privileges'],
|
||||
|
@ -562,82 +657,6 @@ describe('DockerRunner', function () {
|
|||
})
|
||||
})
|
||||
|
||||
describe('when a volume does not exist', function () {
|
||||
beforeEach(function () {
|
||||
this.fs.stat = sinon.stub().yields(new Error('no such path'))
|
||||
return this.DockerRunner.startContainer(
|
||||
this.options,
|
||||
this.volumes,
|
||||
this.attachStreamHandler,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
it('should not try to create the container', function () {
|
||||
return this.createContainer.called.should.equal(false)
|
||||
})
|
||||
|
||||
it('should call the callback with an error', function () {
|
||||
this.callback.calledWith(sinon.match(Error)).should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when a volume exists but is not a directory', function () {
|
||||
beforeEach(function () {
|
||||
this.fs.stat = sinon.stub().yields(null, {
|
||||
isDirectory() {
|
||||
return false
|
||||
},
|
||||
})
|
||||
return this.DockerRunner.startContainer(
|
||||
this.options,
|
||||
this.volumes,
|
||||
this.attachStreamHandler,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
it('should not try to create the container', function () {
|
||||
return this.createContainer.called.should.equal(false)
|
||||
})
|
||||
|
||||
it('should call the callback with an error', function () {
|
||||
this.callback.calledWith(sinon.match(Error)).should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when a volume does not exist, but sibling-containers are used', function () {
|
||||
beforeEach(function () {
|
||||
this.fs.stat = sinon.stub().yields(new Error('no such path'))
|
||||
this.Settings.path.sandboxedCompilesHostDir = '/some/path'
|
||||
this.container.start = sinon.stub().yields()
|
||||
return this.DockerRunner.startContainer(
|
||||
this.options,
|
||||
this.volumes,
|
||||
() => {},
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
return delete this.Settings.path.sandboxedCompilesHostDir
|
||||
})
|
||||
|
||||
it('should start the container with the given name', function () {
|
||||
this.getContainer.calledWith(this.options.name).should.equal(true)
|
||||
return this.container.start.called.should.equal(true)
|
||||
})
|
||||
|
||||
it('should not try to create the container', function () {
|
||||
return this.createContainer.called.should.equal(false)
|
||||
})
|
||||
|
||||
return it('should call the callback', function () {
|
||||
this.callback.called.should.equal(true)
|
||||
return this.callback.calledWith(new Error()).should.equal(false)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when the container tries to be created, but already has been (race condition)', function () {})
|
||||
})
|
||||
|
||||
|
|
|
@ -21,12 +21,16 @@ const tk = require('timekeeper')
|
|||
|
||||
describe('ProjectPersistenceManager', function () {
|
||||
beforeEach(function () {
|
||||
this.fsPromises = {
|
||||
statfs: sinon.stub(),
|
||||
}
|
||||
|
||||
this.ProjectPersistenceManager = SandboxedModule.require(modulePath, {
|
||||
requires: {
|
||||
'@overleaf/metrics': (this.Metrics = { gauge: sinon.stub() }),
|
||||
'./UrlCache': (this.UrlCache = {}),
|
||||
'./CompileManager': (this.CompileManager = {}),
|
||||
diskusage: (this.diskusage = { check: sinon.stub() }),
|
||||
fs: { promises: this.fsPromises },
|
||||
'@overleaf/settings': (this.settings = {
|
||||
project_cache_length_ms: 1000,
|
||||
path: {
|
||||
|
@ -44,9 +48,10 @@ describe('ProjectPersistenceManager', function () {
|
|||
|
||||
describe('refreshExpiryTimeout', function () {
|
||||
it('should leave expiry alone if plenty of disk', function (done) {
|
||||
this.diskusage.check.resolves({
|
||||
available: 40,
|
||||
total: 100,
|
||||
this.fsPromises.statfs.resolves({
|
||||
blocks: 100,
|
||||
bsize: 1,
|
||||
bavail: 40,
|
||||
})
|
||||
|
||||
this.ProjectPersistenceManager.refreshExpiryTimeout(() => {
|
||||
|
@ -62,9 +67,10 @@ describe('ProjectPersistenceManager', function () {
|
|||
})
|
||||
|
||||
it('should drop EXPIRY_TIMEOUT 10% if low disk usage', function (done) {
|
||||
this.diskusage.check.resolves({
|
||||
available: 5,
|
||||
total: 100,
|
||||
this.fsPromises.statfs.resolves({
|
||||
blocks: 100,
|
||||
bsize: 1,
|
||||
bavail: 5,
|
||||
})
|
||||
|
||||
this.ProjectPersistenceManager.refreshExpiryTimeout(() => {
|
||||
|
@ -78,9 +84,10 @@ describe('ProjectPersistenceManager', function () {
|
|||
})
|
||||
|
||||
it('should not drop EXPIRY_TIMEOUT to below 50% of project_cache_length_ms', function (done) {
|
||||
this.diskusage.check.resolves({
|
||||
available: 5,
|
||||
total: 100,
|
||||
this.fsPromises.statfs.resolves({
|
||||
blocks: 100,
|
||||
bsize: 1,
|
||||
bavail: 5,
|
||||
})
|
||||
this.ProjectPersistenceManager.EXPIRY_TIMEOUT = 500
|
||||
this.ProjectPersistenceManager.refreshExpiryTimeout(() => {
|
||||
|
@ -94,7 +101,7 @@ describe('ProjectPersistenceManager', function () {
|
|||
})
|
||||
|
||||
it('should not modify EXPIRY_TIMEOUT if there is an error getting disk values', function (done) {
|
||||
this.diskusage.check.throws(new Error())
|
||||
this.fsPromises.statfs.rejects(new Error())
|
||||
this.ProjectPersistenceManager.refreshExpiryTimeout(() => {
|
||||
this.ProjectPersistenceManager.EXPIRY_TIMEOUT.should.equal(1000)
|
||||
done()
|
||||
|
|
|
@@ -6,4 +6,4 @@ contacts
--esmock-loader=True
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0

@@ -6,4 +6,4 @@ docstore
--esmock-loader=False
--node-version=20.18.2
--public-repo=True
--script-version=4.5.0
--script-version=4.7.0

@@ -160,14 +160,6 @@ const DocumentManager = {
alreadyLoaded,
} = await DocumentManager.getDoc(projectId, docId)

if (oldLines != null && oldLines.length > 0 && oldLines[0].text != null) {
logger.debug(
{ docId, projectId, oldLines, newLines },
'document is JSON so not updating'
)
return
}

logger.debug(
{ docId, projectId, oldLines, newLines },
'setting a document via http'

@@ -6,4 +6,4 @@ document-updater
--esmock-loader=False
--node-version=20.18.2
--public-repo=True
--script-version=4.5.0
--script-version=4.7.0

@@ -21,6 +21,7 @@ services:
ELASTIC_SEARCH_DSN: es:9200
REDIS_HOST: redis
QUEUES_REDIS_HOST: redis
HISTORY_REDIS_HOST: redis
ANALYTICS_QUEUES_REDIS_HOST: redis
MONGO_HOST: mongo
POSTGRES_HOST: postgres

@@ -30,6 +30,7 @@ services:
environment:
ELASTIC_SEARCH_DSN: es:9200
REDIS_HOST: redis
HISTORY_REDIS_HOST: redis
QUEUES_REDIS_HOST: redis
ANALYTICS_QUEUES_REDIS_HOST: redis
MONGO_HOST: mongo

@ -0,0 +1,211 @@
|
|||
// @ts-check
|
||||
|
||||
const Settings = require('@overleaf/settings')
|
||||
const logger = require('@overleaf/logger')
|
||||
const RedisManager = require('../app/js/RedisManager')
|
||||
const minimist = require('minimist')
|
||||
const { db, ObjectId } = require('../app/js/mongodb')
|
||||
const ProjectManager = require('../app/js/ProjectManager')
|
||||
const OError = require('@overleaf/o-error')
|
||||
|
||||
const docUpdaterKeys = Settings.redis.documentupdater.key_schema
|
||||
|
||||
const rclient = RedisManager.rclient
|
||||
|
||||
const { verbose, commit, ...args } = minimist(process.argv.slice(2), {
|
||||
boolean: ['verbose', 'commit'],
|
||||
string: ['batchSize'],
|
||||
default: {
|
||||
batchSize: '1000',
|
||||
},
|
||||
})
|
||||
|
||||
logger.logger.level(verbose ? 'debug' : 'warn')
|
||||
|
||||
const batchSize = parseInt(args.batchSize, 10)
|
||||
|
||||
/**
|
||||
* @typedef {import('ioredis').Redis} Redis
|
||||
*/
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string} key
|
||||
* @return {string|void}
|
||||
*/
|
||||
function extractDocId(key) {
|
||||
const matches = key.match(/ProjectHistoryId:\{(.*?)\}/)
|
||||
if (matches) {
|
||||
return matches[1]
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string} docId
|
||||
* @return {Promise<{projectId: string, historyId: string}>}
|
||||
*/
|
||||
async function getHistoryId(docId) {
|
||||
const doc = await db.docs.findOne(
|
||||
{ _id: new ObjectId(docId) },
|
||||
{ projection: { project_id: 1 }, readPreference: 'secondaryPreferred' }
|
||||
)
|
||||
|
||||
if (!doc) {
|
||||
throw new OError('Doc not present in mongo', { docId })
|
||||
}
|
||||
|
||||
const project = await db.projects.findOne(
|
||||
{ _id: doc.project_id },
|
||||
{
|
||||
projection: { 'overleaf.history': 1 },
|
||||
readPreference: 'secondaryPreferred',
|
||||
}
|
||||
)
|
||||
|
||||
if (!project?.overleaf?.history?.id) {
|
||||
throw new OError('Project not present in mongo (or has no history id)', {
|
||||
docId,
|
||||
project,
|
||||
doc,
|
||||
})
|
||||
}
|
||||
|
||||
return {
|
||||
historyId: project?.overleaf?.history?.id,
|
||||
projectId: doc.project_id.toString(),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @typedef {Object} UpdateableDoc
|
||||
* @property {string} docId
|
||||
* @property {string} projectId
|
||||
* @property {string} historyId
|
||||
*/
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {Redis} node
|
||||
* @param {Array<string>} docIds
|
||||
* @return {Promise<Array<UpdateableDoc>>}
|
||||
*/
|
||||
async function findDocsWithMissingHistoryIds(node, docIds) {
|
||||
const historyIds = await node.mget(
|
||||
docIds.map(docId => docUpdaterKeys.projectHistoryId({ doc_id: docId }))
|
||||
)
|
||||
|
||||
const results = []
|
||||
|
||||
for (const index in docIds) {
|
||||
const historyId = historyIds[index]
|
||||
const docId = docIds[index]
|
||||
if (!historyId) {
|
||||
try {
|
||||
const { projectId, historyId } = await getHistoryId(docId)
|
||||
results.push({ projectId, historyId, docId })
|
||||
} catch (error) {
|
||||
logger.warn(
|
||||
{ error },
|
||||
'Error gathering data for doc with missing history id'
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
return results
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {Array<UpdateableDoc>} updates
|
||||
* @return {Promise<void>}
|
||||
*/
|
||||
async function fixAndFlushProjects(updates) {
|
||||
for (const update of updates) {
|
||||
if (commit) {
|
||||
try {
|
||||
await rclient.set(
|
||||
docUpdaterKeys.projectHistoryId({ doc_id: update.docId }),
|
||||
update.historyId
|
||||
)
|
||||
logger.debug({ ...update }, 'Set history id in redis')
|
||||
await ProjectManager.promises.flushAndDeleteProjectWithLocks(
|
||||
update.projectId,
|
||||
{}
|
||||
)
|
||||
logger.debug({ ...update }, 'Flushed project')
|
||||
} catch (err) {
|
||||
logger.error({ err, ...update }, 'Error fixing and flushing project')
|
||||
}
|
||||
} else {
|
||||
logger.debug(
|
||||
{ ...update },
|
||||
'Would have set history id in redis and flushed'
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {Array<Redis>} nodes
|
||||
* @param {number} batchSize
|
||||
* @return {Promise<void>}
|
||||
*/
|
||||
async function scanNodes(nodes, batchSize = 1000) {
|
||||
let scanned = 0
|
||||
|
||||
for (const node of nodes) {
|
||||
const stream = node.scanStream({
|
||||
match: docUpdaterKeys.projectHistoryId({ doc_id: '*' }),
|
||||
count: batchSize,
|
||||
})
|
||||
|
||||
for await (const docKeys of stream) {
|
||||
if (docKeys.length === 0) {
|
||||
continue
|
||||
}
|
||||
stream.pause()
|
||||
scanned += docKeys.length
|
||||
|
||||
const docIds = docKeys
|
||||
.map((/** @type {string} */ docKey) => extractDocId(docKey))
|
||||
.filter(Boolean)
|
||||
|
||||
try {
|
||||
const updates = await findDocsWithMissingHistoryIds(node, docIds)
|
||||
if (updates.length > 0) {
|
||||
logger.info({ updates }, 'Found doc(s) with missing history ids')
|
||||
await fixAndFlushProjects(updates)
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error({ docKeys }, 'Error processing batch')
|
||||
} finally {
|
||||
stream.resume()
|
||||
}
|
||||
}
|
||||
|
||||
logger.info({ scanned, server: node.serverInfo.role }, 'Scanned node')
|
||||
}
|
||||
}
|
||||
|
||||
async function main({ batchSize }) {
|
||||
const nodes = (typeof rclient.nodes === 'function'
|
||||
? rclient.nodes('master')
|
||||
: undefined) || [rclient]
|
||||
await scanNodes(nodes, batchSize)
|
||||
}
|
||||
|
||||
let code = 0
|
||||
|
||||
main({ batchSize })
|
||||
.then(() => {
|
||||
logger.info({}, 'done')
|
||||
})
|
||||
.catch(error => {
|
||||
logger.error({ error }, 'error')
|
||||
code = 1
|
||||
})
|
||||
.finally(() => {
|
||||
rclient.quit().then(() => process.exit(code))
|
||||
})
|
|
@ -16,13 +16,16 @@ const DocUpdaterClient = require('./helpers/DocUpdaterClient')
|
|||
const DocUpdaterApp = require('./helpers/DocUpdaterApp')
|
||||
|
||||
describe('Applying updates to a doc', function () {
|
||||
before(function (done) {
|
||||
beforeEach(function (done) {
|
||||
sinon.spy(MockWebApi, 'getDocument')
|
||||
this.lines = ['one', 'two', 'three']
|
||||
this.version = 42
|
||||
this.op = {
|
||||
i: 'one and a half\n',
|
||||
p: 4,
|
||||
}
|
||||
this.project_id = DocUpdaterClient.randomId()
|
||||
this.doc_id = DocUpdaterClient.randomId()
|
||||
this.update = {
|
||||
doc: this.doc_id,
|
||||
op: [this.op],
|
||||
|
@ -31,12 +34,12 @@ describe('Applying updates to a doc', function () {
|
|||
this.result = ['one', 'one and a half', 'two', 'three']
|
||||
DocUpdaterApp.ensureRunning(done)
|
||||
})
|
||||
afterEach(function () {
|
||||
sinon.restore()
|
||||
})
|
||||
|
||||
describe('when the document is not loaded', function () {
|
||||
before(function (done) {
|
||||
this.project_id = DocUpdaterClient.randomId()
|
||||
this.doc_id = DocUpdaterClient.randomId()
|
||||
sinon.spy(MockWebApi, 'getDocument')
|
||||
beforeEach(function (done) {
|
||||
this.startTime = Date.now()
|
||||
MockWebApi.insertDoc(this.project_id, this.doc_id, {
|
||||
lines: this.lines,
|
||||
|
@ -50,15 +53,25 @@ describe('Applying updates to a doc', function () {
|
|||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
setTimeout(done, 200)
|
||||
setTimeout(() => {
|
||||
rclientProjectHistory.get(
|
||||
ProjectHistoryKeys.projectHistoryFirstOpTimestamp({
|
||||
project_id: this.project_id,
|
||||
}),
|
||||
(error, result) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
result = parseInt(result, 10)
|
||||
this.firstOpTimestamp = result
|
||||
done()
|
||||
}
|
||||
)
|
||||
}, 200)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
after(function () {
|
||||
MockWebApi.getDocument.restore()
|
||||
})
|
||||
|
||||
it('should load the document from the web API', function () {
|
||||
MockWebApi.getDocument
|
||||
.calledWith(this.project_id, this.doc_id)
|
||||
|
@ -92,21 +105,8 @@ describe('Applying updates to a doc', function () {
|
|||
)
|
||||
})
|
||||
|
||||
it('should set the first op timestamp', function (done) {
|
||||
rclientProjectHistory.get(
|
||||
ProjectHistoryKeys.projectHistoryFirstOpTimestamp({
|
||||
project_id: this.project_id,
|
||||
}),
|
||||
(error, result) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
result = parseInt(result, 10)
|
||||
result.should.be.within(this.startTime, Date.now())
|
||||
this.firstOpTimestamp = result
|
||||
done()
|
||||
}
|
||||
)
|
||||
it('should set the first op timestamp', function () {
|
||||
this.firstOpTimestamp.should.be.within(this.startTime, Date.now())
|
||||
})
|
||||
|
||||
it('should yield last updated time', function (done) {
|
||||
|
@ -138,7 +138,7 @@ describe('Applying updates to a doc', function () {
|
|||
})
|
||||
|
||||
describe('when sending another update', function () {
|
||||
before(function (done) {
|
||||
beforeEach(function (done) {
|
||||
this.timeout(10000)
|
||||
this.second_update = Object.assign({}, this.update)
|
||||
this.second_update.v = this.version + 1
|
||||
|
@ -207,13 +207,85 @@ describe('Applying updates to a doc', function () {
|
|||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when another client is sending a concurrent update', function () {
|
||||
beforeEach(function (done) {
|
||||
this.timeout(10000)
|
||||
this.otherUpdate = {
|
||||
doc: this.doc_id,
|
||||
op: [{ p: 8, i: 'two and a half\n' }],
|
||||
v: this.version,
|
||||
meta: { source: 'other-random-publicId' },
|
||||
}
|
||||
this.secondStartTime = Date.now()
|
||||
DocUpdaterClient.sendUpdate(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.otherUpdate,
|
||||
error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
setTimeout(done, 200)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should update the doc', function (done) {
|
||||
DocUpdaterClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
(error, res, doc) => {
|
||||
if (error) done(error)
|
||||
doc.lines.should.deep.equal([
|
||||
'one',
|
||||
'one and a half',
|
||||
'two',
|
||||
'two and a half',
|
||||
'three',
|
||||
])
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should not change the first op timestamp', function (done) {
|
||||
rclientProjectHistory.get(
|
||||
ProjectHistoryKeys.projectHistoryFirstOpTimestamp({
|
||||
project_id: this.project_id,
|
||||
}),
|
||||
(error, result) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
result = parseInt(result, 10)
|
||||
result.should.equal(this.firstOpTimestamp)
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should yield last updated time', function (done) {
|
||||
DocUpdaterClient.getProjectLastUpdatedAt(
|
||||
this.project_id,
|
||||
(error, res, body) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
res.statusCode.should.equal(200)
|
||||
body.lastUpdatedAt.should.be.within(
|
||||
this.secondStartTime,
|
||||
Date.now()
|
||||
)
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the document is loaded', function () {
|
||||
before(function (done) {
|
||||
this.project_id = DocUpdaterClient.randomId()
|
||||
this.doc_id = DocUpdaterClient.randomId()
|
||||
|
||||
beforeEach(function (done) {
|
||||
MockWebApi.insertDoc(this.project_id, this.doc_id, {
|
||||
lines: this.lines,
|
||||
version: this.version,
|
||||
|
@ -222,7 +294,7 @@ describe('Applying updates to a doc', function () {
|
|||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
sinon.spy(MockWebApi, 'getDocument')
|
||||
sinon.resetHistory()
|
||||
DocUpdaterClient.sendUpdate(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
|
@ -237,10 +309,6 @@ describe('Applying updates to a doc', function () {
|
|||
})
|
||||
})
|
||||
|
||||
after(function () {
|
||||
MockWebApi.getDocument.restore()
|
||||
})
|
||||
|
||||
it('should not need to call the web api', function () {
|
||||
MockWebApi.getDocument.called.should.equal(false)
|
||||
})
|
||||
|
@ -272,10 +340,7 @@ describe('Applying updates to a doc', function () {
|
|||
})
|
||||
|
||||
describe('when the document is loaded and is using project-history only', function () {
|
||||
before(function (done) {
|
||||
this.project_id = DocUpdaterClient.randomId()
|
||||
this.doc_id = DocUpdaterClient.randomId()
|
||||
|
||||
beforeEach(function (done) {
|
||||
MockWebApi.insertDoc(this.project_id, this.doc_id, {
|
||||
lines: this.lines,
|
||||
version: this.version,
|
||||
|
@ -284,7 +349,7 @@ describe('Applying updates to a doc', function () {
|
|||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
sinon.spy(MockWebApi, 'getDocument')
|
||||
sinon.resetHistory()
|
||||
DocUpdaterClient.sendUpdate(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
|
@ -299,10 +364,6 @@ describe('Applying updates to a doc', function () {
|
|||
})
|
||||
})
|
||||
|
||||
after(function () {
|
||||
MockWebApi.getDocument.restore()
|
||||
})
|
||||
|
||||
it('should update the doc', function (done) {
|
||||
DocUpdaterClient.getDoc(
|
||||
this.project_id,
|
||||
|
@ -331,9 +392,7 @@ describe('Applying updates to a doc', function () {
|
|||
|
||||
describe('when the document has been deleted', function () {
|
||||
describe('when the ops come in a single linear order', function () {
|
||||
before(function (done) {
|
||||
this.project_id = DocUpdaterClient.randomId()
|
||||
this.doc_id = DocUpdaterClient.randomId()
|
||||
beforeEach(function (done) {
|
||||
const lines = ['', '', '']
|
||||
MockWebApi.insertDoc(this.project_id, this.doc_id, {
|
||||
lines,
|
||||
|
@ -353,54 +412,49 @@ describe('Applying updates to a doc', function () {
|
|||
{ doc_id: this.doc_id, v: 10, op: [{ i: 'd', p: 10 }] },
|
||||
]
|
||||
this.my_result = ['hello world', '', '']
|
||||
done()
|
||||
})
|
||||
|
||||
it('should be able to continue applying updates when the project has been deleted', function (done) {
|
||||
let update
|
||||
const actions = []
|
||||
for (update of this.updates.slice(0, 6)) {
|
||||
;(update => {
|
||||
actions.push(callback =>
|
||||
DocUpdaterClient.sendUpdate(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
update,
|
||||
callback
|
||||
)
|
||||
for (const update of this.updates.slice(0, 6)) {
|
||||
actions.push(callback =>
|
||||
DocUpdaterClient.sendUpdate(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
update,
|
||||
callback
|
||||
)
|
||||
})(update)
|
||||
)
|
||||
}
|
||||
actions.push(callback =>
|
||||
DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, callback)
|
||||
)
|
||||
for (update of this.updates.slice(6)) {
|
||||
;(update => {
|
||||
actions.push(callback =>
|
||||
DocUpdaterClient.sendUpdate(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
update,
|
||||
callback
|
||||
)
|
||||
for (const update of this.updates.slice(6)) {
|
||||
actions.push(callback =>
|
||||
DocUpdaterClient.sendUpdate(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
update,
|
||||
callback
|
||||
)
|
||||
})(update)
|
||||
)
|
||||
}
|
||||
|
||||
async.series(actions, error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
// process updates
|
||||
actions.push(cb =>
|
||||
DocUpdaterClient.getDoc(this.project_id, this.doc_id, cb)
|
||||
)
|
||||
|
||||
async.series(actions, done)
|
||||
})
|
||||
|
||||
it('should be able to continue applying updates when the project has been deleted', function (done) {
|
||||
DocUpdaterClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.my_result)
|
||||
done()
|
||||
}
|
||||
DocUpdaterClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.my_result)
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it('should store the doc ops in the correct order', function (done) {
|
||||
|
@ -422,9 +476,7 @@ describe('Applying updates to a doc', function () {
|
|||
})
|
||||
|
||||
describe('when older ops come in after the delete', function () {
|
||||
before(function (done) {
|
||||
this.project_id = DocUpdaterClient.randomId()
|
||||
this.doc_id = DocUpdaterClient.randomId()
|
||||
beforeEach(function (done) {
|
||||
const lines = ['', '', '']
|
||||
MockWebApi.insertDoc(this.project_id, this.doc_id, {
|
||||
lines,
|
||||
|
@ -492,11 +544,9 @@ describe('Applying updates to a doc', function () {
|
|||
})
|
||||
|
||||
describe('with a broken update', function () {
|
||||
before(function (done) {
|
||||
this.project_id = DocUpdaterClient.randomId()
|
||||
this.doc_id = DocUpdaterClient.randomId()
|
||||
beforeEach(function (done) {
|
||||
this.broken_update = {
|
||||
doc_id: this.doc_id,
|
||||
doc: this.doc_id,
|
||||
v: this.version,
|
||||
op: [{ d: 'not the correct content', p: 0 }],
|
||||
}
|
||||
|
@ -547,9 +597,7 @@ describe('Applying updates to a doc', function () {
|
|||
})
|
||||
|
||||
describe('when there is no version in Mongo', function () {
|
||||
before(function (done) {
|
||||
this.project_id = DocUpdaterClient.randomId()
|
||||
this.doc_id = DocUpdaterClient.randomId()
|
||||
beforeEach(function (done) {
|
||||
MockWebApi.insertDoc(this.project_id, this.doc_id, {
|
||||
lines: this.lines,
|
||||
})
|
||||
|
@ -586,9 +634,7 @@ describe('Applying updates to a doc', function () {
|
|||
})
|
||||
|
||||
describe('when the sending duplicate ops', function () {
|
||||
before(function (done) {
|
||||
this.project_id = DocUpdaterClient.randomId()
|
||||
this.doc_id = DocUpdaterClient.randomId()
|
||||
beforeEach(function (done) {
|
||||
MockWebApi.insertDoc(this.project_id, this.doc_id, {
|
||||
lines: this.lines,
|
||||
version: this.version,
|
||||
|
@ -671,11 +717,9 @@ describe('Applying updates to a doc', function () {
|
|||
})
|
||||
|
||||
describe('when sending updates for a non-existing doc id', function () {
|
||||
before(function (done) {
|
||||
this.project_id = DocUpdaterClient.randomId()
|
||||
this.doc_id = DocUpdaterClient.randomId()
|
||||
beforeEach(function (done) {
|
||||
this.non_existing = {
|
||||
doc_id: this.doc_id,
|
||||
doc: this.doc_id,
|
||||
v: this.version,
|
||||
op: [{ d: 'content', p: 0 }],
|
||||
}
|
||||
|
|
|
@ -50,64 +50,68 @@ app.use((req, res, next) => {
|
|||
|
||||
Metrics.injectMetricsRoute(app)
|
||||
|
||||
app.head(
|
||||
'/project/:project_id/file/:file_id',
|
||||
keyBuilder.userFileKeyMiddleware,
|
||||
fileController.getFileHead
|
||||
)
|
||||
app.get(
|
||||
'/project/:project_id/file/:file_id',
|
||||
keyBuilder.userFileKeyMiddleware,
|
||||
fileController.getFile
|
||||
)
|
||||
app.post(
|
||||
'/project/:project_id/file/:file_id',
|
||||
keyBuilder.userFileKeyMiddleware,
|
||||
fileController.insertFile
|
||||
)
|
||||
app.put(
|
||||
'/project/:project_id/file/:file_id',
|
||||
keyBuilder.userFileKeyMiddleware,
|
||||
bodyParser.json(),
|
||||
fileController.copyFile
|
||||
)
|
||||
app.delete(
|
||||
'/project/:project_id/file/:file_id',
|
||||
keyBuilder.userFileKeyMiddleware,
|
||||
fileController.deleteFile
|
||||
)
|
||||
app.delete(
|
||||
'/project/:project_id',
|
||||
keyBuilder.userProjectKeyMiddleware,
|
||||
fileController.deleteProject
|
||||
)
|
||||
if (settings.filestore.stores.user_files) {
|
||||
app.head(
|
||||
'/project/:project_id/file/:file_id',
|
||||
keyBuilder.userFileKeyMiddleware,
|
||||
fileController.getFileHead
|
||||
)
|
||||
app.get(
|
||||
'/project/:project_id/file/:file_id',
|
||||
keyBuilder.userFileKeyMiddleware,
|
||||
fileController.getFile
|
||||
)
|
||||
app.post(
|
||||
'/project/:project_id/file/:file_id',
|
||||
keyBuilder.userFileKeyMiddleware,
|
||||
fileController.insertFile
|
||||
)
|
||||
app.put(
|
||||
'/project/:project_id/file/:file_id',
|
||||
keyBuilder.userFileKeyMiddleware,
|
||||
bodyParser.json(),
|
||||
fileController.copyFile
|
||||
)
|
||||
app.delete(
|
||||
'/project/:project_id/file/:file_id',
|
||||
keyBuilder.userFileKeyMiddleware,
|
||||
fileController.deleteFile
|
||||
)
|
||||
app.delete(
|
||||
'/project/:project_id',
|
||||
keyBuilder.userProjectKeyMiddleware,
|
||||
fileController.deleteProject
|
||||
)
|
||||
|
||||
app.get(
|
||||
'/project/:project_id/size',
|
||||
keyBuilder.userProjectKeyMiddleware,
|
||||
fileController.directorySize
|
||||
)
|
||||
app.get(
|
||||
'/project/:project_id/size',
|
||||
keyBuilder.userProjectKeyMiddleware,
|
||||
fileController.directorySize
|
||||
)
|
||||
}
|
||||
|
||||
app.head(
|
||||
'/template/:template_id/v/:version/:format',
|
||||
keyBuilder.templateFileKeyMiddleware,
|
||||
fileController.getFileHead
|
||||
)
|
||||
app.get(
|
||||
'/template/:template_id/v/:version/:format',
|
||||
keyBuilder.templateFileKeyMiddleware,
|
||||
fileController.getFile
|
||||
)
|
||||
app.get(
|
||||
'/template/:template_id/v/:version/:format/:sub_type',
|
||||
keyBuilder.templateFileKeyMiddleware,
|
||||
fileController.getFile
|
||||
)
|
||||
app.post(
|
||||
'/template/:template_id/v/:version/:format',
|
||||
keyBuilder.templateFileKeyMiddleware,
|
||||
fileController.insertFile
|
||||
)
|
||||
if (settings.filestore.stores.template_files) {
|
||||
app.head(
|
||||
'/template/:template_id/v/:version/:format',
|
||||
keyBuilder.templateFileKeyMiddleware,
|
||||
fileController.getFileHead
|
||||
)
|
||||
app.get(
|
||||
'/template/:template_id/v/:version/:format',
|
||||
keyBuilder.templateFileKeyMiddleware,
|
||||
fileController.getFile
|
||||
)
|
||||
app.get(
|
||||
'/template/:template_id/v/:version/:format/:sub_type',
|
||||
keyBuilder.templateFileKeyMiddleware,
|
||||
fileController.getFile
|
||||
)
|
||||
app.post(
|
||||
'/template/:template_id/v/:version/:format',
|
||||
keyBuilder.templateFileKeyMiddleware,
|
||||
fileController.insertFile
|
||||
)
|
||||
}
|
||||
|
||||
app.get(
|
||||
'/bucket/:bucket/key/*',
|
||||
|
|
|
@@ -7,6 +7,6 @@ filestore
--esmock-loader=False
--node-version=20.18.2
--public-repo=True
--script-version=4.5.0
--script-version=4.7.0
--test-acceptance-shards=SHARD_01_,SHARD_02_,SHARD_03_
--use-large-ci-runner=True

@@ -138,6 +138,45 @@ async function getHistoryBefore(req, res, next) {
}
}

/**
* Get all changes since the beginning of history or since a given version
*/
async function getChanges(req, res, next) {
const projectId = req.swagger.params.project_id.value
const since = req.swagger.params.since.value ?? 0

if (since < 0) {
// Negative values would cause an infinite loop
return res.status(400).json({
error: `Version out of bounds: ${since}`,
})
}

const changes = []
let chunk = await chunkStore.loadLatest(projectId)

if (since > chunk.getEndVersion()) {
return res.status(400).json({
error: `Version out of bounds: ${since}`,
})
}

// Fetch all chunks that come after the chunk that contains the start version
while (chunk.getStartVersion() > since) {
const changesInChunk = chunk.getChanges()
changes.unshift(...changesInChunk)
chunk = await chunkStore.loadAtVersion(projectId, chunk.getStartVersion())
}

// Extract the relevant changes from the chunk that contains the start version
const changesInChunk = chunk
.getChanges()
.slice(since - chunk.getStartVersion())
changes.unshift(...changesInChunk)

res.json(changes.map(change => change.toRaw()))
}

async function getZip(req, res, next) {
const projectId = req.swagger.params.project_id.value
const version = req.swagger.params.version.value

@@ -337,6 +376,7 @@ module.exports = {
getLatestHistoryRaw: expressify(getLatestHistoryRaw),
getHistory: expressify(getHistory),
getHistoryBefore: expressify(getHistoryBefore),
getChanges: expressify(getChanges),
getZip: expressify(getZip),
createZip: expressify(createZip),
deleteProject: expressify(deleteProject),

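The new GET handler above pages through history by walking chunks backwards
from the latest one until it reaches the requested start version. A minimal
client-side sketch of how the corresponding route could be consumed, assuming
the '/projects/{project_id}/changes' path and basic auth declared in the
swagger definition below; the base URL and credentials are placeholders:

// sketch only: fetch all changes of a project since a given version (Node 18+)
async function fetchChanges(baseUrl, projectId, since = 0, auth = 'user:pass') {
  const url = `${baseUrl}/projects/${projectId}/changes?since=${since}`
  const response = await fetch(url, {
    headers: { Authorization: 'Basic ' + Buffer.from(auth).toString('base64') },
  })
  if (!response.ok) {
    // the handler returns 400 for negative or out-of-bounds versions
    throw new Error(`unexpected status ${response.status}`)
  }
  return await response.json() // array of raw Change objects
}
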
@@ -100,9 +100,48 @@ const importChanges = {
],
}

const getChanges = {
'x-swagger-router-controller': 'projects',
operationId: 'getChanges',
tags: ['Project'],
description: 'Get changes applied to a project',
parameters: [
{
name: 'project_id',
in: 'path',
description: 'project id',
required: true,
type: 'string',
},
{
name: 'since',
in: 'query',
description: 'start version',
required: false,
type: 'number',
},
],
responses: {
200: {
description: 'Success',
schema: {
type: 'array',
items: {
$ref: '#/definitions/Change',
},
},
},
},
security: [
{
basic: [],
},
],
}

exports.paths = {
'/projects/{project_id}/import': { post: importSnapshot },
'/projects/{project_id}/legacy_import': { post: importSnapshot },
'/projects/{project_id}/changes': { post: importChanges },
'/projects/{project_id}/changes': { get: getChanges, post: importChanges },
'/projects/{project_id}/legacy_changes': { post: importChanges },
}

@@ -90,15 +90,16 @@ process.on('SIGINT', () => {

/**
* @param {number} port
* @param {boolean} enableVerificationLoop
* @return {Promise<http.Server>}
*/
export async function startApp(port) {
export async function startApp(port, enableVerificationLoop = true) {
await mongodb.client.connect()
await loadGlobalBlobs()
await healthCheck()
const server = http.createServer(app)
await promisify(server.listen.bind(server, port))()
loopRandomProjects(shutdownEmitter)
enableVerificationLoop && loopRandomProjects(shutdownEmitter)
return server
}

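The extra startApp parameter lets callers, typically acceptance tests, bring
the HTTP server up without starting the background verification loop. A rough
usage sketch, assuming a mocha-style test that imports startApp from this
module; the import path and port are illustrative only:

// sketch only: start the app for tests with the verification loop disabled
import { startApp } from './backup-verifier-app.mjs' // path is an assumption

let server
before(async function () {
  server = await startApp(0, false) // false: skip loopRandomProjects()
})
after(function (done) {
  server.close(done)
})
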
@@ -6,5 +6,5 @@ history-v1
--esmock-loader=False
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0
--tsconfig-extra-includes=backup-deletion-app.mjs,backup-verifier-app.mjs,backup-worker-app.mjs,api/**/*,migrations/**/*,storage/**/*

@@ -89,6 +89,16 @@
"host": "QUEUES_REDIS_HOST",
"password": "QUEUES_REDIS_PASSWORD",
"port": "QUEUES_REDIS_PORT"
},
"history": {
"host": "HISTORY_REDIS_HOST",
"password": "HISTORY_REDIS_PASSWORD",
"port": "HISTORY_REDIS_PORT"
},
"lock": {
"host": "REDIS_HOST",
"password": "REDIS_PASSWORD",
"port": "REDIS_PORT"
}
}
}

@@ -21,6 +21,7 @@ services:
ELASTIC_SEARCH_DSN: es:9200
REDIS_HOST: redis
QUEUES_REDIS_HOST: redis
HISTORY_REDIS_HOST: redis
ANALYTICS_QUEUES_REDIS_HOST: redis
MONGO_HOST: mongo
POSTGRES_HOST: postgres

@@ -37,6 +37,7 @@ services:
environment:
ELASTIC_SEARCH_DSN: es:9200
REDIS_HOST: redis
HISTORY_REDIS_HOST: redis
QUEUES_REDIS_HOST: redis
ANALYTICS_QUEUES_REDIS_HOST: redis
MONGO_HOST: mongo

@@ -0,0 +1,27 @@
// @ts-check

/**
* @import { Knex } from "knex"
*/

/**
* @param { Knex } knex
* @returns { Promise<void> }
*/
exports.up = async function (knex) {
await knex.raw(`
ALTER TABLE chunks
ADD COLUMN closed BOOLEAN NOT NULL DEFAULT FALSE
`)
}

/**
* @param { Knex } knex
* @returns { Promise<void> }
*/
exports.down = async function (knex) {
await knex.raw(`
ALTER TABLE chunks
DROP COLUMN closed
`)
}

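This migration adds the boolean "closed" flag that the Postgres backend uses
to mark chunks that may no longer be extended. A small illustrative query
against the new column, assuming the same knex instance used elsewhere in
history-v1; only the table and column names come from the migration, the
helper itself is made up:

// sketch only: list the chunks of a project that can no longer be extended
async function listClosedChunkIds(knex, projectId) {
  const rows = await knex('chunks')
    .select('id')
    .where('doc_id', parseInt(projectId, 10))
    .where('closed', true)
  return rows.map(row => row.id.toString())
}
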
@@ -1,10 +1,12 @@
exports.BatchBlobStore = require('./lib/batch_blob_store')
exports.blobHash = require('./lib/blob_hash')
exports.HashCheckBlobStore = require('./lib/hash_check_blob_store')
exports.chunkBuffer = require('./lib/chunk_buffer')
exports.chunkStore = require('./lib/chunk_store')
exports.historyStore = require('./lib/history_store').historyStore
exports.knex = require('./lib/knex')
exports.mongodb = require('./lib/mongodb')
exports.redis = require('./lib/redis')
exports.persistChanges = require('./lib/persist_changes')
exports.persistor = require('./lib/persistor')
exports.ProjectArchive = require('./lib/project_archive')

@@ -18,3 +20,6 @@ exports.loadGlobalBlobs = loadGlobalBlobs

const { InvalidChangeError } = require('./lib/errors')
exports.InvalidChangeError = InvalidChangeError

const { ChunkVersionConflictError } = require('./lib/chunk_store/errors')
exports.ChunkVersionConflictError = ChunkVersionConflictError

@@ -1,5 +1,7 @@
'use strict'

const OError = require('@overleaf/o-error')

const check = require('check-types')
const { Blob } = require('overleaf-editor-core')

@@ -7,41 +9,58 @@ const assert = check.assert

const MONGO_ID_REGEXP = /^[0-9a-f]{24}$/
const POSTGRES_ID_REGEXP = /^[1-9][0-9]{0,9}$/
const PROJECT_ID_REGEXP = /^([0-9a-f]{24}|[1-9][0-9]{0,9})$/
const MONGO_OR_POSTGRES_ID_REGEXP = /^([0-9a-f]{24}|[1-9][0-9]{0,9})$/

function transaction(transaction, message) {
assert.function(transaction, message)
}

function blobHash(arg, message) {
assert.match(arg, Blob.HEX_HASH_RX, message)
try {
assert.match(arg, Blob.HEX_HASH_RX, message)
} catch (error) {
throw OError.tag(error, message, { arg })
}
}

/**
* A project id is a string that contains either an integer (for projects stored in Postgres) or 24
* hex digits (for projects stored in Mongo)
*/
function projectId(arg, message) {
try {
assert.match(arg, MONGO_OR_POSTGRES_ID_REGEXP, message)
} catch (error) {
throw OError.tag(error, message, { arg })
}
}

/**
* A chunk id is a string that contains either an integer (for projects stored in Postgres) or 24
* hex digits (for projects stored in Mongo)
*/
function projectId(arg, message) {
assert.match(arg, PROJECT_ID_REGEXP, message)
}

/**
* A chunk id is either a number (for projects stored in Postgres) or a 24
* character string (for projects stored in Mongo)
*/
function chunkId(arg, message) {
const valid = check.integer(arg) || check.match(arg, MONGO_ID_REGEXP)
if (!valid) {
throw new TypeError(message)
try {
assert.match(arg, MONGO_OR_POSTGRES_ID_REGEXP, message)
} catch (error) {
throw OError.tag(error, message, { arg })
}
}

function mongoId(arg, message) {
assert.match(arg, MONGO_ID_REGEXP)
try {
assert.match(arg, MONGO_ID_REGEXP, message)
} catch (error) {
throw OError.tag(error, message, { arg })
}
}

function postgresId(arg, message) {
assert.match(arg, POSTGRES_ID_REGEXP, message)
try {
assert.match(arg, POSTGRES_ID_REGEXP, message)
} catch (error) {
throw OError.tag(error, message, { arg })
}
}

module.exports = {

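The assertion helpers now wrap check-types failures with OError.tag so that
the offending value travels with the error as context. A minimal sketch of
what a caller sees, assuming this file is required as "assert"; the sample id
and the require path are made up:

// sketch only: the tagged error carries the bad argument as context
const assert = require('./storage/lib/assert') // path is an assumption
try {
  assert.projectId('not-a-valid-id', 'bad projectId')
} catch (err) {
  console.error(err.message) // 'bad projectId'
  // OError.getFullInfo(err) would include { arg: 'not-a-valid-id' }
}
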
@@ -2,11 +2,7 @@
* Provides a generator function to back up project chunks and blobs.
*/

import {
getProjectChunksFromVersion,
loadAtVersion,
loadByChunkRecord,
} from './chunk_store/index.js'
import chunkStore from './chunk_store/index.js'

import {
GLOBAL_BLOBS, // NOTE: must call loadGlobalBlobs() before using this

@@ -33,7 +29,10 @@ async function lookBehindForSeenBlobs(
) {
// the snapshot in this chunk has not been backed up
// so we find the set of backed up blobs from the previous chunk
const previousChunk = await loadAtVersion(projectId, lastBackedUpVersion)
const previousChunk = await chunkStore.loadAtVersion(
projectId,
lastBackedUpVersion
)
const previousChunkHistory = previousChunk.getHistory()
previousChunkHistory.findBlobHashes(seenBlobs)
}

@@ -115,13 +114,13 @@ export async function* backupGenerator(projectId, lastBackedUpVersion) {
lastBackedUpVersion >= 0 ? lastBackedUpVersion + 1 : 0
let isStartingChunk = true
let currentBackedUpVersion = lastBackedUpVersion
const chunkRecordIterator = getProjectChunksFromVersion(
const chunkRecordIterator = chunkStore.getProjectChunksFromVersion(
projectId,
firstPendingVersion
)

for await (const chunkRecord of chunkRecordIterator) {
const { chunk, chunkBuffer } = await loadByChunkRecord(
const { chunk, chunkBuffer } = await chunkStore.loadByChunkRecord(
projectId,
chunkRecord
)

@@ -13,7 +13,7 @@ async function initialize(projectId) {
* Return blob metadata for the given project and hash
*/
async function findBlob(projectId, hash) {
assert.postgresId(projectId, `bad projectId ${projectId}`)
assert.postgresId(projectId, 'bad projectId')
projectId = parseInt(projectId, 10)
assert.blobHash(hash, 'bad hash')

@@ -35,7 +35,7 @@ async function findBlob(projectId, hash) {
* @return {Promise.<Array.<Blob?>>} no guarantee on order
*/
async function findBlobs(projectId, hashes) {
assert.postgresId(projectId, `bad projectId ${projectId}`)
assert.postgresId(projectId, 'bad projectId')
projectId = parseInt(projectId, 10)
assert.array(hashes, 'bad hashes: not array')
hashes.forEach(function (hash) {

@@ -57,7 +57,7 @@ async function findBlobs(projectId, hashes) {
* Return metadata for all blobs in the given project
*/
async function getProjectBlobs(projectId) {
assert.postgresId(projectId, `bad projectId ${projectId}`)
assert.postgresId(projectId, 'bad projectId')
projectId = parseInt(projectId, 10)

const records = await knex('project_blobs')

@@ -103,7 +103,7 @@ async function getProjectBlobsBatch(projectIds) {
* Add a blob's metadata to the blobs table after it has been uploaded.
*/
async function insertBlob(projectId, blob) {
assert.postgresId(projectId, `bad projectId ${projectId}`)
assert.postgresId(projectId, 'bad projectId')
projectId = parseInt(projectId, 10)

await knex('project_blobs')

@@ -116,7 +116,7 @@ async function insertBlob(projectId, blob) {
* Deletes all blobs for a given project
*/
async function deleteBlobs(projectId) {
assert.postgresId(projectId, `bad projectId ${projectId}`)
assert.postgresId(projectId, 'bad projectId')
projectId = parseInt(projectId, 10)

await knex('project_blobs').where('project_id', projectId).delete()

services/history-v1/storage/lib/chunk_buffer/index.js (new file, 39 lines)

@@ -0,0 +1,39 @@
'use strict'

/**
* @module storage/lib/chunk_buffer
*/

const chunkStore = require('../chunk_store')
const redisBackend = require('../chunk_store/redis')
const metrics = require('@overleaf/metrics')
/**
* Load the latest Chunk stored for a project, including blob metadata.
*
* @param {string} projectId
* @return {Promise.<Chunk>}
*/
async function loadLatest(projectId) {
const chunkRecord = await chunkStore.loadLatestRaw(projectId)
const cachedChunk = await redisBackend.getCurrentChunkIfValid(
projectId,
chunkRecord
)
if (cachedChunk) {
metrics.inc('chunk_buffer.loadLatest', 1, {
status: 'cache-hit',
})
return cachedChunk
} else {
metrics.inc('chunk_buffer.loadLatest', 1, {
status: 'cache-miss',
})
const chunk = await chunkStore.loadLatest(projectId)
await redisBackend.setCurrentChunk(projectId, chunk)
return chunk
}
}

module.exports = {
loadLatest,
}

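The chunk buffer sits in front of the chunk store: it serves the latest chunk
from Redis when the cached copy still matches the persisted chunk metadata and
falls back to the store, repopulating the cache, otherwise. A usage sketch,
assuming the module is reached through the storage index export shown earlier;
the require path and project id are placeholders:

// sketch only: callers ask the buffer instead of hitting the chunk store directly
const { chunkBuffer } = require('./storage') // path is an assumption
async function latestVersion(projectId) {
  const chunk = await chunkBuffer.loadLatest(projectId)
  return chunk.getEndVersion()
}
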
@ -1,3 +1,5 @@
|
|||
// @ts-check
|
||||
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
|
@ -156,7 +158,6 @@ async function loadAtTimestamp(projectId, timestamp) {
|
|||
* @param {string} projectId
|
||||
* @param {Chunk} chunk
|
||||
* @param {Date} [earliestChangeTimestamp]
|
||||
* @return {Promise.<number>} for the chunkId of the inserted chunk
|
||||
*/
|
||||
async function create(projectId, chunk, earliestChangeTimestamp) {
|
||||
assert.projectId(projectId, 'bad projectId')
|
||||
|
@ -164,13 +165,18 @@ async function create(projectId, chunk, earliestChangeTimestamp) {
|
|||
assert.maybe.date(earliestChangeTimestamp, 'bad timestamp')
|
||||
|
||||
const backend = getBackend(projectId)
|
||||
const chunkStart = chunk.getStartVersion()
|
||||
const chunkId = await uploadChunk(projectId, chunk)
|
||||
await backend.confirmCreate(
|
||||
projectId,
|
||||
chunk,
|
||||
chunkId,
|
||||
earliestChangeTimestamp
|
||||
)
|
||||
|
||||
const opts = {}
|
||||
if (chunkStart > 0) {
|
||||
opts.oldChunkId = await getChunkIdForVersion(projectId, chunkStart - 1)
|
||||
}
|
||||
if (earliestChangeTimestamp != null) {
|
||||
opts.earliestChangeTimestamp = earliestChangeTimestamp
|
||||
}
|
||||
|
||||
await backend.confirmCreate(projectId, chunk, chunkId, opts)
|
||||
}
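
In the reworked create() above, the chunk is uploaded first and the backend's
confirmCreate() then receives a single opts object carrying the previous chunk
id (when the new chunk extends existing history) and the earliest change
timestamp. A hedged sketch of what a caller of the public API does; the Chunk
value and the require path are illustrative, only create() itself comes from
this module:

// sketch only: append a new chunk that extends a project's history
const chunkStore = require('./storage/lib/chunk_store') // path is an assumption
async function appendChunk(projectId, newChunk) {
  // create() resolves the old chunk id internally and asks the backend to
  // close it and activate the new chunk in one confirmCreate() call
  await chunkStore.create(projectId, newChunk, new Date())
}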
|
||||
|
||||
/**
|
||||
|
@ -220,13 +226,12 @@ async function update(
|
|||
const oldChunkId = await getChunkIdForVersion(projectId, oldEndVersion)
|
||||
const newChunkId = await uploadChunk(projectId, newChunk)
|
||||
|
||||
await backend.confirmUpdate(
|
||||
projectId,
|
||||
oldChunkId,
|
||||
newChunk,
|
||||
newChunkId,
|
||||
earliestChangeTimestamp
|
||||
)
|
||||
const opts = {}
|
||||
if (earliestChangeTimestamp != null) {
|
||||
opts.earliestChangeTimestamp = earliestChangeTimestamp
|
||||
}
|
||||
|
||||
await backend.confirmUpdate(projectId, oldChunkId, newChunk, newChunkId, opts)
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -234,7 +239,7 @@ async function update(
|
|||
*
|
||||
* @param {string} projectId
|
||||
* @param {number} version
|
||||
* @return {Promise.<number>}
|
||||
* @return {Promise.<string>}
|
||||
*/
|
||||
async function getChunkIdForVersion(projectId, version) {
|
||||
const backend = getBackend(projectId)
|
||||
|
@ -343,10 +348,14 @@ async function deleteProjectChunks(projectId) {
|
|||
* Delete a given number of old chunks from both the database
|
||||
* and from object storage.
|
||||
*
|
||||
* @param {number} count - number of chunks to delete
|
||||
* @param {number} minAgeSecs - how many seconds ago must chunks have been
|
||||
* deleted
|
||||
* @return {Promise}
|
||||
* @param {object} options
|
||||
* @param {number} [options.batchSize] - number of chunks to delete in each
|
||||
* batch
|
||||
* @param {number} [options.maxBatches] - maximum number of batches to process
|
||||
* @param {number} [options.minAgeSecs] - minimum age of chunks to delete
|
||||
* @param {number} [options.timeout] - maximum time to spend deleting chunks
|
||||
*
|
||||
* @return {Promise<number>} number of chunks deleted
|
||||
*/
|
||||
async function deleteOldChunks(options = {}) {
|
||||
const batchSize = options.batchSize ?? DEFAULT_DELETE_BATCH_SIZE
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
const { ObjectId, ReadPreference } = require('mongodb')
|
||||
// @ts-check
|
||||
|
||||
const { ObjectId, ReadPreference, MongoError } = require('mongodb')
|
||||
const { Chunk } = require('overleaf-editor-core')
|
||||
const OError = require('@overleaf/o-error')
|
||||
const assert = require('../assert')
|
||||
|
@ -7,6 +9,10 @@ const { ChunkVersionConflictError } = require('./errors')
|
|||
|
||||
const DUPLICATE_KEY_ERROR_CODE = 11000
|
||||
|
||||
/**
|
||||
* @import { ClientSession } from 'mongodb'
|
||||
*/
|
||||
|
||||
/**
|
||||
* Get the latest chunk's metadata from the database
|
||||
* @param {string} projectId
|
||||
|
@ -18,7 +24,10 @@ async function getLatestChunk(projectId, opts = {}) {
|
|||
const { readOnly = false } = opts
|
||||
|
||||
const record = await mongodb.chunks.findOne(
|
||||
{ projectId: new ObjectId(projectId), state: 'active' },
|
||||
{
|
||||
projectId: new ObjectId(projectId),
|
||||
state: { $in: ['active', 'closed'] },
|
||||
},
|
||||
{
|
||||
sort: { startVersion: -1 },
|
||||
readPreference: readOnly
|
||||
|
@ -42,7 +51,7 @@ async function getChunkForVersion(projectId, version) {
|
|||
const record = await mongodb.chunks.findOne(
|
||||
{
|
||||
projectId: new ObjectId(projectId),
|
||||
state: 'active',
|
||||
state: { $in: ['active', 'closed'] },
|
||||
startVersion: { $lte: version },
|
||||
endVersion: { $gte: version },
|
||||
},
|
||||
|
@ -94,7 +103,7 @@ async function getChunkForTimestamp(projectId, timestamp) {
|
|||
const record = await mongodb.chunks.findOne(
|
||||
{
|
||||
projectId: new ObjectId(projectId),
|
||||
state: 'active',
|
||||
state: { $in: ['active', 'closed'] },
|
||||
endTimestamp: { $gte: timestamp },
|
||||
},
|
||||
// We use the index on the startVersion for sorting records. This assumes
|
||||
|
@ -126,7 +135,7 @@ async function getLastActiveChunkBeforeTimestamp(projectId, timestamp) {
|
|||
const record = await mongodb.chunks.findOne(
|
||||
{
|
||||
projectId: new ObjectId(projectId),
|
||||
state: 'active',
|
||||
state: { $in: ['active', 'closed'] },
|
||||
$or: [
|
||||
{
|
||||
endTimestamp: {
|
||||
|
@ -155,7 +164,10 @@ async function getProjectChunkIds(projectId) {
|
|||
assert.mongoId(projectId, 'bad projectId')
|
||||
|
||||
const cursor = mongodb.chunks.find(
|
||||
{ projectId: new ObjectId(projectId), state: 'active' },
|
||||
{
|
||||
projectId: new ObjectId(projectId),
|
||||
state: { $in: ['active', 'closed'] },
|
||||
},
|
||||
{ projection: { _id: 1 } }
|
||||
)
|
||||
return await cursor.map(record => record._id).toArray()
|
||||
|
@ -169,7 +181,10 @@ async function getProjectChunks(projectId) {
|
|||
|
||||
const cursor = mongodb.chunks
|
||||
.find(
|
||||
{ projectId: new ObjectId(projectId), state: 'active' },
|
||||
{
|
||||
projectId: new ObjectId(projectId),
|
||||
state: { $in: ['active', 'closed'] },
|
||||
},
|
||||
{ projection: { state: 0 } }
|
||||
)
|
||||
.sort({ startVersion: 1 })
|
||||
|
@ -198,48 +213,35 @@ async function insertPendingChunk(projectId, chunk) {
|
|||
|
||||
/**
|
||||
* Record that a new chunk was created.
|
||||
*
|
||||
* @param {string} projectId
|
||||
* @param {Chunk} chunk
|
||||
* @param {string} chunkId
|
||||
* @param {object} opts
|
||||
* @param {Date} [opts.earliestChangeTimestamp]
|
||||
* @param {string} [opts.oldChunkId]
|
||||
*/
|
||||
async function confirmCreate(
|
||||
projectId,
|
||||
chunk,
|
||||
chunkId,
|
||||
earliestChangeTimestamp,
|
||||
mongoOpts = {}
|
||||
) {
|
||||
async function confirmCreate(projectId, chunk, chunkId, opts = {}) {
|
||||
assert.mongoId(projectId, 'bad projectId')
|
||||
assert.instance(chunk, Chunk, 'bad chunk')
|
||||
assert.mongoId(chunkId, 'bad chunkId')
|
||||
assert.instance(chunk, Chunk, 'bad newChunk')
|
||||
assert.mongoId(chunkId, 'bad newChunkId')
|
||||
|
||||
let result
|
||||
try {
|
||||
result = await mongodb.chunks.updateOne(
|
||||
{
|
||||
_id: new ObjectId(chunkId),
|
||||
projectId: new ObjectId(projectId),
|
||||
state: 'pending',
|
||||
},
|
||||
{ $set: { state: 'active', updatedAt: new Date() } },
|
||||
mongoOpts
|
||||
)
|
||||
} catch (err) {
|
||||
if (err.code === DUPLICATE_KEY_ERROR_CODE) {
|
||||
throw new ChunkVersionConflictError('chunk start version is not unique', {
|
||||
await mongodb.client.withSession(async session => {
|
||||
await session.withTransaction(async () => {
|
||||
if (opts.oldChunkId != null) {
|
||||
await closeChunk(projectId, opts.oldChunkId, { session })
|
||||
}
|
||||
|
||||
await activateChunk(projectId, chunkId, { session })
|
||||
|
||||
await updateProjectRecord(
|
||||
projectId,
|
||||
chunkId,
|
||||
})
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
if (result.matchedCount === 0) {
|
||||
throw new OError('pending chunk not found', { projectId, chunkId })
|
||||
}
|
||||
await updateProjectRecord(
|
||||
projectId,
|
||||
chunk,
|
||||
earliestChangeTimestamp,
|
||||
mongoOpts
|
||||
)
|
||||
chunk,
|
||||
opts.earliestChangeTimestamp,
|
||||
{ session }
|
||||
)
|
||||
})
|
||||
})
|
||||
}
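
confirmCreate in the Mongo backend now runs the close-old / activate-new /
update-project steps inside one transaction, so a crash between steps cannot
leave two extendable chunks. A stripped-down sketch of the same driver
pattern, assuming a connected MongoClient named client; the collection and
field names are illustrative, not this module's schema:

// sketch only: atomically flip the states of two documents with the Node driver
async function swapStates(client, collection, oldId, newId) {
  await client.withSession(async session => {
    await session.withTransaction(async () => {
      await collection.updateOne({ _id: oldId }, { $set: { state: 'closed' } }, { session })
      await collection.updateOne({ _id: newId }, { $set: { state: 'active' } }, { session })
    })
  })
}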
|
||||
|
||||
/**
|
||||
|
@ -276,41 +278,145 @@ async function updateProjectRecord(
|
|||
|
||||
/**
|
||||
* Record that a chunk was replaced by a new one.
|
||||
*
|
||||
* @param {string} projectId
|
||||
* @param {string} oldChunkId
|
||||
* @param {Chunk} newChunk
|
||||
* @param {string} newChunkId
|
||||
* @param {object} [opts]
|
||||
* @param {Date} [opts.earliestChangeTimestamp]
|
||||
*/
|
||||
async function confirmUpdate(
|
||||
projectId,
|
||||
oldChunkId,
|
||||
newChunk,
|
||||
newChunkId,
|
||||
earliestChangeTimestamp
|
||||
opts = {}
|
||||
) {
|
||||
assert.mongoId(projectId, 'bad projectId')
|
||||
assert.mongoId(oldChunkId, 'bad oldChunkId')
|
||||
assert.instance(newChunk, Chunk, 'bad newChunk')
|
||||
assert.mongoId(newChunkId, 'bad newChunkId')
|
||||
|
||||
const session = mongodb.client.startSession()
|
||||
try {
|
||||
await mongodb.client.withSession(async session => {
|
||||
await session.withTransaction(async () => {
|
||||
await deleteChunk(projectId, oldChunkId, { session })
|
||||
await confirmCreate(
|
||||
await deleteActiveChunk(projectId, oldChunkId, { session })
|
||||
|
||||
await activateChunk(projectId, newChunkId, { session })
|
||||
|
||||
await updateProjectRecord(
|
||||
projectId,
|
||||
newChunk,
|
||||
newChunkId,
|
||||
earliestChangeTimestamp,
|
||||
opts.earliestChangeTimestamp,
|
||||
{ session }
|
||||
)
|
||||
})
|
||||
} finally {
|
||||
await session.endSession()
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Activate a pending chunk
|
||||
*
|
||||
* @param {string} projectId
|
||||
* @param {string} chunkId
|
||||
* @param {object} [opts]
|
||||
* @param {ClientSession} [opts.session]
|
||||
*/
|
||||
async function activateChunk(projectId, chunkId, opts = {}) {
|
||||
assert.mongoId(projectId, 'bad projectId')
|
||||
assert.mongoId(chunkId, 'bad chunkId')
|
||||
|
||||
let result
|
||||
try {
|
||||
result = await mongodb.chunks.updateOne(
|
||||
{
|
||||
_id: new ObjectId(chunkId),
|
||||
projectId: new ObjectId(projectId),
|
||||
state: 'pending',
|
||||
},
|
||||
{ $set: { state: 'active', updatedAt: new Date() } },
|
||||
opts
|
||||
)
|
||||
} catch (err) {
|
||||
if (err instanceof MongoError && err.code === DUPLICATE_KEY_ERROR_CODE) {
|
||||
throw new ChunkVersionConflictError('chunk start version is not unique', {
|
||||
projectId,
|
||||
chunkId,
|
||||
})
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
if (result.matchedCount === 0) {
|
||||
throw new OError('pending chunk not found', { projectId, chunkId })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Close a chunk
|
||||
*
|
||||
* A closed chunk is one that can't be extended anymore.
|
||||
*
|
||||
* @param {string} projectId
|
||||
* @param {string} chunkId
|
||||
* @param {object} [opts]
|
||||
* @param {ClientSession} [opts.session]
|
||||
*/
|
||||
async function closeChunk(projectId, chunkId, opts = {}) {
|
||||
const result = await mongodb.chunks.updateOne(
|
||||
{
|
||||
_id: new ObjectId(chunkId),
|
||||
projectId: new ObjectId(projectId),
|
||||
state: 'active',
|
||||
},
|
||||
{ $set: { state: 'closed' } },
|
||||
opts
|
||||
)
|
||||
|
||||
if (result.matchedCount === 0) {
|
||||
throw new ChunkVersionConflictError('unable to close chunk', {
|
||||
projectId,
|
||||
chunkId,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete an active chunk
|
||||
*
|
||||
* This is used to delete chunks that are in the process of being extended. It
|
||||
* will refuse to delete chunks that are already closed and can therefore not be
|
||||
* extended.
|
||||
*
|
||||
* @param {string} projectId
|
||||
* @param {string} chunkId
|
||||
* @param {object} [opts]
|
||||
* @param {ClientSession} [opts.session]
|
||||
*/
|
||||
async function deleteActiveChunk(projectId, chunkId, opts = {}) {
|
||||
const updateResult = await mongodb.chunks.updateOne(
|
||||
{
|
||||
_id: new ObjectId(chunkId),
|
||||
projectId: new ObjectId(projectId),
|
||||
state: 'active',
|
||||
},
|
||||
{ $set: { state: 'deleted', updatedAt: new Date() } },
|
||||
opts
|
||||
)
|
||||
|
||||
if (updateResult.matchedCount === 0) {
|
||||
throw new ChunkVersionConflictError('unable to delete active chunk', {
|
||||
projectId,
|
||||
chunkId,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a chunk.
|
||||
*
|
||||
* @param {number} projectId
|
||||
* @param {number} chunkId
|
||||
* @param {string} projectId
|
||||
* @param {string} chunkId
|
||||
* @return {Promise}
|
||||
*/
|
||||
async function deleteChunk(projectId, chunkId, mongoOpts = {}) {
|
||||
|
@ -331,7 +437,10 @@ async function deleteProjectChunks(projectId) {
|
|||
assert.mongoId(projectId, 'bad projectId')
|
||||
|
||||
await mongodb.chunks.updateMany(
|
||||
{ projectId: new ObjectId(projectId), state: 'active' },
|
||||
{
|
||||
projectId: new ObjectId(projectId),
|
||||
state: { $in: ['active', 'closed'] },
|
||||
},
|
||||
{ $set: { state: 'deleted', updatedAt: new Date() } }
|
||||
)
|
||||
}
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
// @ts-check
|
||||
|
||||
const { Chunk } = require('overleaf-editor-core')
|
||||
const assert = require('../assert')
|
||||
const knex = require('../knex')
|
||||
|
@ -7,6 +9,10 @@ const { updateProjectRecord } = require('./mongo')
|
|||
|
||||
const DUPLICATE_KEY_ERROR_CODE = '23505'
|
||||
|
||||
/**
|
||||
* @import { Knex } from 'knex'
|
||||
*/
|
||||
|
||||
/**
|
||||
* Get the latest chunk's metadata from the database
|
||||
* @param {string} projectId
|
||||
|
@ -14,12 +20,11 @@ const DUPLICATE_KEY_ERROR_CODE = '23505'
|
|||
* @param {boolean} [opts.readOnly]
|
||||
*/
|
||||
async function getLatestChunk(projectId, opts = {}) {
|
||||
assert.postgresId(projectId, `bad projectId ${projectId}`)
|
||||
projectId = parseInt(projectId, 10)
|
||||
assert.postgresId(projectId, 'bad projectId')
|
||||
const { readOnly = false } = opts
|
||||
|
||||
const record = await (readOnly ? knexReadOnly : knex)('chunks')
|
||||
.where('doc_id', projectId)
|
||||
.where('doc_id', parseInt(projectId, 10))
|
||||
.orderBy('end_version', 'desc')
|
||||
.first()
|
||||
if (record == null) {
|
||||
|
@ -30,13 +35,15 @@ async function getLatestChunk(projectId, opts = {}) {
|
|||
|
||||
/**
|
||||
* Get the metadata for the chunk that contains the given version.
|
||||
*
|
||||
* @param {string} projectId
|
||||
* @param {number} version
|
||||
*/
|
||||
async function getChunkForVersion(projectId, version) {
|
||||
assert.postgresId(projectId, `bad projectId ${projectId}`)
|
||||
projectId = parseInt(projectId, 10)
|
||||
assert.postgresId(projectId, 'bad projectId')
|
||||
|
||||
const record = await knex('chunks')
|
||||
.where('doc_id', projectId)
|
||||
.where('doc_id', parseInt(projectId, 10))
|
||||
.where('end_version', '>=', version)
|
||||
.orderBy('end_version')
|
||||
.first()
|
||||
|
@ -48,20 +55,23 @@ async function getChunkForVersion(projectId, version) {
|
|||
|
||||
/**
|
||||
* Get the metadata for the chunk that contains the given version.
|
||||
*
|
||||
* @param {string} projectId
|
||||
* @param {Date} timestamp
|
||||
*/
|
||||
async function getFirstChunkBeforeTimestamp(projectId, timestamp) {
|
||||
assert.date(timestamp, 'bad timestamp')
|
||||
|
||||
const recordActive = await getChunkForVersion(projectId, 0)
|
||||
|
||||
// projectId must be valid if getChunkForVersion did not throw
|
||||
projectId = parseInt(projectId, 10)
|
||||
if (recordActive && recordActive.endTimestamp <= timestamp) {
|
||||
return recordActive
|
||||
}
|
||||
|
||||
// fallback to deleted chunk
|
||||
const recordDeleted = await knex('old_chunks')
|
||||
.where('doc_id', projectId)
|
||||
.where('doc_id', parseInt(projectId, 10))
|
||||
.where('start_version', '=', 0)
|
||||
.where('end_timestamp', '<=', timestamp)
|
||||
.orderBy('end_version', 'desc')
|
||||
|
@ -75,14 +85,16 @@ async function getFirstChunkBeforeTimestamp(projectId, timestamp) {
|
|||
/**
|
||||
* Get the metadata for the chunk that contains the version that was current at
|
||||
* the given timestamp.
|
||||
*
|
||||
* @param {string} projectId
|
||||
* @param {Date} timestamp
|
||||
*/
|
||||
async function getLastActiveChunkBeforeTimestamp(projectId, timestamp) {
|
||||
assert.date(timestamp, 'bad timestamp')
|
||||
assert.postgresId(projectId, 'bad projectId')
|
||||
projectId = parseInt(projectId, 10)
|
||||
|
||||
const query = knex('chunks')
|
||||
.where('doc_id', projectId)
|
||||
.where('doc_id', parseInt(projectId, 10))
|
||||
.where(function () {
|
||||
this.where('end_timestamp', '<=', timestamp).orWhere(
|
||||
'end_timestamp',
|
||||
|
@ -102,10 +114,12 @@ async function getLastActiveChunkBeforeTimestamp(projectId, timestamp) {
|
|||
/**
|
||||
* Get the metadata for the chunk that contains the version that was current at
|
||||
* the given timestamp.
|
||||
*
|
||||
* @param {string} projectId
|
||||
* @param {Date} timestamp
|
||||
*/
|
||||
async function getChunkForTimestamp(projectId, timestamp) {
|
||||
assert.postgresId(projectId, `bad projectId ${projectId}`)
|
||||
projectId = parseInt(projectId, 10)
|
||||
assert.postgresId(projectId, 'bad projectId')
|
||||
|
||||
// This query will find the latest chunk after the timestamp (query orders
|
||||
// in reverse chronological order), OR the latest chunk
|
||||
|
@ -118,11 +132,11 @@ async function getChunkForTimestamp(projectId, timestamp) {
|
|||
'WHERE doc_id = ? ' +
|
||||
'ORDER BY end_version desc LIMIT 1' +
|
||||
')',
|
||||
[timestamp, projectId]
|
||||
[timestamp, parseInt(projectId, 10)]
|
||||
)
|
||||
|
||||
const record = await knex('chunks')
|
||||
.where('doc_id', projectId)
|
||||
.where('doc_id', parseInt(projectId, 10))
|
||||
.where(whereAfterEndTimestampOrLatestChunk)
|
||||
.orderBy('end_version')
|
||||
.first()
|
||||
|
@ -137,7 +151,7 @@ async function getChunkForTimestamp(projectId, timestamp) {
|
|||
*/
|
||||
function chunkFromRecord(record) {
|
||||
return {
|
||||
id: record.id,
|
||||
id: record.id.toString(),
|
||||
startVersion: record.start_version,
|
||||
endVersion: record.end_version,
|
||||
endTimestamp: record.end_timestamp,
|
||||
|
@ -146,35 +160,41 @@ function chunkFromRecord(record) {
|
|||
|
||||
/**
|
||||
* Get all of a project's chunk ids
|
||||
*
|
||||
* @param {string} projectId
|
||||
*/
|
||||
async function getProjectChunkIds(projectId) {
|
||||
assert.postgresId(projectId, `bad projectId ${projectId}`)
|
||||
projectId = parseInt(projectId, 10)
|
||||
assert.postgresId(projectId, 'bad projectId')
|
||||
|
||||
const records = await knex('chunks').select('id').where('doc_id', projectId)
|
||||
const records = await knex('chunks')
|
||||
.select('id')
|
||||
.where('doc_id', parseInt(projectId, 10))
|
||||
return records.map(record => record.id)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all of a projects chunks directly
|
||||
*
|
||||
* @param {string} projectId
|
||||
*/
|
||||
async function getProjectChunks(projectId) {
|
||||
assert.postgresId(projectId, `bad projectId ${projectId}`)
|
||||
projectId = parseInt(projectId, 10)
|
||||
assert.postgresId(projectId, 'bad projectId')
|
||||
|
||||
const records = await knex('chunks')
|
||||
.select()
|
||||
.where('doc_id', projectId)
|
||||
.where('doc_id', parseInt(projectId, 10))
|
||||
.orderBy('end_version')
|
||||
return records.map(chunkFromRecord)
|
||||
}
|
||||
|
||||
/**
|
||||
* Insert a pending chunk before sending it to object storage.
|
||||
*
|
||||
* @param {string} projectId
|
||||
* @param {Chunk} chunk
|
||||
*/
|
||||
async function insertPendingChunk(projectId, chunk) {
|
||||
assert.postgresId(projectId, `bad projectId ${projectId}`)
|
||||
projectId = parseInt(projectId, 10)
|
||||
assert.postgresId(projectId, 'bad projectId')
|
||||
|
||||
const result = await knex.first(
|
||||
knex.raw("nextval('chunks_id_seq'::regclass)::integer as chunkid")
|
||||
|
@ -182,80 +202,119 @@ async function insertPendingChunk(projectId, chunk) {
|
|||
const chunkId = result.chunkid
|
||||
await knex('pending_chunks').insert({
|
||||
id: chunkId,
|
||||
doc_id: projectId,
|
||||
doc_id: parseInt(projectId, 10),
|
||||
end_version: chunk.getEndVersion(),
|
||||
start_version: chunk.getStartVersion(),
|
||||
end_timestamp: chunk.getEndTimestamp(),
|
||||
})
|
||||
return chunkId
|
||||
return chunkId.toString()
|
||||
}
|
||||
|
||||
/**
|
||||
* Record that a new chunk was created.
|
||||
*
|
||||
* @param {string} projectId
|
||||
* @param {Chunk} chunk
|
||||
* @param {string} chunkId
|
||||
* @param {object} opts
|
||||
* @param {Date} [opts.earliestChangeTimestamp]
|
||||
* @param {string} [opts.oldChunkId]
|
||||
*/
|
||||
async function confirmCreate(
|
||||
projectId,
|
||||
chunk,
|
||||
chunkId,
|
||||
earliestChangeTimestamp
|
||||
) {
|
||||
assert.postgresId(projectId, `bad projectId ${projectId}`)
|
||||
projectId = parseInt(projectId, 10)
|
||||
async function confirmCreate(projectId, chunk, chunkId, opts = {}) {
|
||||
assert.postgresId(projectId, 'bad projectId')
|
||||
|
||||
await knex.transaction(async tx => {
|
||||
if (opts.oldChunkId != null) {
|
||||
await _assertChunkIsNotClosed(tx, projectId, opts.oldChunkId)
|
||||
await _closeChunk(tx, projectId, opts.oldChunkId)
|
||||
}
|
||||
await Promise.all([
|
||||
_deletePendingChunk(tx, projectId, chunkId),
|
||||
_insertChunk(tx, projectId, chunk, chunkId),
|
||||
])
|
||||
await updateProjectRecord(projectId, chunk, earliestChangeTimestamp)
|
||||
await updateProjectRecord(
|
||||
// The history id in Mongo is an integer for Postgres projects
|
||||
parseInt(projectId, 10),
|
||||
chunk,
|
||||
opts.earliestChangeTimestamp
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Record that a chunk was replaced by a new one.
|
||||
*
|
||||
* @param {string} projectId
|
||||
* @param {string} oldChunkId
|
||||
* @param {Chunk} newChunk
|
||||
* @param {string} newChunkId
|
||||
*/
|
||||
async function confirmUpdate(
|
||||
projectId,
|
||||
oldChunkId,
|
||||
newChunk,
|
||||
newChunkId,
|
||||
earliestChangeTimestamp
|
||||
opts = {}
|
||||
) {
|
||||
assert.postgresId(projectId, `bad projectId ${projectId}`)
|
||||
projectId = parseInt(projectId, 10)
|
||||
assert.postgresId(projectId, 'bad projectId')
|
||||
|
||||
await knex.transaction(async tx => {
|
||||
await _assertChunkIsNotClosed(tx, projectId, oldChunkId)
|
||||
await _deleteChunks(tx, { doc_id: projectId, id: oldChunkId })
|
||||
await Promise.all([
|
||||
_deletePendingChunk(tx, projectId, newChunkId),
|
||||
_insertChunk(tx, projectId, newChunk, newChunkId),
|
||||
])
|
||||
await updateProjectRecord(projectId, newChunk, earliestChangeTimestamp)
|
||||
await updateProjectRecord(
|
||||
// The history id in Mongo is an integer for Postgres projects
|
||||
parseInt(projectId, 10),
|
||||
newChunk,
|
||||
opts.earliestChangeTimestamp
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a pending chunk
|
||||
*
|
||||
* @param {Knex} tx
|
||||
* @param {string} projectId
|
||||
* @param {string} chunkId
|
||||
*/
|
||||
async function _deletePendingChunk(tx, projectId, chunkId) {
|
||||
await tx('pending_chunks')
|
||||
.where({
|
||||
doc_id: projectId,
|
||||
id: chunkId,
|
||||
doc_id: parseInt(projectId, 10),
|
||||
id: parseInt(chunkId, 10),
|
||||
})
|
||||
.del()
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds an active chunk
|
||||
*
|
||||
* @param {Knex} tx
|
||||
* @param {string} projectId
|
||||
* @param {Chunk} chunk
|
||||
* @param {string} chunkId
|
||||
*/
|
||||
async function _insertChunk(tx, projectId, chunk, chunkId) {
|
||||
const startVersion = chunk.getStartVersion()
|
||||
const endVersion = chunk.getEndVersion()
|
||||
try {
|
||||
await tx('chunks').insert({
|
||||
id: chunkId,
|
||||
doc_id: projectId,
|
||||
id: parseInt(chunkId, 10),
|
||||
doc_id: parseInt(projectId, 10),
|
||||
start_version: startVersion,
|
||||
end_version: endVersion,
|
||||
end_timestamp: chunk.getEndTimestamp(),
|
||||
})
|
||||
} catch (err) {
|
||||
if (err.code === DUPLICATE_KEY_ERROR_CODE) {
|
||||
if (
|
||||
err instanceof Error &&
|
||||
'code' in err &&
|
||||
err.code === DUPLICATE_KEY_ERROR_CODE
|
||||
) {
|
||||
throw new ChunkVersionConflictError(
|
||||
'chunk start or end version is not unique',
|
||||
{ projectId, chunkId, startVersion, endVersion }
|
||||
|
@ -265,35 +324,92 @@ async function _insertChunk(tx, projectId, chunk, chunkId) {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check that a chunk is not closed
|
||||
*
|
||||
* This is used to synchronize chunk creations and extensions.
|
||||
*
|
||||
* @param {Knex} tx
|
||||
* @param {string} projectId
|
||||
* @param {string} chunkId
|
||||
*/
|
||||
async function _assertChunkIsNotClosed(tx, projectId, chunkId) {
|
||||
const record = await tx('chunks')
|
||||
.forUpdate()
|
||||
.select('closed')
|
||||
.where('doc_id', parseInt(projectId, 10))
|
||||
.where('id', parseInt(chunkId, 10))
|
||||
.first()
|
||||
if (!record) {
|
||||
throw new ChunkVersionConflictError('unable to close chunk: not found', {
|
||||
projectId,
|
||||
chunkId,
|
||||
})
|
||||
}
|
||||
if (record.closed) {
|
||||
throw new ChunkVersionConflictError(
|
||||
'unable to close chunk: already closed',
|
||||
{
|
||||
projectId,
|
||||
chunkId,
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Close a chunk
|
||||
*
|
||||
* A closed chunk can no longer be extended.
|
||||
*
|
||||
* @param {Knex} tx
|
||||
* @param {string} projectId
|
||||
* @param {string} chunkId
|
||||
*/
|
||||
async function _closeChunk(tx, projectId, chunkId) {
|
||||
await tx('chunks')
|
||||
.update({ closed: true })
|
||||
.where('doc_id', parseInt(projectId, 10))
|
||||
.where('id', parseInt(chunkId, 10))
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a chunk.
|
||||
*
|
||||
* @param {number} projectId
|
||||
* @param {number} chunkId
|
||||
* @return {Promise}
|
||||
* @param {string} projectId
|
||||
* @param {string} chunkId
|
||||
*/
|
||||
async function deleteChunk(projectId, chunkId) {
|
||||
assert.postgresId(projectId, `bad projectId ${projectId}`)
|
||||
projectId = parseInt(projectId, 10)
|
||||
assert.postgresId(projectId, 'bad projectId')
|
||||
assert.integer(chunkId, 'bad chunkId')
|
||||
|
||||
await _deleteChunks(knex, { doc_id: projectId, id: chunkId })
|
||||
await _deleteChunks(knex, {
|
||||
doc_id: parseInt(projectId, 10),
|
||||
id: parseInt(chunkId, 10),
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete all of a project's chunks
|
||||
*
|
||||
* @param {string} projectId
|
||||
*/
|
||||
async function deleteProjectChunks(projectId) {
|
||||
assert.postgresId(projectId, `bad projectId ${projectId}`)
|
||||
projectId = parseInt(projectId, 10)
|
||||
assert.postgresId(projectId, 'bad projectId')
|
||||
|
||||
await knex.transaction(async tx => {
|
||||
await _deleteChunks(knex, { doc_id: projectId })
|
||||
await _deleteChunks(knex, { doc_id: parseInt(projectId, 10) })
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete many chunks
|
||||
*
|
||||
* @param {Knex} tx
|
||||
* @param {any} whereClause
|
||||
*/
|
||||
async function _deleteChunks(tx, whereClause) {
|
||||
const rows = await tx('chunks').returning('*').where(whereClause).del()
|
||||
const rows = await tx('chunks').where(whereClause).del().returning('*')
|
||||
if (rows.length === 0) {
|
||||
return
|
||||
}
|
||||
|
@ -311,6 +427,9 @@ async function _deleteChunks(tx, whereClause) {
|
|||
|
||||
/**
|
||||
* Get a batch of old chunks for deletion
|
||||
*
|
||||
* @param {number} count
|
||||
* @param {number} minAgeSecs
|
||||
*/
|
||||
async function getOldChunksBatch(count, minAgeSecs) {
|
||||
const maxDeletedAt = new Date(Date.now() - minAgeSecs * 1000)
|
||||
|
@ -321,15 +440,22 @@ async function getOldChunksBatch(count, minAgeSecs) {
|
|||
.limit(count)
|
||||
return records.map(oldChunk => ({
|
||||
projectId: oldChunk.doc_id.toString(),
|
||||
chunkId: oldChunk.chunk_id,
|
||||
chunkId: oldChunk.chunk_id.toString(),
|
||||
}))
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a batch of old chunks from the database
|
||||
*
|
||||
* @param {string[]} chunkIds
|
||||
*/
|
||||
async function deleteOldChunks(chunkIds) {
|
||||
await knex('old_chunks').whereIn('chunk_id', chunkIds).del()
|
||||
await knex('old_chunks')
|
||||
.whereIn(
|
||||
'chunk_id',
|
||||
chunkIds.map(id => parseInt(id, 10))
|
||||
)
|
||||
.del()
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
services/history-v1/storage/lib/chunk_store/redis.js (new file, 478 lines)
|
@ -0,0 +1,478 @@
|
|||
const metrics = require('@overleaf/metrics')
|
||||
const logger = require('@overleaf/logger')
|
||||
const redis = require('../redis')
|
||||
const rclient = redis.rclientHistory //
|
||||
const { Snapshot, Change, History, Chunk } = require('overleaf-editor-core')
|
||||
|
||||
const TEMPORARY_CACHE_LIFETIME = 300 // 5 minutes
|
||||
|
||||
const keySchema = {
|
||||
snapshot({ projectId }) {
|
||||
return `snapshot:{${projectId}}`
|
||||
},
|
||||
startVersion({ projectId }) {
|
||||
return `snapshot-version:{${projectId}}`
|
||||
},
|
||||
changes({ projectId }) {
|
||||
return `changes:{${projectId}}`
|
||||
},
|
||||
expireTime({ projectId }) {
|
||||
return `expire-time:{${projectId}}`
|
||||
},
|
||||
persistTime({ projectId }) {
|
||||
return `persist-time:{${projectId}}`
|
||||
},
|
||||
}
|
||||
|
||||
rclient.defineCommand('get_current_chunk', {
|
||||
numberOfKeys: 3,
|
||||
lua: `
|
||||
local startVersionValue = redis.call('GET', KEYS[2])
|
||||
if not startVersionValue then
|
||||
return nil -- this is a cache-miss
|
||||
end
|
||||
local snapshotValue = redis.call('GET', KEYS[1])
|
||||
local changesValues = redis.call('LRANGE', KEYS[3], 0, -1)
|
||||
return {snapshotValue, startVersionValue, changesValues}
|
||||
`,
|
||||
})
|
||||
|
||||
/**
|
||||
* Retrieves the current chunk of project history from Redis storage
|
||||
* @param {string} projectId - The unique identifier of the project
|
||||
* @returns {Promise<Chunk|null>} A Promise that resolves to a Chunk object containing project history,
|
||||
* or null if retrieval fails
|
||||
* @throws {Error} If Redis operations fail
|
||||
*/
|
||||
async function getCurrentChunk(projectId) {
|
||||
try {
|
||||
const result = await rclient.get_current_chunk(
|
||||
keySchema.snapshot({ projectId }),
|
||||
keySchema.startVersion({ projectId }),
|
||||
keySchema.changes({ projectId })
|
||||
)
|
||||
if (!result) {
|
||||
return null // cache-miss
|
||||
}
|
||||
const snapshot = Snapshot.fromRaw(JSON.parse(result[0]))
|
||||
const startVersion = JSON.parse(result[1])
|
||||
const changes = result[2].map(c => Change.fromRaw(JSON.parse(c)))
|
||||
const history = new History(snapshot, changes)
|
||||
const chunk = new Chunk(history, startVersion)
|
||||
metrics.inc('chunk_store.redis.get_current_chunk', 1, { status: 'success' })
|
||||
return chunk
|
||||
} catch (err) {
|
||||
logger.error({ err, projectId }, 'error getting current chunk from redis')
|
||||
metrics.inc('chunk_store.redis.get_current_chunk', 1, { status: 'error' })
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
rclient.defineCommand('get_current_chunk_if_valid', {
|
||||
numberOfKeys: 3,
|
||||
lua: `
|
||||
local expectedStartVersion = ARGV[1]
|
||||
local expectedChangesCount = tonumber(ARGV[2])
|
||||
local startVersionValue = redis.call('GET', KEYS[2])
|
||||
if not startVersionValue then
|
||||
return nil -- this is a cache-miss
|
||||
end
|
||||
if startVersionValue ~= expectedStartVersion then
|
||||
return nil -- this is a cache-miss
|
||||
end
|
||||
local changesCount = redis.call('LLEN', KEYS[3])
|
||||
if changesCount ~= expectedChangesCount then
|
||||
return nil -- this is a cache-miss
|
||||
end
|
||||
local snapshotValue = redis.call('GET', KEYS[1])
|
||||
local changesValues = redis.call('LRANGE', KEYS[3], 0, -1)
|
||||
return {snapshotValue, startVersionValue, changesValues}
|
||||
`,
|
||||
})
|
||||
|
||||
async function getCurrentChunkIfValid(projectId, chunkRecord) {
|
||||
try {
|
||||
const changesCount = chunkRecord.endVersion - chunkRecord.startVersion
|
||||
const result = await rclient.get_current_chunk_if_valid(
|
||||
keySchema.snapshot({ projectId }),
|
||||
keySchema.startVersion({ projectId }),
|
||||
keySchema.changes({ projectId }),
|
||||
chunkRecord.startVersion,
|
||||
changesCount
|
||||
)
|
||||
if (!result) {
|
||||
return null // cache-miss
|
||||
}
|
||||
const snapshot = Snapshot.fromRaw(JSON.parse(result[0]))
|
||||
const startVersion = parseInt(result[1], 10)
|
||||
const changes = result[2].map(c => Change.fromRaw(JSON.parse(c)))
|
||||
const history = new History(snapshot, changes)
|
||||
const chunk = new Chunk(history, startVersion)
|
||||
metrics.inc('chunk_store.redis.get_current_chunk_if_valid', 1, {
|
||||
status: 'success',
|
||||
})
|
||||
return chunk
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
{ err, projectId, chunkRecord },
|
||||
'error getting current chunk from redis'
|
||||
)
|
||||
metrics.inc('chunk_store.redis.get_current_chunk_if_valid', 1, {
|
||||
status: 'error',
|
||||
})
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
rclient.defineCommand('get_current_chunk_metadata', {
|
||||
numberOfKeys: 2,
|
||||
lua: `
|
||||
local startVersionValue = redis.call('GET', KEYS[1])
|
||||
if not startVersionValue then
|
||||
return nil -- this is a cache-miss
|
||||
end
|
||||
local changesCount = redis.call('LLEN', KEYS[2])
|
||||
return {startVersionValue, changesCount}
|
||||
`,
|
||||
})
|
||||
|
||||
/**
|
||||
* Retrieves the current chunk metadata for a given project from Redis
|
||||
* @param {string} projectId - The ID of the project to get metadata for
|
||||
* @returns {Promise<Object|null>} Object containing startVersion and changesCount if found, null on error or cache miss
|
||||
* @property {number} startVersion - The starting version information
|
||||
* @property {number} changesCount - The number of changes in the chunk
|
||||
*/
|
||||
async function getCurrentChunkMetadata(projectId) {
|
||||
try {
|
||||
const result = await rclient.get_current_chunk_metadata(
|
||||
keySchema.startVersion({ projectId }),
|
||||
keySchema.changes({ projectId })
|
||||
)
|
||||
if (!result) {
|
||||
return null // cache-miss
|
||||
}
|
||||
const startVersion = JSON.parse(result[0])
|
||||
const changesCount = parseInt(result[1], 10)
|
||||
return { startVersion, changesCount }
|
||||
} catch (err) {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
rclient.defineCommand('set_current_chunk', {
|
||||
numberOfKeys: 4,
|
||||
lua: `
|
||||
local snapshotValue = ARGV[1]
|
||||
local startVersionValue = ARGV[2]
|
||||
local expireTime = ARGV[3]
|
||||
redis.call('SET', KEYS[1], snapshotValue)
|
||||
redis.call('SET', KEYS[2], startVersionValue)
|
||||
redis.call('SET', KEYS[3], expireTime)
|
||||
redis.call('DEL', KEYS[4]) -- clear the old changes list
|
||||
if #ARGV >= 4 then
|
||||
redis.call('RPUSH', KEYS[4], unpack(ARGV, 4))
|
||||
end
|
||||
|
||||
`,
|
||||
})
|
||||
|
||||
/**
|
||||
* Stores the current chunk of project history in Redis
|
||||
* @param {string} projectId - The ID of the project
|
||||
* @param {Chunk} chunk - The chunk object containing history data
|
||||
* @returns {Promise<*>} Returns the result of the Redis operation, or null if an error occurs
|
||||
* @throws {Error} May throw Redis-related errors which are caught internally
|
||||
*/
|
||||
async function setCurrentChunk(projectId, chunk) {
|
||||
try {
|
||||
const snapshotKey = keySchema.snapshot({ projectId })
|
||||
const startVersionKey = keySchema.startVersion({ projectId })
|
||||
const changesKey = keySchema.changes({ projectId })
|
||||
const expireTimeKey = keySchema.expireTime({ projectId })
|
||||
|
||||
const snapshot = chunk.history.snapshot
|
||||
const startVersion = chunk.startVersion
|
||||
const changes = chunk.history.changes
|
||||
const expireTime = Date.now() + TEMPORARY_CACHE_LIFETIME * 1000
|
||||
|
||||
await rclient.set_current_chunk(
|
||||
snapshotKey, // KEYS[1]
|
||||
startVersionKey, // KEYS[2]
|
||||
expireTimeKey, // KEYS[3]
|
||||
changesKey, // KEYS[4]
|
||||
JSON.stringify(snapshot.toRaw()), // ARGV[1]
|
||||
startVersion, // ARGV[2]
|
||||
expireTime, // ARGV[3]
|
||||
...changes.map(c => JSON.stringify(c.toRaw())) // ARGV[4..]
|
||||
)
|
||||
metrics.inc('chunk_store.redis.set_current_chunk', 1, { status: 'success' })
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
{ err, projectId, chunk },
|
||||
'error setting current chunk in redis'
|
||||
)
|
||||
metrics.inc('chunk_store.redis.set_current_chunk', 1, { status: 'error' })
|
||||
return null // while testing we will suppress any errors
|
||||
}
|
||||
}
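
setCurrentChunk stores the snapshot, the start version and the change list
under hash-tagged keys so the Lua scripts above can touch them atomically,
even on a Redis cluster. A round-trip sketch, assuming getCurrentChunk and
setCurrentChunk both end up in this module's exports and that a Chunk instance
is at hand; the require path is a placeholder:

// sketch only: cache a chunk, then read it back through the Lua getter
const redisBackend = require('./storage/lib/chunk_store/redis') // path is an assumption
async function roundTrip(projectId, chunk) {
  await redisBackend.setCurrentChunk(projectId, chunk)
  const cached = await redisBackend.getCurrentChunk(projectId)
  return cached !== null && cached.getEndVersion() === chunk.getEndVersion()
}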
|
||||
|
||||
/**
|
||||
* Checks whether a cached chunk's version metadata matches the current chunk's metadata
|
||||
* @param {Chunk} cachedChunk - The chunk retrieved from cache
|
||||
* @param {Chunk} currentChunk - The current chunk to compare against
|
||||
* @returns {boolean} - Returns true if the chunks have matching start and end versions, false otherwise
|
||||
*/
|
||||
function checkCacheValidity(cachedChunk, currentChunk) {
|
||||
return Boolean(
|
||||
cachedChunk &&
|
||||
cachedChunk.getStartVersion() === currentChunk.getStartVersion() &&
|
||||
cachedChunk.getEndVersion() === currentChunk.getEndVersion()
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates if a cached chunk matches the current chunk metadata by comparing versions
|
||||
* @param {Object} cachedChunk - The cached chunk object to validate
|
||||
* @param {Object} currentChunkMetadata - The current chunk metadata to compare against
|
||||
* @param {number} currentChunkMetadata.startVersion - The starting version number
|
||||
* @param {number} currentChunkMetadata.endVersion - The ending version number
|
||||
* @returns {boolean} - True if the cached chunk is valid, false otherwise
|
||||
*/
|
||||
function checkCacheValidityWithMetadata(cachedChunk, currentChunkMetadata) {
|
||||
return Boolean(
|
||||
cachedChunk &&
|
||||
cachedChunk.getStartVersion() === currentChunkMetadata.startVersion &&
|
||||
cachedChunk.getEndVersion() === currentChunkMetadata.endVersion
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Compares two chunks for equality using stringified JSON comparison
|
||||
* @param {string} projectId - The ID of the project
|
||||
* @param {Chunk} cachedChunk - The cached chunk to compare
|
||||
* @param {Chunk} currentChunk - The current chunk to compare against
|
||||
* @returns {boolean} - Returns false if either chunk is null/undefined, otherwise returns the comparison result
|
||||
*/
|
||||
function compareChunks(projectId, cachedChunk, currentChunk) {
|
||||
if (!cachedChunk || !currentChunk) {
|
||||
return false
|
||||
}
|
||||
const identical = JSON.stringify(cachedChunk) === JSON.stringify(currentChunk)
|
||||
if (!identical) {
|
||||
try {
|
||||
logger.error(
|
||||
{
|
||||
projectId,
|
||||
cachedChunkStartVersion: cachedChunk.getStartVersion(),
|
||||
cachedChunkEndVersion: cachedChunk.getEndVersion(),
|
||||
currentChunkStartVersion: currentChunk.getStartVersion(),
|
||||
currentChunkEndVersion: currentChunk.getEndVersion(),
|
||||
},
|
||||
'chunk cache mismatch'
|
||||
)
|
||||
} catch (err) {
|
||||
// ignore errors while logging
|
||||
}
|
||||
}
|
||||
metrics.inc('chunk_store.redis.compare_chunks', 1, {
|
||||
status: identical ? 'success' : 'fail',
|
||||
})
|
||||
return identical
|
||||
}
|
||||
|
||||
// Define Lua script for atomic cache clearing
|
||||
rclient.defineCommand('expire_chunk_cache', {
|
||||
numberOfKeys: 5,
|
||||
lua: `
|
||||
local persistTimeExists = redis.call('EXISTS', KEYS[5])
|
||||
if persistTimeExists == 1 then
|
||||
return nil -- chunk has changes pending, do not expire
|
||||
end
|
||||
local currentTime = tonumber(ARGV[1])
|
||||
local expireTimeValue = redis.call('GET', KEYS[4])
|
||||
if not expireTimeValue then
|
||||
return nil -- this is a cache-miss
|
||||
end
|
||||
local expireTime = tonumber(expireTimeValue)
|
||||
if currentTime < expireTime then
|
||||
return nil -- cache is still valid
|
||||
end
|
||||
-- Cache is expired and all changes are persisted, proceed to delete the keys atomically
|
||||
redis.call('DEL', KEYS[1]) -- snapshot key
|
||||
redis.call('DEL', KEYS[2]) -- startVersion key
|
||||
redis.call('DEL', KEYS[3]) -- changes key
|
||||
redis.call('DEL', KEYS[4]) -- expireTime key
|
||||
return 1
|
||||
`,
|
||||
})
|
||||
|
||||
/**
|
||||
* Expire cache entries for a project's chunk data if needed
|
||||
* @param {string} projectId - The ID of the project whose cache should be cleared
|
||||
* @returns {Promise<boolean>} A promise that resolves to true if successful, false on error
|
||||
*/
|
||||
async function expireCurrentChunk(projectId, currentTime) {
|
||||
try {
|
||||
const snapshotKey = keySchema.snapshot({ projectId })
|
||||
const startVersionKey = keySchema.startVersion({ projectId })
|
||||
const changesKey = keySchema.changes({ projectId })
|
||||
const expireTimeKey = keySchema.expireTime({ projectId })
|
||||
const persistTimeKey = keySchema.persistTime({ projectId })
|
||||
const result = await rclient.expire_chunk_cache(
|
||||
snapshotKey,
|
||||
startVersionKey,
|
||||
changesKey,
|
||||
expireTimeKey,
|
||||
persistTimeKey,
|
||||
currentTime || Date.now()
|
||||
)
|
||||
if (!result) {
|
||||
logger.debug(
|
||||
{ projectId },
|
||||
'chunk cache not expired due to pending changes'
|
||||
)
|
||||
metrics.inc('chunk_store.redis.expire_cache', 1, {
|
||||
status: 'skip-due-to-pending-changes',
|
||||
})
|
||||
return false // not expired
|
||||
}
|
||||
metrics.inc('chunk_store.redis.expire_cache', 1, { status: 'success' })
|
||||
return true
|
||||
} catch (err) {
|
||||
logger.error({ err, projectId }, 'error clearing chunk cache from redis')
|
||||
metrics.inc('chunk_store.redis.expire_cache', 1, { status: 'error' })
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// Define Lua script for atomic cache clearing
|
||||
rclient.defineCommand('clear_chunk_cache', {
|
||||
numberOfKeys: 5,
|
||||
lua: `
|
||||
local persistTimeExists = redis.call('EXISTS', KEYS[5])
|
||||
if persistTimeExists == 1 then
|
||||
return nil -- chunk has changes pending, do not clear
|
||||
end
|
||||
-- Delete all keys related to a project's chunk cache atomically
|
||||
redis.call('DEL', KEYS[1]) -- snapshot key
|
||||
redis.call('DEL', KEYS[2]) -- startVersion key
|
||||
redis.call('DEL', KEYS[3]) -- changes key
|
||||
redis.call('DEL', KEYS[4]) -- expireTime key
|
||||
return 1
|
||||
`,
|
||||
})
|
||||
|
||||
/**
|
||||
* Clears all cache entries for a project's chunk data
|
||||
* @param {string} projectId - The ID of the project whose cache should be cleared
|
||||
* @returns {Promise<boolean>} A promise that resolves to true if successful, false on error
|
||||
*/
|
||||
async function clearCache(projectId) {
|
||||
try {
|
||||
const snapshotKey = keySchema.snapshot({ projectId })
|
||||
const startVersionKey = keySchema.startVersion({ projectId })
|
||||
const changesKey = keySchema.changes({ projectId })
|
||||
const expireTimeKey = keySchema.expireTime({ projectId })
|
||||
const persistTimeKey = keySchema.persistTime({ projectId }) // Add persistTimeKey
|
||||
|
||||
const result = await rclient.clear_chunk_cache(
|
||||
snapshotKey,
|
||||
startVersionKey,
|
||||
changesKey,
|
||||
expireTimeKey,
|
||||
persistTimeKey
|
||||
)
|
||||
if (result === null) {
|
||||
logger.debug(
|
||||
{ projectId },
|
||||
'chunk cache not cleared due to pending changes'
|
||||
)
|
||||
metrics.inc('chunk_store.redis.clear_cache', 1, {
|
||||
status: 'skip-due-to-pending-changes',
|
||||
})
|
||||
return false
|
||||
}
|
||||
metrics.inc('chunk_store.redis.clear_cache', 1, { status: 'success' })
|
||||
return true
|
||||
} catch (err) {
|
||||
logger.error({ err, projectId }, 'error clearing chunk cache from redis')
|
||||
metrics.inc('chunk_store.redis.clear_cache', 1, { status: 'error' })
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// Define Lua script for getting chunk status
|
||||
rclient.defineCommand('get_chunk_status', {
|
||||
numberOfKeys: 2, // expireTimeKey, persistTimeKey
|
||||
lua: `
|
||||
local expireTimeValue = redis.call('GET', KEYS[1])
|
||||
local persistTimeValue = redis.call('GET', KEYS[2])
|
||||
return {expireTimeValue, persistTimeValue}
|
||||
`,
|
||||
})
|
||||
|
||||
/**
|
||||
* Retrieves the current chunk status for a given project from Redis
|
||||
* @param {string} projectId - The ID of the project to get status for
|
||||
* @returns {Promise<Object>} Object containing expireTime and persistTime, or nulls on error
|
||||
* @property {number|null} expireTime - The expiration time of the chunk
|
||||
* @property {number|null} persistTime - The persistence time of the chunk
|
||||
*/
|
||||
async function getCurrentChunkStatus(projectId) {
|
||||
try {
|
||||
const expireTimeKey = keySchema.expireTime({ projectId })
|
||||
const persistTimeKey = keySchema.persistTime({ projectId })
|
||||
|
||||
const result = await rclient.get_chunk_status(expireTimeKey, persistTimeKey)
|
||||
|
||||
// Lua script returns an array [expireTimeValue, persistTimeValue]
|
||||
// Redis nil replies are converted to null by ioredis
|
||||
const [expireTime, persistTime] = result
|
||||
|
||||
return {
|
||||
expireTime: expireTime ? parseInt(expireTime, 10) : null, // Parse to number or null
|
||||
persistTime: persistTime ? parseInt(persistTime, 10) : null, // Parse to number or null
|
||||
}
|
||||
} catch (err) {
|
||||
logger.warn({ err, projectId }, 'error getting chunk status from redis')
|
||||
return { expireTime: null, persistTime: null } // Return nulls on error
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the persist time for a project's chunk cache.
|
||||
* This is primarily intended for testing purposes.
|
||||
* @param {string} projectId - The ID of the project.
|
||||
* @param {number} timestamp - The timestamp to set as the persist time.
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async function setPersistTime(projectId, timestamp) {
|
||||
try {
|
||||
const persistTimeKey = keySchema.persistTime({ projectId })
|
||||
await rclient.set(persistTimeKey, timestamp)
|
||||
metrics.inc('chunk_store.redis.set_persist_time', 1, { status: 'success' })
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
{ err, projectId, timestamp },
|
||||
'error setting persist time in redis'
|
||||
)
|
||||
metrics.inc('chunk_store.redis.set_persist_time', 1, { status: 'error' })
|
||||
// Re-throw the error so the test fails if setting fails
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getCurrentChunk,
|
||||
getCurrentChunkIfValid,
|
||||
setCurrentChunk,
|
||||
getCurrentChunkMetadata,
|
||||
checkCacheValidity,
|
||||
checkCacheValidityWithMetadata,
|
||||
compareChunks,
|
||||
expireCurrentChunk,
|
||||
clearCache,
|
||||
getCurrentChunkStatus,
|
||||
setPersistTime, // Export the new function
|
||||
}
|
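The module above gives the chunk store a small Redis cache API built from the exported helpers. A rough usage sketch follows (illustrative only, not part of this changeset; the require path and the inspectAndExpire wrapper are assumptions):

const chunkCache = require('./chunk_store/redis') // assumed path, for illustration

async function inspectAndMaybeExpire(projectId) {
  // Read the expire/persist timestamps kept alongside the cached chunk.
  const { expireTime, persistTime } = await chunkCache.getCurrentChunkStatus(projectId)

  // expireCurrentChunk deletes the cached keys only when no persist-time key
  // exists (i.e. no changes are pending) and the stored expire time has passed.
  const expired = await chunkCache.expireCurrentChunk(projectId, Date.now())

  return { expireTime, persistTime, expired } // expired is true only if the keys were deleted
}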
|
@ -25,8 +25,8 @@ const gunzip = promisify(zlib.gunzip)
|
|||
|
||||
class LoadError extends OError {
|
||||
/**
|
||||
* @param {number|string} projectId
|
||||
* @param {number|string} chunkId
|
||||
* @param {string} projectId
|
||||
* @param {string} chunkId
|
||||
* @param {any} cause
|
||||
*/
|
||||
constructor(projectId, chunkId, cause) {
|
||||
|
@ -42,8 +42,8 @@ class LoadError extends OError {
|
|||
|
||||
class StoreError extends OError {
|
||||
/**
|
||||
* @param {number|string} projectId
|
||||
* @param {number|string} chunkId
|
||||
* @param {string} projectId
|
||||
* @param {string} chunkId
|
||||
* @param {any} cause
|
||||
*/
|
||||
constructor(projectId, chunkId, cause) {
|
||||
|
@ -58,8 +58,8 @@ class StoreError extends OError {
|
|||
}
|
||||
|
||||
/**
|
||||
* @param {number|string} projectId
|
||||
* @param {number|string} chunkId
|
||||
* @param {string} projectId
|
||||
* @param {string} chunkId
|
||||
* @return {string}
|
||||
*/
|
||||
function getKey(projectId, chunkId) {
|
||||
|
@ -89,8 +89,8 @@ class HistoryStore {
|
|||
/**
|
||||
* Load the raw object for a History.
|
||||
*
|
||||
* @param {number|string} projectId
|
||||
* @param {number|string} chunkId
|
||||
* @param {string} projectId
|
||||
* @param {string} chunkId
|
||||
* @return {Promise<import('overleaf-editor-core/lib/types').RawHistory>}
|
||||
*/
|
||||
async loadRaw(projectId, chunkId) {
|
||||
|
@ -144,8 +144,8 @@ class HistoryStore {
|
|||
/**
|
||||
* Compress and store a {@link History}.
|
||||
*
|
||||
* @param {number|string} projectId
|
||||
* @param {number|string} chunkId
|
||||
* @param {string} projectId
|
||||
* @param {string} chunkId
|
||||
* @param {import('overleaf-editor-core/lib/types').RawHistory} rawHistory
|
||||
*/
|
||||
async storeRaw(projectId, chunkId, rawHistory) {
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
// @ts-check
|
||||
|
||||
'use strict'
|
||||
|
||||
const env = process.env.NODE_ENV || 'development'
|
||||
|
||||
const knexfile = require('../../knexfile')
|
||||
module.exports = require('knex')(knexfile[env])
|
||||
module.exports = require('knex').default(knexfile[env])
|
||||
|
|
services/history-v1/storage/lib/redis.js (new file, 19 lines)
@ -0,0 +1,19 @@
const config = require('config')
const redis = require('@overleaf/redis-wrapper')

const historyRedisOptions = config.get('redis.history')
const rclientHistory = redis.createClient(historyRedisOptions)

const lockRedisOptions = config.get('redis.history')
const rclientLock = redis.createClient(lockRedisOptions)

async function disconnect() {
  await Promise.all([rclientHistory.disconnect(), rclientLock.disconnect()])
}

module.exports = {
  rclientHistory,
  rclientLock,
  redis,
  disconnect,
}
services/history-v1/storage/lib/scan.js (new file, 52 lines)
@ -0,0 +1,52 @@
const BATCH_SIZE = 1000 // Default batch size for SCAN

/**
 * Asynchronously scans a Redis instance or cluster for keys matching a pattern.
 *
 * This function handles both standalone Redis instances and Redis clusters.
 * For clusters, it iterates over all master nodes. It yields keys in batches
 * as they are found by the SCAN command.
 *
 * @param {object} redisClient - The Redis client instance (from @overleaf/redis-wrapper).
 * @param {string} pattern - The pattern to match keys against (e.g., 'user:*').
 * @param {number} [count=BATCH_SIZE] - Optional hint for Redis SCAN count per iteration.
 * @yields {string[]} A batch of matching keys.
 */
async function* scanRedisCluster(redisClient, pattern, count = BATCH_SIZE) {
  const nodes = redisClient.nodes ? redisClient.nodes('master') : [redisClient]

  for (const node of nodes) {
    let cursor = '0'
    do {
      // redisClient from @overleaf/redis-wrapper uses ioredis style commands
      const [nextCursor, keys] = await node.scan(
        cursor,
        'MATCH',
        pattern,
        'COUNT',
        count
      )
      cursor = nextCursor
      if (keys.length > 0) {
        yield keys
      }
    } while (cursor !== '0')
  }
}

/**
 * Extracts the content within the first pair of curly braces {} from a string.
 * This is used to extract a user ID or project ID from a Redis key.
 *
 * @param {string} key - The input string containing content within curly braces.
 * @returns {string | null} The extracted content (the key ID) if found, otherwise null.
 */
function extractKeyId(key) {
  const match = key.match(/\{(.*?)\}/)
  if (match && match[1]) {
    return match[1]
  }
  return null
}

module.exports = { scanRedisCluster, extractKeyId }
|
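A minimal consumption sketch for the generator above (illustrative only; the require paths and the surrounding function are assumptions, and the key pattern mirrors the expire-time keys used by the scripts later in this diff):

const { rclientHistory } = require('./redis')
const { scanRedisCluster, extractKeyId } = require('./scan')

async function collectProjectIds(pattern = 'expire-time:{*}') {
  const projectIds = new Set()
  for await (const keysBatch of scanRedisCluster(rclientHistory, pattern)) {
    for (const key of keysBatch) {
      const id = extractKeyId(key) // null when the key has no {...} section
      if (id) {
        projectIds.add(id)
      }
    }
  }
  return projectIds
}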
|
@ -9,6 +9,7 @@ import {
|
|||
create,
|
||||
} from '../lib/chunk_store/index.js'
|
||||
import { client } from '../lib/mongodb.js'
|
||||
import redis from '../lib/redis.js'
|
||||
import knex from '../lib/knex.js'
|
||||
import { historyStore } from '../lib/history_store.js'
|
||||
import pLimit from 'p-limit'
|
||||
|
@ -1091,5 +1092,13 @@ if (import.meta.url === `file://${process.argv[1]}`) {
|
|||
.catch(err => {
|
||||
console.error('Error closing MongoDB connection:', err)
|
||||
})
|
||||
redis
|
||||
.disconnect()
|
||||
.then(() => {
|
||||
console.log('Redis connection closed')
|
||||
})
|
||||
.catch(err => {
|
||||
console.error('Error closing Redis connection:', err)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
|
|
@ -10,6 +10,7 @@ import {
|
|||
import assert from '../lib/assert.js'
|
||||
import knex from '../lib/knex.js'
|
||||
import { client } from '../lib/mongodb.js'
|
||||
import redis from '../lib/redis.js'
|
||||
import { setTimeout } from 'node:timers/promises'
|
||||
import fs from 'node:fs'
|
||||
|
||||
|
@ -23,6 +24,7 @@ async function gracefulShutdown() {
|
|||
console.log('Gracefully shutting down')
|
||||
await knex.destroy()
|
||||
await client.close()
|
||||
await redis.disconnect()
|
||||
await setTimeout(100)
|
||||
process.exit()
|
||||
}
|
||||
|
|
|
@ -240,17 +240,25 @@ async function processPendingProjects(
|
|||
changeTimes.push(pendingAt)
|
||||
const pendingAge = Math.floor((Date.now() - pendingAt.getTime()) / 1000)
|
||||
if (pendingAge > WARN_THRESHOLD) {
|
||||
const backupStatus = await getBackupStatus(projectId)
|
||||
logger.warn(
|
||||
{
|
||||
projectId,
|
||||
pendingAt,
|
||||
pendingAge,
|
||||
backupStatus,
|
||||
warnThreshold: WARN_THRESHOLD,
|
||||
},
|
||||
`pending change exceeds rpo warning threshold`
|
||||
)
|
||||
try {
|
||||
const backupStatus = await getBackupStatus(projectId)
|
||||
logger.warn(
|
||||
{
|
||||
projectId,
|
||||
pendingAt,
|
||||
pendingAge,
|
||||
backupStatus,
|
||||
warnThreshold: WARN_THRESHOLD,
|
||||
},
|
||||
`pending change exceeds rpo warning threshold`
|
||||
)
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
{ projectId, pendingAt, pendingAge },
|
||||
'Error getting backup status'
|
||||
)
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
|
||||
if (showOnly && verbose) {
|
||||
|
@ -290,10 +298,11 @@ async function processPendingProjects(
|
|||
)
|
||||
}
|
||||
}
|
||||
|
||||
const oldestChange = changeTimes.reduce((min, time) =>
|
||||
time < min ? time : min
|
||||
)
|
||||
// Set oldestChange to undefined if there are no changes
|
||||
const oldestChange =
|
||||
changeTimes.length > 0
|
||||
? changeTimes.reduce((min, time) => (time < min ? time : min))
|
||||
: undefined
|
||||
|
||||
if (showOnly) {
|
||||
console.log(
|
||||
|
@ -303,7 +312,9 @@ async function processPendingProjects(
|
|||
console.log(`Found ${count} projects with pending changes:`)
|
||||
console.log(` ${addedCount} jobs added to queue`)
|
||||
console.log(` ${existingCount} jobs already existed in queue`)
|
||||
console.log(` Oldest pending change: ${formatPendingTime(oldestChange)}`)
|
||||
if (oldestChange) {
|
||||
console.log(` Oldest pending change: ${formatPendingTime(oldestChange)}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
services/history-v1/storage/scripts/expire_redis_chunks.js (new file, 98 lines)
@ -0,0 +1,98 @@
|
|||
const logger = require('@overleaf/logger')
|
||||
const commandLineArgs = require('command-line-args') // Add this line
|
||||
const redis = require('../lib/redis')
|
||||
const { scanRedisCluster, extractKeyId } = require('../lib/scan')
|
||||
const { expireCurrentChunk } = require('../lib/chunk_store/redis')
|
||||
|
||||
const rclient = redis.rclientHistory
|
||||
const EXPIRE_TIME_KEY_PATTERN = `expire-time:{*}`
|
||||
|
||||
const optionDefinitions = [{ name: 'dry-run', alias: 'd', type: Boolean }]
|
||||
const options = commandLineArgs(optionDefinitions)
|
||||
const DRY_RUN = options['dry-run'] || false
|
||||
|
||||
logger.initialize('expire-redis-chunks')
|
||||
|
||||
function isExpiredKey(expireTimestamp, currentTime) {
|
||||
const expireTime = parseInt(expireTimestamp, 10)
|
||||
if (isNaN(expireTime)) {
|
||||
return false
|
||||
}
|
||||
logger.debug(
|
||||
{
|
||||
expireTime,
|
||||
currentTime,
|
||||
expireIn: expireTime - currentTime,
|
||||
expired: currentTime > expireTime,
|
||||
},
|
||||
'Checking if key is expired'
|
||||
)
|
||||
return currentTime > expireTime
|
||||
}
|
||||
|
||||
async function processKeysBatch(keysBatch, rclient) {
|
||||
let clearedKeyCount = 0
|
||||
if (keysBatch.length === 0) {
|
||||
return 0
|
||||
}
|
||||
// For efficiency, we use MGET to fetch all the timestamps in a single request
|
||||
const expireTimestamps = await rclient.mget(keysBatch)
|
||||
const currentTime = Date.now()
|
||||
for (let i = 0; i < keysBatch.length; i++) {
|
||||
const key = keysBatch[i]
|
||||
// For each key, do a quick check to see if the key is expired before calling
|
||||
// the LUA script to expire the chunk atomically.
|
||||
if (isExpiredKey(expireTimestamps[i], currentTime)) {
|
||||
const projectId = extractKeyId(key)
|
||||
if (DRY_RUN) {
|
||||
logger.info({ projectId }, '[Dry Run] Would expire chunk for project')
|
||||
} else {
|
||||
await expireCurrentChunk(projectId)
|
||||
}
|
||||
clearedKeyCount++
|
||||
}
|
||||
}
|
||||
return clearedKeyCount
|
||||
}
|
||||
|
||||
async function expireRedisChunks() {
|
||||
let scannedKeyCount = 0
|
||||
let clearedKeyCount = 0
|
||||
const START_TIME = Date.now()
|
||||
|
||||
if (DRY_RUN) {
|
||||
// Use global DRY_RUN
|
||||
logger.info({}, 'starting expireRedisChunks scan in DRY RUN mode')
|
||||
} else {
|
||||
logger.info({}, 'starting expireRedisChunks scan')
|
||||
}
|
||||
|
||||
for await (const keysBatch of scanRedisCluster(
|
||||
rclient,
|
||||
EXPIRE_TIME_KEY_PATTERN
|
||||
)) {
|
||||
scannedKeyCount += keysBatch.length
|
||||
clearedKeyCount += await processKeysBatch(keysBatch, rclient)
|
||||
if (scannedKeyCount % 1000 === 0) {
|
||||
logger.info(
|
||||
{ scannedKeyCount, clearedKeyCount },
|
||||
'expireRedisChunks scan progress'
|
||||
)
|
||||
}
|
||||
}
|
||||
logger.info(
|
||||
{
|
||||
scannedKeyCount,
|
||||
clearedKeyCount,
|
||||
elapsedTimeInSeconds: Math.floor((Date.now() - START_TIME) / 1000),
|
||||
dryRun: DRY_RUN,
|
||||
},
|
||||
'expireRedisChunks scan complete'
|
||||
)
|
||||
await redis.disconnect()
|
||||
}
|
||||
|
||||
expireRedisChunks().catch(err => {
|
||||
logger.fatal({ err }, 'unhandled error in expireRedisChunks')
|
||||
process.exit(1)
|
||||
})
|
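For reference, the quick pre-check in processKeysBatch behaves as sketched below; the timestamps are made up and this is not additional code from the changeset, it simply restates what isExpiredKey does. The script itself is invoked directly and accepts a --dry-run (-d) flag, per optionDefinitions above.

const now = Date.now()
isExpiredKey(String(now - 60_000), now) // true: the stored expire time has already passed
isExpiredKey(String(now + 60_000), now) // false: the cache entry is still within its lifetime
isExpiredKey('not-a-number', now)       // false: unparseable timestamps are never treated as expired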
services/history-v1/storage/scripts/list_redis_buffer_stats.js (new file, 145 lines)
@ -0,0 +1,145 @@
|
|||
const { rclientHistory, disconnect } = require('../lib/redis')
|
||||
const { scanRedisCluster } = require('../lib/scan')
|
||||
|
||||
// Lua script to get snapshot length, change lengths, and change timestamps
|
||||
// Assumes snapshot key is a string and changes key is a list.
|
||||
const LUA_SCRIPT = `
|
||||
-- local cjson = require('cjson')
|
||||
local snapshotKey = KEYS[1]
|
||||
local changesKey = KEYS[2]
|
||||
|
||||
-- Get snapshot length (returns 0 if key does not exist)
|
||||
local snapshotLen = redis.call('STRLEN', snapshotKey)
|
||||
|
||||
-- Return nil if snapshot is empty
|
||||
if snapshotLen == 0 then
|
||||
return nil
|
||||
end
|
||||
|
||||
local changeLengths = {}
|
||||
local changeTimestamps = {}
|
||||
|
||||
-- Get all changes (returns empty list if key does not exist)
|
||||
local changes = redis.call('LRANGE', changesKey, 0, -1)
|
||||
|
||||
-- FIXME: it would be better to send all the changes back and do the processing
|
||||
-- in JS to avoid blocking redis, if we need to run this script regularly
|
||||
for i, change in ipairs(changes) do
|
||||
-- Calculate length
|
||||
table.insert(changeLengths, string.len(change))
|
||||
|
||||
-- Attempt to decode JSON and extract timestamp
|
||||
local ok, decoded = pcall(cjson.decode, change)
|
||||
if ok and type(decoded) == 'table' and decoded.timestamp then
|
||||
table.insert(changeTimestamps, decoded.timestamp)
|
||||
else
|
||||
-- Handle cases where decoding fails or timestamp is missing
|
||||
-- Log or insert a placeholder like nil if needed, otherwise skip
|
||||
table.insert(changeTimestamps, nil) -- Keep placeholder for consistency
|
||||
end
|
||||
end
|
||||
|
||||
-- Return snapshot length, list of change lengths, and list of change timestamps
|
||||
return {snapshotLen, changeLengths, changeTimestamps}
|
||||
`
|
||||
|
||||
// Define the command if it doesn't exist
|
||||
if (!rclientHistory.getProjectBufferStats) {
|
||||
rclientHistory.defineCommand('getProjectBufferStats', {
|
||||
numberOfKeys: 2,
|
||||
lua: LUA_SCRIPT,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Processes a single project ID: fetches its buffer stats from Redis
|
||||
* and writes the results to the output stream in CSV format.
|
||||
*
|
||||
* @param {string} projectId The project ID to process.
|
||||
* @param {WritableStream} outputStream The stream to write CSV output to.
|
||||
*/
|
||||
async function processProject(projectId, outputStream) {
|
||||
try {
|
||||
// Get current time in milliseconds *before* fetching data
|
||||
const nowMs = Date.now()
|
||||
|
||||
// Execute the Lua script
|
||||
const result = await rclientHistory.getProjectBufferStats(
|
||||
`snapshot:${projectId}`,
|
||||
`changes:${projectId}`
|
||||
)
|
||||
|
||||
// Check if the result is null (e.g., snapshot is empty)
|
||||
if (result === null) {
|
||||
console.log(
|
||||
`Skipping project ${projectId}: Snapshot is empty or does not exist.`
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
const [snapshotSize, changeSizes, changeTimestamps] = result
|
||||
|
||||
// Output snapshot size
|
||||
outputStream.write(`${projectId},snapshotSize,${snapshotSize}\n`)
|
||||
outputStream.write(`${projectId},changeCount,${changeSizes.length}\n`)
|
||||
|
||||
const changes = changeSizes.map((size, index) => [
|
||||
size,
|
||||
changeTimestamps[index],
|
||||
])
|
||||
|
||||
let totalChangeSize = 0
|
||||
// Output change sizes
|
||||
for (const [changeSize, changeTimestamp] of changes) {
|
||||
totalChangeSize += parseInt(changeSize, 10)
|
||||
const age = nowMs - new Date(changeTimestamp)
|
||||
const ageInSeconds = Math.floor(age / 1000)
|
||||
outputStream.write(`${projectId},change,${changeSize},${ageInSeconds}\n`)
|
||||
}
|
||||
outputStream.write(`${projectId},totalChangeSize,${totalChangeSize}\n`)
|
||||
} catch (err) {
|
||||
// Log error for this specific project but continue with others
|
||||
console.error(`Error processing project ${projectId}:`, err)
|
||||
}
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const outputStream = process.stdout
|
||||
|
||||
// Write CSV header
|
||||
outputStream.write('projectId,type,size,age\n')
|
||||
|
||||
try {
|
||||
const scanPattern = 'snapshot:*'
|
||||
console.log(`Scanning Redis for keys matching "${scanPattern}"...`)
|
||||
|
||||
for await (const keysBatch of scanRedisCluster(
|
||||
rclientHistory,
|
||||
scanPattern
|
||||
)) {
|
||||
for (const key of keysBatch) {
|
||||
const parts = key.split(':')
|
||||
if (parts.length !== 2 || parts[0] !== 'snapshot') {
|
||||
console.warn(`Skipping malformed key: ${key}`)
|
||||
continue
|
||||
}
|
||||
const projectId = parts[1]
|
||||
|
||||
// Call processProject directly and await it sequentially
|
||||
await processProject(projectId, outputStream)
|
||||
}
|
||||
}
|
||||
|
||||
console.log('Finished processing keys.')
|
||||
} catch (error) {
|
||||
console.error('Error during Redis scan:', error)
|
||||
} finally {
|
||||
await disconnect()
|
||||
console.log('Redis connections closed.')
|
||||
}
|
||||
}
|
||||
|
||||
main().catch(err => {
|
||||
console.error('Unhandled error in main:', err)
|
||||
process.exit(1)
|
||||
})
|
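For orientation, the script streams one CSV row per statistic to stdout. With a made-up project id the output would look roughly like the sample below (values are illustrative only; the age column, in seconds, appears only on change rows):

projectId,type,size,age
650000000000000000000abc,snapshotSize,10240
650000000000000000000abc,changeCount,2
650000000000000000000abc,change,512,12
650000000000000000000abc,change,931,4
650000000000000000000abc,totalChangeSize,1443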
|
@ -1,11 +1,28 @@
|
|||
import redis from '@overleaf/redis-wrapper'
|
||||
import config from 'config'
|
||||
|
||||
const redisOptions = config.get('redis.queue')
|
||||
// Get allowed Redis dbs from config
|
||||
const redisConfig = config.get('redis')
|
||||
const allowedDbs = Object.keys(redisConfig)
|
||||
|
||||
// Get the Redis db from command line argument or use the first available db as default
|
||||
const db = process.argv[2]
|
||||
|
||||
// Validate redis db
|
||||
if (!allowedDbs.includes(db)) {
|
||||
if (db) {
|
||||
console.error('Invalid redis db:', db)
|
||||
}
|
||||
console.error(`Usage: node redis.mjs [${allowedDbs.join('|')}]`)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
// Get redis options based on command line argument
|
||||
const redisOptions = config.get(`redis.${db}`)
|
||||
console.log('Using redis db:', db)
|
||||
console.log('REDIS CONFIG', {
|
||||
...redisOptions,
|
||||
password: '*'.repeat(redisOptions.password.length),
|
||||
password: '*'.repeat(redisOptions.password?.length),
|
||||
})
|
||||
const rclient = redis.createClient(redisOptions)
|
||||
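With this change the script no longer hardcodes redis.queue: the target db comes from the first command-line argument and is validated against the keys of the redis config section. Assuming the config defines a redis.history section (as the new lib/redis.js in this diff does), an invocation would look like:

node redis.mjs history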
|
||||
|
|
|
@ -6,6 +6,7 @@ import {
|
|||
} from '../lib/chunk_store/index.js'
|
||||
import { client } from '../lib/mongodb.js'
|
||||
import knex from '../lib/knex.js'
|
||||
import redis from '../lib/redis.js'
|
||||
import {
|
||||
loadGlobalBlobs,
|
||||
BlobStore,
|
||||
|
@ -247,4 +248,7 @@ main()
|
|||
.finally(() => {
|
||||
knex.destroy().catch(err => console.error('Error closing Postgres:', err))
|
||||
client.close().catch(err => console.error('Error closing MongoDB:', err))
|
||||
redis
|
||||
.disconnect()
|
||||
.catch(err => console.error('Error disconnecting Redis:', err))
|
||||
})
|
||||
|
|
|
@ -16,6 +16,7 @@ import {
|
|||
db,
|
||||
client,
|
||||
} from '../lib/mongodb.js'
|
||||
import redis from '../lib/redis.js'
|
||||
import commandLineArgs from 'command-line-args'
|
||||
import fs from 'node:fs'
|
||||
|
||||
|
@ -146,4 +147,7 @@ main()
|
|||
console.error('Error closing Postgres connection:', err)
|
||||
})
|
||||
client.close().catch(err => console.error('Error closing MongoDB:', err))
|
||||
redis.disconnect().catch(err => {
|
||||
console.error('Error disconnecting Redis:', err)
|
||||
})
|
||||
})
|
||||
|
|
|
@ -2,6 +2,7 @@ import commandLineArgs from 'command-line-args'
|
|||
import { verifyProjectWithErrorContext } from '../lib/backupVerifier.mjs'
|
||||
import knex from '../lib/knex.js'
|
||||
import { client } from '../lib/mongodb.js'
|
||||
import redis from '../lib/redis.js'
|
||||
import { setTimeout } from 'node:timers/promises'
|
||||
import { loadGlobalBlobs } from '../lib/blob_store/index.js'
|
||||
|
||||
|
@ -10,6 +11,7 @@ const { historyId } = commandLineArgs([{ name: 'historyId', type: String }])
|
|||
async function gracefulShutdown(code = process.exitCode) {
|
||||
await knex.destroy()
|
||||
await client.close()
|
||||
await redis.disconnect()
|
||||
await setTimeout(1_000)
|
||||
process.exit(code)
|
||||
}
|
||||
|
|
|
@ -14,6 +14,7 @@ import { loadGlobalBlobs } from '../lib/blob_store/index.js'
|
|||
import { getDatesBeforeRPO } from '../../backupVerifier/utils.mjs'
|
||||
import { EventEmitter } from 'node:events'
|
||||
import { mongodb } from '../index.js'
|
||||
import redis from '../lib/redis.js'
|
||||
|
||||
logger.logger.level('fatal')
|
||||
|
||||
|
@ -30,6 +31,7 @@ const usageMessage = [
|
|||
async function gracefulShutdown(code = process.exitCode) {
|
||||
await knex.destroy()
|
||||
await client.close()
|
||||
await redis.disconnect()
|
||||
await setTimeout(1_000)
|
||||
process.exit(code)
|
||||
}
|
||||
|
|
|
@ -30,14 +30,17 @@ import { historyStore } from '../../../../storage/lib/history_store.js'
|
|||
* @typedef {import("overleaf-editor-core").Blob} Blob
|
||||
*/
|
||||
|
||||
async function verifyProjectScript(historyId) {
|
||||
// Timeout for script execution, increased to avoid flaky tests
|
||||
const SCRIPT_TIMEOUT = 15_000
|
||||
|
||||
async function verifyProjectScript(historyId, expectFail = true) {
|
||||
try {
|
||||
const result = await promisify(execFile)(
|
||||
process.argv0,
|
||||
['storage/scripts/verify_project.mjs', `--historyId=${historyId}`],
|
||||
{
|
||||
encoding: 'utf-8',
|
||||
timeout: 5_000,
|
||||
timeout: SCRIPT_TIMEOUT,
|
||||
env: {
|
||||
...process.env,
|
||||
LOG_LEVEL: 'warn',
|
||||
|
@ -53,6 +56,9 @@ async function verifyProjectScript(historyId) {
|
|||
'code' in err &&
|
||||
'stderr' in err
|
||||
) {
|
||||
if (!expectFail) {
|
||||
console.log(err)
|
||||
}
|
||||
return {
|
||||
stdout: typeof err.stdout === 'string' ? err.stdout : '',
|
||||
status: typeof err.code === 'number' ? err.code : -1,
|
||||
|
@ -68,7 +74,7 @@ async function verifyProjectScript(historyId) {
|
|||
* @param {string} hash
|
||||
* @return {Promise<{stdout: string, status:number }>}
|
||||
*/
|
||||
async function verifyBlobScript(historyId, hash) {
|
||||
async function verifyBlobScript(historyId, hash, expectFail = true) {
|
||||
try {
|
||||
const result = await promisify(execFile)(
|
||||
process.argv0,
|
||||
|
@ -79,7 +85,7 @@ async function verifyBlobScript(historyId, hash) {
|
|||
],
|
||||
{
|
||||
encoding: 'utf-8',
|
||||
timeout: 5_000,
|
||||
timeout: SCRIPT_TIMEOUT,
|
||||
env: {
|
||||
...process.env,
|
||||
LOG_LEVEL: 'warn',
|
||||
|
@ -89,6 +95,9 @@ async function verifyBlobScript(historyId, hash) {
|
|||
return { status: 0, stdout: result.stdout }
|
||||
} catch (err) {
|
||||
if (err && typeof err === 'object' && 'stdout' in err && 'code' in err) {
|
||||
if (!expectFail) {
|
||||
console.log(err)
|
||||
}
|
||||
return {
|
||||
stdout: typeof err.stdout === 'string' ? err.stdout : '',
|
||||
status: typeof err.code === 'number' ? err.code : -1,
|
||||
|
@ -202,6 +211,7 @@ async function checkDEKExists(historyId) {
|
|||
}
|
||||
|
||||
describe('backupVerifier', function () {
|
||||
this.timeout(5_000 + SCRIPT_TIMEOUT) // allow time for external scripts to run
|
||||
const historyIdPostgres = '42'
|
||||
const historyIdMongo = '000000000000000000000042'
|
||||
let blobHashPG, blobHashMongo, blobPathPG
|
||||
|
@ -228,7 +238,7 @@ describe('backupVerifier', function () {
|
|||
describe('storage/scripts/verify_project.mjs', function () {
|
||||
describe('when the project is appropriately backed up', function () {
|
||||
it('should return 0', async function () {
|
||||
const response = await verifyProjectScript(historyIdPostgres)
|
||||
const response = await verifyProjectScript(historyIdPostgres, false)
|
||||
expect(response.status).to.equal(0)
|
||||
})
|
||||
})
|
||||
|
@ -306,12 +316,20 @@ describe('backupVerifier', function () {
|
|||
expect(result.stdout).to.include('hash mismatch for backed up blob')
|
||||
})
|
||||
it('should successfully verify from postgres', async function () {
|
||||
const result = await verifyBlobScript(historyIdPostgres, blobHashPG)
|
||||
const result = await verifyBlobScript(
|
||||
historyIdPostgres,
|
||||
blobHashPG,
|
||||
false
|
||||
)
|
||||
expect(result.status).to.equal(0)
|
||||
expect(result.stdout.split('\n')).to.include('OK')
|
||||
})
|
||||
it('should successfully verify from mongo', async function () {
|
||||
const result = await verifyBlobScript(historyIdMongo, blobHashMongo)
|
||||
const result = await verifyBlobScript(
|
||||
historyIdMongo,
|
||||
blobHashMongo,
|
||||
false
|
||||
)
|
||||
expect(result.status).to.equal(0)
|
||||
expect(result.stdout.split('\n')).to.include('OK')
|
||||
})
|
||||
|
|
|
@ -22,6 +22,7 @@ const TextOperation = core.TextOperation
|
|||
const V2DocVersions = core.V2DocVersions
|
||||
|
||||
const knex = require('../../../../storage').knex
|
||||
const redis = require('../../../../storage/lib/chunk_store/redis')
|
||||
|
||||
describe('history import', function () {
|
||||
beforeEach(cleanup.everything)
|
||||
|
@ -580,7 +581,7 @@ describe('history import', function () {
|
|||
.catch(expectResponse.unprocessableEntity)
|
||||
.then(getLatestContent)
|
||||
.then(response => {
|
||||
// Check that no chaes were made
|
||||
// Check that no changes were made
|
||||
const snapshot = Snapshot.fromRaw(response.obj)
|
||||
expect(snapshot.countFiles()).to.equal(1)
|
||||
expect(snapshot.getFile(mainFilePathname).getHash()).to.equal(
|
||||
|
@ -594,6 +595,10 @@ describe('history import', function () {
|
|||
testFiles.NULL_CHARACTERS_TXT_BYTE_LENGTH
|
||||
)
|
||||
})
|
||||
.then(() => {
|
||||
// Now clear the cache because we have changed the string length in the database
|
||||
return redis.clearCache(testProjectId)
|
||||
})
|
||||
.then(importChanges)
|
||||
.then(getLatestContent)
|
||||
.then(response => {
|
||||
|
|
|
@ -21,6 +21,8 @@ const {
|
|||
Snapshot,
|
||||
Change,
|
||||
AddFileOperation,
|
||||
EditFileOperation,
|
||||
TextOperation,
|
||||
} = require('overleaf-editor-core')
|
||||
const testProjects = require('./support/test_projects')
|
||||
|
||||
|
@ -103,56 +105,176 @@ describe('project controller', function () {
|
|||
// https://github.com/overleaf/write_latex/pull/5120#discussion_r244291862
|
||||
})
|
||||
|
||||
describe('getLatestHashedContent', function () {
|
||||
let limitsToPersistImmediately
|
||||
describe('project with changes', function () {
|
||||
let projectId
|
||||
|
||||
before(function () {
|
||||
beforeEach(async function () {
|
||||
// used to provide a limit which forces us to persist all of the changes.
|
||||
const farFuture = new Date()
|
||||
farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000)
|
||||
limitsToPersistImmediately = {
|
||||
const limits = {
|
||||
minChangeTimestamp: farFuture,
|
||||
maxChangeTimestamp: farFuture,
|
||||
}
|
||||
})
|
||||
|
||||
it('returns a snaphot', async function () {
|
||||
const changes = [
|
||||
new Change(
|
||||
[new AddFileOperation('test.tex', File.fromString('ab'))],
|
||||
new Date(),
|
||||
[]
|
||||
),
|
||||
new Change(
|
||||
[new AddFileOperation('other.tex', File.fromString('hello'))],
|
||||
new Date(),
|
||||
[]
|
||||
),
|
||||
]
|
||||
|
||||
const projectId = await createEmptyProject()
|
||||
await persistChanges(projectId, changes, limitsToPersistImmediately, 0)
|
||||
const response =
|
||||
await testServer.basicAuthClient.apis.Project.getLatestHashedContent({
|
||||
project_id: projectId,
|
||||
})
|
||||
expect(response.status).to.equal(HTTPStatus.OK)
|
||||
const snapshot = Snapshot.fromRaw(response.obj)
|
||||
expect(snapshot.countFiles()).to.equal(1)
|
||||
expect(snapshot.getFile('test.tex').getHash()).to.equal(
|
||||
testFiles.STRING_AB_HASH
|
||||
)
|
||||
projectId = await createEmptyProject()
|
||||
await persistChanges(projectId, changes, limits, 0)
|
||||
})
|
||||
describe('getLatestHistoryRaw', function () {
|
||||
it('should handles read', async function () {
|
||||
const projectId = fixtures.docs.initializedProject.id
|
||||
|
||||
describe('getLatestHashedContent', function () {
|
||||
it('returns a snapshot', async function () {
|
||||
const response =
|
||||
await testServer.pseudoJwtBasicAuthClient.apis.Project.getLatestHistoryRaw(
|
||||
{
|
||||
project_id: projectId,
|
||||
readOnly: 'true',
|
||||
}
|
||||
await testServer.basicAuthClient.apis.Project.getLatestHashedContent({
|
||||
project_id: projectId,
|
||||
})
|
||||
expect(response.status).to.equal(HTTPStatus.OK)
|
||||
const snapshot = Snapshot.fromRaw(response.obj)
|
||||
expect(snapshot.countFiles()).to.equal(2)
|
||||
expect(snapshot.getFile('test.tex').getHash()).to.equal(
|
||||
testFiles.STRING_AB_HASH
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('getChanges', function () {
|
||||
it('returns all changes when not given a limit', async function () {
|
||||
const response =
|
||||
await testServer.basicAuthClient.apis.Project.getChanges({
|
||||
project_id: projectId,
|
||||
})
|
||||
expect(response.status).to.equal(HTTPStatus.OK)
|
||||
const changes = response.obj
|
||||
expect(changes.length).to.equal(2)
|
||||
const filenames = changes
|
||||
.flatMap(change => change.operations)
|
||||
.map(operation => operation.pathname)
|
||||
expect(filenames).to.deep.equal(['test.tex', 'other.tex'])
|
||||
})
|
||||
|
||||
it('returns only requested changes', async function () {
|
||||
const response =
|
||||
await testServer.basicAuthClient.apis.Project.getChanges({
|
||||
project_id: projectId,
|
||||
since: 1,
|
||||
})
|
||||
expect(response.status).to.equal(HTTPStatus.OK)
|
||||
const changes = response.obj
|
||||
expect(changes.length).to.equal(1)
|
||||
const filenames = changes
|
||||
.flatMap(change => change.operations)
|
||||
.map(operation => operation.pathname)
|
||||
expect(filenames).to.deep.equal(['other.tex'])
|
||||
})
|
||||
|
||||
it('rejects negative versions', async function () {
|
||||
await expect(
|
||||
testServer.basicAuthClient.apis.Project.getChanges({
|
||||
project_id: projectId,
|
||||
since: -1,
|
||||
})
|
||||
).to.be.rejectedWith('Bad Request')
|
||||
})
|
||||
|
||||
it('rejects out of bounds versions', async function () {
|
||||
await expect(
|
||||
testServer.basicAuthClient.apis.Project.getChanges({
|
||||
project_id: projectId,
|
||||
since: 20,
|
||||
})
|
||||
).to.be.rejectedWith('Bad Request')
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('project with many chunks', function () {
|
||||
let projectId
|
||||
|
||||
beforeEach(async function () {
|
||||
// used to provide a limit which forces us to persist all of the changes.
|
||||
const farFuture = new Date()
|
||||
farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000)
|
||||
const limits = {
|
||||
minChangeTimestamp: farFuture,
|
||||
maxChangeTimestamp: farFuture,
|
||||
maxChunkChanges: 5,
|
||||
}
|
||||
const changes = [
|
||||
new Change(
|
||||
[new AddFileOperation('test.tex', File.fromString(''))],
|
||||
new Date(),
|
||||
[]
|
||||
),
|
||||
]
|
||||
|
||||
for (let i = 0; i < 20; i++) {
|
||||
const textOperation = new TextOperation()
|
||||
textOperation.retain(i)
|
||||
textOperation.insert('x')
|
||||
changes.push(
|
||||
new Change(
|
||||
[new EditFileOperation('test.tex', textOperation)],
|
||||
new Date(),
|
||||
[]
|
||||
)
|
||||
expect(response.body).to.deep.equal({
|
||||
startVersion: 0,
|
||||
endVersion: 1,
|
||||
endTimestamp: '2032-01-01T00:00:00.000Z',
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
projectId = await createEmptyProject()
|
||||
await persistChanges(projectId, changes, limits, 0)
|
||||
})
|
||||
|
||||
it('returns all changes when not given a limit', async function () {
|
||||
const response = await testServer.basicAuthClient.apis.Project.getChanges(
|
||||
{
|
||||
project_id: projectId,
|
||||
}
|
||||
)
|
||||
expect(response.status).to.equal(HTTPStatus.OK)
|
||||
const changes = response.obj
|
||||
expect(changes.length).to.equal(21)
|
||||
expect(changes[10].operations[0].textOperation).to.deep.equal([9, 'x'])
|
||||
})
|
||||
|
||||
it('returns only requested changes', async function () {
|
||||
const response = await testServer.basicAuthClient.apis.Project.getChanges(
|
||||
{
|
||||
project_id: projectId,
|
||||
since: 10,
|
||||
}
|
||||
)
|
||||
expect(response.status).to.equal(HTTPStatus.OK)
|
||||
const changes = response.obj
|
||||
expect(changes.length).to.equal(11)
|
||||
expect(changes[2].operations[0].textOperation).to.deep.equal([11, 'x'])
|
||||
})
|
||||
})
|
||||
|
||||
describe('getLatestHistoryRaw', function () {
|
||||
it('should handles read', async function () {
|
||||
const projectId = fixtures.docs.initializedProject.id
|
||||
const response =
|
||||
await testServer.pseudoJwtBasicAuthClient.apis.Project.getLatestHistoryRaw(
|
||||
{
|
||||
project_id: projectId,
|
||||
readOnly: 'true',
|
||||
}
|
||||
)
|
||||
expect(response.body).to.deep.equal({
|
||||
startVersion: 0,
|
||||
endVersion: 1,
|
||||
endTimestamp: '2032-01-01T00:00:00.000Z',
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
|
@ -26,7 +26,7 @@ async function listenOnRandomPort() {
|
|||
return
|
||||
} catch {}
|
||||
}
|
||||
server = await startApp(0)
|
||||
server = await startApp(0, false)
|
||||
}
|
||||
|
||||
after('close server', function (done) {
|
||||
|
|
services/history-v1/test/acceptance/js/storage/assert.test.js (new file, 248 lines)
@ -0,0 +1,248 @@
|
|||
'use strict'
|
||||
|
||||
const OError = require('@overleaf/o-error')
|
||||
const { expect } = require('chai')
|
||||
const assert = require('../../../../storage/lib/assert')
|
||||
|
||||
describe('assert', function () {
|
||||
describe('blobHash', function () {
|
||||
it('should not throw for valid blob hashes', function () {
|
||||
expect(() =>
|
||||
assert.blobHash(
|
||||
'aad321caf77ca6c5ab09e6c638c237705f93b001',
|
||||
'should be a blob hash'
|
||||
)
|
||||
).to.not.throw()
|
||||
})
|
||||
|
||||
it('should throw for invalid blob hashes', function () {
|
||||
try {
|
||||
assert.blobHash('invalid-hash', 'should be a blob hash')
|
||||
expect.fail()
|
||||
} catch (error) {
|
||||
expect(error).to.be.instanceOf(TypeError)
|
||||
expect(error.message).to.equal('should be a blob hash')
|
||||
expect(OError.getFullInfo(error)).to.deep.equal({ arg: 'invalid-hash' })
|
||||
}
|
||||
})
|
||||
|
||||
it('should throw for string integer blob hashes', function () {
|
||||
try {
|
||||
assert.blobHash('123', 'should be a blob hash')
|
||||
expect.fail()
|
||||
} catch (error) {
|
||||
expect(error).to.be.instanceOf(TypeError)
|
||||
expect(error.message).to.equal('should be a blob hash')
|
||||
expect(OError.getFullInfo(error)).to.deep.equal({ arg: '123' })
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe('projectId', function () {
|
||||
it('should not throw for valid mongo project ids', function () {
|
||||
expect(() =>
|
||||
assert.projectId('507f1f77bcf86cd799439011', 'should be a project id')
|
||||
).to.not.throw()
|
||||
})
|
||||
|
||||
it('should not throw for valid postgres project ids', function () {
|
||||
expect(() =>
|
||||
assert.projectId('123456789', 'should be a project id')
|
||||
).to.not.throw()
|
||||
})
|
||||
|
||||
it('should throw for invalid project ids', function () {
|
||||
try {
|
||||
assert.projectId('invalid-id', 'should be a project id')
|
||||
expect.fail()
|
||||
} catch (error) {
|
||||
expect(error).to.be.instanceOf(TypeError)
|
||||
expect(error.message).to.equal('should be a project id')
|
||||
expect(OError.getFullInfo(error)).to.deep.equal({ arg: 'invalid-id' })
|
||||
}
|
||||
})
|
||||
|
||||
it('should throw for non-numeric project ids', function () {
|
||||
try {
|
||||
assert.projectId('12345x', 'should be a project id')
|
||||
expect.fail()
|
||||
} catch (error) {
|
||||
expect(error).to.be.instanceOf(TypeError)
|
||||
expect(error.message).to.equal('should be a project id')
|
||||
expect(OError.getFullInfo(error)).to.deep.equal({ arg: '12345x' })
|
||||
}
|
||||
})
|
||||
|
||||
it('should throw for postgres ids starting with 0', function () {
|
||||
try {
|
||||
assert.projectId('0123456', 'should be a project id')
|
||||
expect.fail()
|
||||
} catch (error) {
|
||||
expect(error).to.be.instanceOf(TypeError)
|
||||
expect(error.message).to.equal('should be a project id')
|
||||
expect(OError.getFullInfo(error)).to.deep.equal({ arg: '0123456' })
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe('chunkId', function () {
|
||||
it('should not throw for valid mongo chunk ids', function () {
|
||||
expect(() =>
|
||||
assert.chunkId('507f1f77bcf86cd799439011', 'should be a chunk id')
|
||||
).to.not.throw()
|
||||
})
|
||||
|
||||
it('should not throw for valid postgres chunk ids', function () {
|
||||
expect(() =>
|
||||
assert.chunkId('123456789', 'should be a chunk id')
|
||||
).to.not.throw()
|
||||
})
|
||||
|
||||
it('should throw for invalid chunk ids', function () {
|
||||
try {
|
||||
assert.chunkId('invalid-id', 'should be a chunk id')
|
||||
expect.fail()
|
||||
} catch (error) {
|
||||
expect(error).to.be.instanceOf(TypeError)
|
||||
expect(error.message).to.equal('should be a chunk id')
|
||||
expect(OError.getFullInfo(error)).to.deep.equal({ arg: 'invalid-id' })
|
||||
}
|
||||
})
|
||||
|
||||
it('should throw for integer chunk ids', function () {
|
||||
try {
|
||||
assert.chunkId(12345, 'should be a chunk id')
|
||||
expect.fail()
|
||||
} catch (error) {
|
||||
expect(error).to.be.instanceOf(TypeError)
|
||||
expect(error.message).to.equal('should be a chunk id')
|
||||
expect(OError.getFullInfo(error)).to.deep.equal({ arg: 12345 })
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe('mongoId', function () {
|
||||
it('should not throw for valid mongo ids', function () {
|
||||
expect(() =>
|
||||
assert.mongoId('507f1f77bcf86cd799439011', 'should be a mongo id')
|
||||
).to.not.throw()
|
||||
})
|
||||
|
||||
it('should throw for invalid mongo ids', function () {
|
||||
try {
|
||||
assert.mongoId('invalid-id', 'should be a mongo id')
|
||||
expect.fail()
|
||||
} catch (error) {
|
||||
expect(error).to.be.instanceOf(TypeError)
|
||||
expect(error.message).to.equal('should be a mongo id')
|
||||
expect(OError.getFullInfo(error)).to.deep.equal({ arg: 'invalid-id' })
|
||||
}
|
||||
})
|
||||
|
||||
it('should throw for numeric mongo ids', function () {
|
||||
try {
|
||||
assert.mongoId('12345', 'should be a mongo id')
|
||||
expect.fail()
|
||||
} catch (error) {
|
||||
expect(error).to.be.instanceOf(TypeError)
|
||||
expect(error.message).to.equal('should be a mongo id')
|
||||
expect(OError.getFullInfo(error)).to.deep.equal({ arg: '12345' })
|
||||
}
|
||||
})
|
||||
|
||||
it('should throw for mongo ids that are too short', function () {
|
||||
try {
|
||||
assert.mongoId('507f1f77bcf86cd79943901', 'should be a mongo id')
|
||||
expect.fail()
|
||||
} catch (error) {
|
||||
expect(error).to.be.instanceOf(TypeError)
|
||||
expect(error.message).to.equal('should be a mongo id')
|
||||
expect(OError.getFullInfo(error)).to.deep.equal({
|
||||
arg: '507f1f77bcf86cd79943901',
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
it('should throw for mongo ids that are too long', function () {
|
||||
try {
|
||||
assert.mongoId('507f1f77bcf86cd7994390111', 'should be a mongo id')
|
||||
expect.fail()
|
||||
} catch (error) {
|
||||
expect(error).to.be.instanceOf(TypeError)
|
||||
expect(error.message).to.equal('should be a mongo id')
|
||||
expect(OError.getFullInfo(error)).to.deep.equal({
|
||||
arg: '507f1f77bcf86cd7994390111',
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe('postgresId', function () {
|
||||
it('should not throw for valid postgres ids', function () {
|
||||
expect(() =>
|
||||
assert.postgresId('123456789', 'should be a postgres id')
|
||||
).to.not.throw()
|
||||
expect(() =>
|
||||
assert.postgresId('1', 'should be a postgres id')
|
||||
).to.not.throw()
|
||||
})
|
||||
|
||||
it('should throw for invalid postgres ids', function () {
|
||||
try {
|
||||
assert.postgresId('invalid-id', 'should be a postgres id')
|
||||
expect.fail()
|
||||
} catch (error) {
|
||||
expect(error).to.be.instanceOf(TypeError)
|
||||
expect(error.message).to.equal('should be a postgres id')
|
||||
expect(OError.getFullInfo(error)).to.deep.equal({ arg: 'invalid-id' })
|
||||
}
|
||||
})
|
||||
|
||||
it('should throw for postgres ids starting with 0', function () {
|
||||
try {
|
||||
assert.postgresId('0123456', 'should be a postgres id')
|
||||
expect.fail()
|
||||
} catch (error) {
|
||||
expect(error).to.be.instanceOf(TypeError)
|
||||
expect(error.message).to.equal('should be a postgres id')
|
||||
expect(OError.getFullInfo(error)).to.deep.equal({ arg: '0123456' })
|
||||
}
|
||||
})
|
||||
|
||||
it('should throw for postgres ids that are too long', function () {
|
||||
try {
|
||||
assert.postgresId('12345678901', 'should be a postgres id')
|
||||
expect.fail()
|
||||
} catch (error) {
|
||||
expect(error).to.be.instanceOf(TypeError)
|
||||
expect(error.message).to.equal('should be a postgres id')
|
||||
expect(OError.getFullInfo(error)).to.deep.equal({ arg: '12345678901' })
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe('regex constants', function () {
|
||||
it('MONGO_ID_REGEXP should match valid mongo ids', function () {
|
||||
expect('507f1f77bcf86cd799439011').to.match(assert.MONGO_ID_REGEXP)
|
||||
expect('abcdef0123456789abcdef01').to.match(assert.MONGO_ID_REGEXP)
|
||||
})
|
||||
|
||||
it('MONGO_ID_REGEXP should not match invalid mongo ids', function () {
|
||||
expect('invalid-id').to.not.match(assert.MONGO_ID_REGEXP)
|
||||
expect('507f1f77bcf86cd79943901').to.not.match(assert.MONGO_ID_REGEXP) // too short
|
||||
expect('507f1f77bcf86cd7994390111').to.not.match(assert.MONGO_ID_REGEXP) // too long
|
||||
expect('507F1F77BCF86CD799439011').to.not.match(assert.MONGO_ID_REGEXP) // uppercase
|
||||
})
|
||||
|
||||
it('POSTGRES_ID_REGEXP should match valid postgres ids', function () {
|
||||
expect('123456789').to.match(assert.POSTGRES_ID_REGEXP)
|
||||
expect('1').to.match(assert.POSTGRES_ID_REGEXP)
|
||||
})
|
||||
|
||||
it('POSTGRES_ID_REGEXP should not match invalid postgres ids', function () {
|
||||
expect('invalid-id').to.not.match(assert.POSTGRES_ID_REGEXP)
|
||||
expect('0123456').to.not.match(assert.POSTGRES_ID_REGEXP) // starts with 0
|
||||
expect('12345678901').to.not.match(assert.POSTGRES_ID_REGEXP) // too long (> 10 digits)
|
||||
})
|
||||
})
|
||||
})
|
|
@ -8,20 +8,20 @@ describe('BlobStore postgres backend', function () {
|
|||
const projectId = new ObjectId().toString()
|
||||
await expect(
|
||||
postgresBackend.insertBlob(projectId, 'hash', 123, 99)
|
||||
).to.be.rejectedWith(`bad projectId ${projectId}`)
|
||||
).to.be.rejectedWith('bad projectId')
|
||||
})
|
||||
|
||||
it('deleteBlobs rejects when called with bad projectId', async function () {
|
||||
const projectId = new ObjectId().toString()
|
||||
await expect(postgresBackend.deleteBlobs(projectId)).to.be.rejectedWith(
|
||||
`bad projectId ${projectId}`
|
||||
'bad projectId'
|
||||
)
|
||||
})
|
||||
|
||||
it('findBlobs rejects when called with bad projectId', async function () {
|
||||
const projectId = new ObjectId().toString()
|
||||
await expect(postgresBackend.findBlobs(projectId)).to.be.rejectedWith(
|
||||
`bad projectId ${projectId}`
|
||||
'bad projectId'
|
||||
)
|
||||
})
|
||||
|
||||
|
@ -29,14 +29,14 @@ describe('BlobStore postgres backend', function () {
|
|||
const projectId = new ObjectId().toString()
|
||||
await expect(
|
||||
postgresBackend.findBlob(projectId, 'hash')
|
||||
).to.be.rejectedWith(`bad projectId ${projectId}`)
|
||||
).to.be.rejectedWith('bad projectId')
|
||||
})
|
||||
|
||||
it('getProjectBlobs rejects when called with bad projectId', async function () {
|
||||
const projectId = new ObjectId().toString()
|
||||
await expect(
|
||||
postgresBackend.getProjectBlobs(projectId)
|
||||
).to.be.rejectedWith(`bad projectId ${projectId}`)
|
||||
).to.be.rejectedWith('bad projectId')
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
Some files were not shown because too many files have changed in this diff.