Mirror of https://github.com/yu-i-i/overleaf-cep.git, synced 2025-07-22 20:00:06 +02:00.

Compare commits: c6948581df...072f18107c (249 commits)
Commits in this range, newest first:

072f18107c, 370e822909, 555cdf7d6e, ace526e55e, 860ee4b4c2, e30d5b9db2, 3f33d17237, 148fc1e64d, 71f02142c8, 5d0958759f, b6482407d0, 5110a55266, 5504a3471e, a0dd7169a0, 858c89d3b0, f94eb73e05, 1a12fab4d1, a5bf2f844e, 69b869c0aa, 0035ab85b7, dc6f0180d5, 44d64d6397, 16144d4db3, 2d8593a7a3, 5048b60b02, 38f045ade4, 35f8549ffb, 462e13f661, 635964a830, 4159568ebe, 3183fc14c1, 13e6d2c00f, 7fed874d53, 534b9263d9, 4146c920cf, b1d077cc8a, f3a11e3581, 3a018dd207, 9c8589bde6, 079f200939, a1dcdaf283, 1a1851f658, 0faa9d5355, 44e59d5fe8, 0c90b4d100, 19dc71f414, 069e42e763, 04fa5366ce, edf4fdda50, 6e30a1a32d, 1042092144, 150dfd6cba, fd9fd9f0e7, c9174cdecc, a20a0923b7, 91a308a62f, 7bdc4291fc, af99f736bd, 3a1ef872cd, 4310d3ec88, adf399fb95, 5b39c76aa8, 8423829714, cc7c01132b, bf8abb3181, 494f0a4b1a, 0dab9369ee, b15758da97, 3ba002460e, a559cbb590, 4648661ce6, f68bf5a69f, 90309f59ae, 89937d9635, 3eeee3b983, 4e03e0fbe1, dc252fe772, 30143ead97, 982f647845, 39b4aed85f, 6f461564d5, 0c2f79b0b8, 0f330ef6a3, 9cb4ef4d7d, 6b38336c7b, 9aa261eaf6, aa4d8f4925, b6fe6ae062, b14a131b43, 7ca01dc925, e1a3037ffa, 6bde3acc62, d9914bf80a, 7e9a33841d, afe146a620, c7dd7208fb, 8b937c91f4, 22016ffef9, 569e72a1c0, 740b1d3f50, 0aa56fbe2c, 6f516b25af, a1591e8b0c, b0c5d6fc5a, 0ac2ddd686, 53fc78d83e, 980a8458d4, f025f1d0cb, e95b159edd, 227f035c2e, 08ea0f270b, fd1926a1c8, 9c287ba36c, ef7cc20694, cc21f42a14, 5c7bef31ca, fc050983c9, 92626393ec, ce00213c4a, 92731848ac, 2f44a4eb5a, d189c91c59, eed6a982f7, ab0199f238, 0a79ac75ff, d49a9e9e80, 0fc229dfc0, 02e7ac52e2, cfc6ff0759, 277e59fbd5, 272303cb58, 819cd85a0e, d2e784e11c, 8b91b3b749, 365af778b6, 3862826589, 2e4b57bf81, 55295ece9c, 8a90173aa7, 72ff927a52, 9601eeb7c9, 91c1c6858a, 101c994fec, ab19b01d43, b3dc0097fd, 6a951e2ff0, b290e93441, 5799d534a9, 07b47606c1, b946c2abff, 25c3699862, 0397b02214, 7c23655c79, fdd0d95554, 2a833aa23a, fec6dde00f, c81cc4055e, 2d0706591b, f904933d68, c227c1e2d9, c23e84eb37, 637312e4f8, ce3054713f, 2c07fa1f77, 52280febf6, f871130773, 25675ce2ba, c1f5d7c40c, 4960569648, eb60d364f6, 542008c61d, 3da4dc71f1, 312664bd2d, 69e2a57769, 6d202432ff, 5b08adc4ff, 86626ca44e, 45c6ce2219, ff63215d73, d3a9b4943a, e0f6ee8b20, edc7634007, c0b7efea10, 2eb695f4c3, d280f40885, a9923fed4e, 7a449f4686, a8df91e91b, 9e9ad3c005, e5d828673e, df233f3e5e, 784559f1b8, ae51e57c75, 24e12bfbd4, 1386ca1669, f7fcf4c23f, 3b684e08ca, d7833afd35, af7bcfc96a, 842f6c289f, 1e6112d5b0, 11e410c9c0, 0037b0b3fc, cd10a31a16, ca10904484, e3310e2358, 62714d995d, a8a21e05af, 08316442cf, db98f5132b, a134a2b799, 7a556cf1fd, f11ea06c1a, d173bdf8e2, 832f9923b9, ef810a9f36, 54c0eb7fdc, edacb9ec0b, b84d23564b, d5ba2e3f1c, 385f5706d8, 2226594ade, a210a7b14d, 25d3972810, 397016744e, 4dbc70b745, 393cee7af5, 50df3862e9, a80203f748, cb350ecc65, b2b676249d, ee23e8f49f, 4aaf411cd2, a63e25953f, 48337b2e2c, 3a96df4623, 4b9963757f, 35500cc72b, 3fbbb50ef7, 0aae5c48b4, 6cbacc8cb7, 2e50e0ffa1, da449f9f5f, 1b15dc3854, 86e13b088a, 26a77e739d, c6f4229147, fe64856be7, 9ba772b18f
730 changed files with 31905 additions and 6346 deletions
@@ -1,10 +1,19 @@
+---
+name: Bug report
+about: Report a bug
+title: ''
+labels: type:bug
+assignees: ''
+
+---
+
 <!--
 
 Note: If you are using www.overleaf.com and have a problem,
 or if you would like to request a new feature please contact
 the support team at support@overleaf.com
 
 This form should only be used to report bugs in the
 Community Edition release of Overleaf.
 
 -->
@@ -77,6 +77,7 @@ each service:
 | `filestore` | 9235 |
 | `notifications` | 9236 |
 | `real-time` | 9237 |
+| `references` | 9238 |
 | `history-v1` | 9239 |
 | `project-history` | 9240 |
 
@@ -13,6 +13,7 @@ NOTIFICATIONS_HOST=notifications
 PROJECT_HISTORY_HOST=project-history
 REALTIME_HOST=real-time
 REDIS_HOST=redis
+REFERENCES_HOST=references
 SESSION_SECRET=foo
 WEBPACK_HOST=webpack
 WEB_API_PASSWORD=overleaf
@@ -112,6 +112,17 @@ services:
       - ../services/real-time/app.js:/overleaf/services/real-time/app.js
       - ../services/real-time/config:/overleaf/services/real-time/config
 
+  references:
+    command: ["node", "--watch", "app.js"]
+    environment:
+      - NODE_OPTIONS=--inspect=0.0.0.0:9229
+    ports:
+      - "127.0.0.1:9238:9229"
+    volumes:
+      - ../services/references/app:/overleaf/services/references/app
+      - ../services/references/config:/overleaf/services/references/config
+      - ../services/references/app.js:/overleaf/services/references/app.js
+
   web:
     command: ["node", "--watch", "app.js", "--watch-locales"]
     environment:
@@ -25,10 +25,10 @@ services:
     env_file:
       - dev.env
     environment:
-      - DOCKER_RUNNER=true
       - TEXLIVE_IMAGE=texlive-full # docker build texlive -t texlive-full
-      - COMPILES_HOST_DIR=${PWD}/compiles
-      - OUTPUT_HOST_DIR=${PWD}/output
+      - SANDBOXED_COMPILES=true
+      - SANDBOXED_COMPILES_HOST_DIR_COMPILES=${PWD}/compiles
+      - SANDBOXED_COMPILES_HOST_DIR_OUTPUT=${PWD}/output
     user: root
    volumes:
      - ${PWD}/compiles:/overleaf/services/clsi/compiles
@@ -123,7 +123,7 @@ services:
     dockerfile: services/real-time/Dockerfile
     env_file:
       - dev.env
 
   redis:
     image: redis:5
     ports:
@@ -131,6 +131,13 @@ services:
     volumes:
       - redis-data:/data
 
+  references:
+    build:
+      context: ..
+      dockerfile: services/references/Dockerfile
+    env_file:
+      - dev.env
+
   web:
     build:
       context: ..
@@ -140,7 +147,7 @@ services:
       - dev.env
     environment:
       - APP_NAME=Overleaf Community Edition
-      - ENABLED_LINKED_FILE_TYPES=project_file,project_output_file
+      - ENABLED_LINKED_FILE_TYPES=project_file,project_output_file,url
      - EMAIL_CONFIRMATION_DISABLED=true
      - NODE_ENV=development
      - OVERLEAF_ALLOW_PUBLIC_ACCESS=true
@@ -161,6 +168,7 @@ services:
       - notifications
       - project-history
       - real-time
+      - references
 
   webpack:
     build:
Binary file not shown. (Image; before: 587 KiB, after: 1 MiB.)
@@ -32,7 +32,7 @@ services:
       OVERLEAF_REDIS_HOST: redis
       REDIS_HOST: redis
 
-      ENABLED_LINKED_FILE_TYPES: 'project_file,project_output_file'
+      ENABLED_LINKED_FILE_TYPES: 'project_file,project_output_file,url'
 
       # Enables Thumbnail generation using ImageMagick
       ENABLE_CONVERSIONS: 'true'
@@ -1,6 +1,6 @@
 access-token-encryptor
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False
@@ -1,6 +1,6 @@
 fetch-utils
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False
@@ -1,6 +1,6 @@
 logger
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False
@@ -1,6 +1,6 @@
 metrics
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False
@@ -1,6 +1,6 @@
 mongo-utils
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False
@@ -1,6 +1,6 @@
 o-error
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False
@@ -1,6 +1,6 @@
 object-persistor
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False
@@ -1,6 +1,6 @@
 overleaf-editor-core
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False
@@ -1,7 +1,7 @@
 // @ts-check
 
 /**
- * @import { ClearTrackingPropsRawData } from '../types'
+ * @import { ClearTrackingPropsRawData, TrackingDirective } from '../types'
  */
 
 class ClearTrackingProps {
@@ -11,12 +11,27 @@ class ClearTrackingProps {
 
   /**
    * @param {any} other
-   * @returns {boolean}
+   * @returns {other is ClearTrackingProps}
    */
   equals(other) {
     return other instanceof ClearTrackingProps
   }
 
+  /**
+   * @param {TrackingDirective} other
+   * @returns {other is ClearTrackingProps}
+   */
+  canMergeWith(other) {
+    return other instanceof ClearTrackingProps
+  }
+
+  /**
+   * @param {TrackingDirective} other
+   */
+  mergeWith(other) {
+    return this
+  }
+
   /**
    * @returns {ClearTrackingPropsRawData}
    */
@@ -11,7 +11,7 @@ const EditOperation = require('../operation/edit_operation')
 const EditOperationBuilder = require('../operation/edit_operation_builder')
 
 /**
- * @import { BlobStore, ReadonlyBlobStore, RangesBlob, RawFileData, RawLazyStringFileData } from '../types'
+ * @import { BlobStore, ReadonlyBlobStore, RangesBlob, RawHashFileData, RawLazyStringFileData } from '../types'
  */
 
 class LazyStringFileData extends FileData {
@ -159,11 +159,11 @@ class LazyStringFileData extends FileData {
|
||||||
|
|
||||||
/** @inheritdoc
|
/** @inheritdoc
|
||||||
* @param {BlobStore} blobStore
|
* @param {BlobStore} blobStore
|
||||||
* @return {Promise<RawFileData>}
|
* @return {Promise<RawHashFileData>}
|
||||||
*/
|
*/
|
||||||
async store(blobStore) {
|
async store(blobStore) {
|
||||||
if (this.operations.length === 0) {
|
if (this.operations.length === 0) {
|
||||||
/** @type RawFileData */
|
/** @type RawHashFileData */
|
||||||
const raw = { hash: this.hash }
|
const raw = { hash: this.hash }
|
||||||
if (this.rangesHash) {
|
if (this.rangesHash) {
|
||||||
raw.rangesHash = this.rangesHash
|
raw.rangesHash = this.rangesHash
|
||||||
|
@ -171,9 +171,11 @@ class LazyStringFileData extends FileData {
|
||||||
return raw
|
return raw
|
||||||
}
|
}
|
||||||
const eager = await this.toEager(blobStore)
|
const eager = await this.toEager(blobStore)
|
||||||
|
const raw = await eager.store(blobStore)
|
||||||
|
this.hash = raw.hash
|
||||||
|
this.rangesHash = raw.rangesHash
|
||||||
this.operations.length = 0
|
this.operations.length = 0
|
||||||
/** @type RawFileData */
|
return raw
|
||||||
return await eager.store(blobStore)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -8,7 +8,7 @@ const CommentList = require('./comment_list')
 const TrackedChangeList = require('./tracked_change_list')
 
 /**
- * @import { StringFileRawData, RawFileData, BlobStore, CommentRawData } from "../types"
+ * @import { StringFileRawData, RawHashFileData, BlobStore, CommentRawData } from "../types"
  * @import { TrackedChangeRawData, RangesBlob } from "../types"
  * @import EditOperation from "../operation/edit_operation"
  */
@@ -139,7 +139,7 @@ class StringFileData extends FileData {
   /**
    * @inheritdoc
    * @param {BlobStore} blobStore
-   * @return {Promise<RawFileData>}
+   * @return {Promise<RawHashFileData>}
    */
   async store(blobStore) {
     const blob = await blobStore.putString(this.content)
@@ -84,6 +84,21 @@ class TrackedChange {
       )
     )
   }
+
+  /**
+   * Return an equivalent tracked change whose extent is limited to the given
+   * range
+   *
+   * @param {Range} range
+   * @returns {TrackedChange | null} - the result or null if the intersection is empty
+   */
+  intersectRange(range) {
+    const intersection = this.range.intersect(range)
+    if (intersection == null) {
+      return null
+    }
+    return new TrackedChange(intersection, this.tracking)
+  }
 }
 
 module.exports = TrackedChange
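The new `TrackedChange.intersectRange` clips a change to the portion that overlaps a given range. A minimal usage sketch (illustrative only; the require paths and the `Range(pos, length)` / `TrackingProps(type, userId, ts)` constructors are assumed from the surrounding diff):

```js
// Hypothetical usage sketch; paths assume the library layout shown in this diff.
const Range = require('../range')
const TrackingProps = require('../file_data/tracking_props')
const TrackedChange = require('./tracked_change')

// A tracked insert covering characters 5..14 (pos 5, length 10).
const change = new TrackedChange(
  new Range(5, 10),
  new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
)

// Clipping to characters 0..7 keeps only the overlap: pos 5, length 3.
const clipped = change.intersectRange(new Range(0, 8))
console.log(clipped.range.pos, clipped.range.length) // 5 3

// Disjoint ranges yield null rather than an empty change.
console.log(change.intersectRange(new Range(20, 4))) // null
```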
@@ -2,9 +2,11 @@
 const Range = require('../range')
 const TrackedChange = require('./tracked_change')
 const TrackingProps = require('../file_data/tracking_props')
+const { InsertOp, RemoveOp, RetainOp } = require('../operation/scan_op')
 
 /**
  * @import { TrackingDirective, TrackedChangeRawData } from "../types"
+ * @import TextOperation from "../operation/text_operation"
  */
 
 class TrackedChangeList {
@@ -58,6 +60,22 @@ class TrackedChangeList {
     return this._trackedChanges.filter(change => range.contains(change.range))
   }
 
+  /**
+   * Returns tracked changes that overlap with the given range
+   * @param {Range} range
+   * @returns {TrackedChange[]}
+   */
+  intersectRange(range) {
+    const changes = []
+    for (const change of this._trackedChanges) {
+      const intersection = change.intersectRange(range)
+      if (intersection != null) {
+        changes.push(intersection)
+      }
+    }
+    return changes
+  }
+
   /**
    * Returns the tracking props for a given range.
    * @param {Range} range
@@ -89,6 +107,8 @@ class TrackedChangeList {
 
   /**
    * Collapses consecutive (and compatible) ranges
+   *
+   * @private
    * @returns {void}
    */
   _mergeRanges() {
@@ -117,12 +137,28 @@ class TrackedChangeList {
   }
 
   /**
+   * Apply an insert operation
    *
    * @param {number} cursor
    * @param {string} insertedText
    * @param {{tracking?: TrackingProps}} opts
    */
   applyInsert(cursor, insertedText, opts = {}) {
+    this._applyInsert(cursor, insertedText, opts)
+    this._mergeRanges()
+  }
+
+  /**
+   * Apply an insert operation
+   *
+   * This method will not merge ranges at the end
+   *
+   * @private
+   * @param {number} cursor
+   * @param {string} insertedText
+   * @param {{tracking?: TrackingProps}} [opts]
+   */
+  _applyInsert(cursor, insertedText, opts = {}) {
     const newTrackedChanges = []
     for (const trackedChange of this._trackedChanges) {
       if (
@@ -171,15 +207,29 @@ class TrackedChangeList {
       newTrackedChanges.push(newTrackedChange)
     }
     this._trackedChanges = newTrackedChanges
-    this._mergeRanges()
   }
 
   /**
+   * Apply a delete operation to the list of tracked changes
    *
    * @param {number} cursor
    * @param {number} length
    */
   applyDelete(cursor, length) {
+    this._applyDelete(cursor, length)
+    this._mergeRanges()
+  }
+
+  /**
+   * Apply a delete operation to the list of tracked changes
+   *
+   * This method will not merge ranges at the end
+   *
+   * @private
+   * @param {number} cursor
+   * @param {number} length
+   */
+  _applyDelete(cursor, length) {
     const newTrackedChanges = []
     for (const trackedChange of this._trackedChanges) {
       const deletedRange = new Range(cursor, length)
@@ -205,15 +255,31 @@ class TrackedChangeList {
       }
     }
     this._trackedChanges = newTrackedChanges
+  }
+
+  /**
+   * Apply a retain operation to the list of tracked changes
+   *
+   * @param {number} cursor
+   * @param {number} length
+   * @param {{tracking?: TrackingDirective}} [opts]
+   */
+  applyRetain(cursor, length, opts = {}) {
+    this._applyRetain(cursor, length, opts)
     this._mergeRanges()
   }
 
   /**
+   * Apply a retain operation to the list of tracked changes
+   *
+   * This method will not merge ranges at the end
+   *
+   * @private
    * @param {number} cursor
    * @param {number} length
    * @param {{tracking?: TrackingDirective}} opts
    */
-  applyRetain(cursor, length, opts = {}) {
+  _applyRetain(cursor, length, opts = {}) {
     // If there's no tracking info, leave everything as-is
     if (!opts.tracking) {
       return
@@ -269,6 +335,31 @@ class TrackedChangeList {
       newTrackedChanges.push(newTrackedChange)
     }
     this._trackedChanges = newTrackedChanges
+  }
+
+  /**
+   * Apply a text operation to the list of tracked changes
+   *
+   * Ranges are merged only once at the end, for performance and to avoid
+   * problematic edge cases where intermediate ranges get incorrectly merged.
+   *
+   * @param {TextOperation} operation
+   */
+  applyTextOperation(operation) {
+    // this cursor tracks the destination document that gets modified as
+    // operations are applied to it.
+    let cursor = 0
+    for (const op of operation.ops) {
+      if (op instanceof InsertOp) {
+        this._applyInsert(cursor, op.insertion, { tracking: op.tracking })
+        cursor += op.insertion.length
+      } else if (op instanceof RemoveOp) {
+        this._applyDelete(cursor, op.length)
+      } else if (op instanceof RetainOp) {
+        this._applyRetain(cursor, op.length, { tracking: op.tracking })
+        cursor += op.length
+      }
+    }
     this._mergeRanges()
   }
 }
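`applyTextOperation` walks the operation's ops with a cursor into the destination document, dispatching to the private `_applyInsert` / `_applyDelete` / `_applyRetain` variants, and merges ranges once at the end. A sketch of the equivalence, assuming the `TextOperation` builder API and `StringFileData` constructor used in the tests further down (require paths are assumptions):

```js
// Hypothetical sketch; paths and one-argument StringFileData construction assumed.
const StringFileData = require('../file_data/string_file_data')
const TextOperation = require('../operation/text_operation')
const TrackingProps = require('../file_data/tracking_props')

const file = new StringFileData('the quick brown fox jumps over the lazy dog')
const operation = new TextOperation()
  .retain(7)
  .retain(13, { tracking: new TrackingProps('delete', 'user1', new Date()) })
  .retain(23)

// Equivalent to _applyRetain(0, 7), _applyRetain(7, 13, { tracking }),
// _applyRetain(20, 23), followed by a single _mergeRanges() call.
file.trackedChanges.applyTextOperation(operation)
```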
@@ -62,6 +62,35 @@ class TrackingProps {
       this.ts.getTime() === other.ts.getTime()
     )
   }
+
+  /**
+   * Are these tracking props compatible with the other tracking props for merging
+   * ranges?
+   *
+   * @param {TrackingDirective} other
+   * @returns {other is TrackingProps}
+   */
+  canMergeWith(other) {
+    if (!(other instanceof TrackingProps)) {
+      return false
+    }
+    return this.type === other.type && this.userId === other.userId
+  }
+
+  /**
+   * Merge two tracking props
+   *
+   * Assumes that `canMerge(other)` returns true
+   *
+   * @param {TrackingDirective} other
+   */
+  mergeWith(other) {
+    if (!this.canMergeWith(other)) {
+      throw new Error('Cannot merge with incompatible tracking props')
+    }
+    const ts = this.ts <= other.ts ? this.ts : other.ts
+    return new TrackingProps(this.type, this.userId, ts)
+  }
 }
 
 module.exports = TrackingProps
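The merge rules above make two tracked ranges compatible when their type and user match, and the merged props keep the earlier timestamp. A quick illustrative sketch (require path assumed):

```js
// Hypothetical sketch of the rules implemented by canMergeWith/mergeWith above.
const TrackingProps = require('./tracking_props')

const a = new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
const b = new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:01.000Z'))
const c = new TrackingProps('insert', 'user2', new Date('2024-01-01T00:00:00.000Z'))

console.log(a.canMergeWith(b)) // true: same type and user; timestamps may differ
console.log(a.canMergeWith(c)) // false: different user

// mergeWith keeps the earlier of the two timestamps.
console.log(a.mergeWith(b).ts.toISOString()) // 2024-01-01T00:00:00.000Z
```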
@@ -175,7 +175,7 @@ class InsertOp extends ScanOp {
       return false
     }
     if (this.tracking) {
-      if (!this.tracking.equals(other.tracking)) {
+      if (!other.tracking || !this.tracking.canMergeWith(other.tracking)) {
         return false
       }
     } else if (other.tracking) {
@@ -198,7 +198,10 @@ class InsertOp extends ScanOp {
       throw new Error('Cannot merge with incompatible operation')
     }
     this.insertion += other.insertion
-    // We already have the same tracking info and commentIds
+    if (this.tracking != null && other.tracking != null) {
+      this.tracking = this.tracking.mergeWith(other.tracking)
+    }
+    // We already have the same commentIds
   }
 
   /**
@@ -306,9 +309,13 @@ class RetainOp extends ScanOp {
       return false
     }
     if (this.tracking) {
-      return this.tracking.equals(other.tracking)
+      if (!other.tracking || !this.tracking.canMergeWith(other.tracking)) {
+        return false
+      }
+    } else if (other.tracking) {
+      return false
     }
-    return !other.tracking
+    return true
   }
 
   /**
@@ -319,6 +326,9 @@ class RetainOp extends ScanOp {
       throw new Error('Cannot merge with incompatible operation')
     }
     this.length += other.length
+    if (this.tracking != null && other.tracking != null) {
+      this.tracking = this.tracking.mergeWith(other.tracking)
+    }
   }
 
   /**
@@ -314,25 +314,18 @@ class TextOperation extends EditOperation {
           str
         )
       }
-      file.trackedChanges.applyRetain(result.length, op.length, {
-        tracking: op.tracking,
-      })
       result += str.slice(inputCursor, inputCursor + op.length)
       inputCursor += op.length
     } else if (op instanceof InsertOp) {
       if (containsNonBmpChars(op.insertion)) {
         throw new InvalidInsertionError(str, op.toJSON())
       }
-      file.trackedChanges.applyInsert(result.length, op.insertion, {
-        tracking: op.tracking,
-      })
       file.comments.applyInsert(
         new Range(result.length, op.insertion.length),
         { commentIds: op.commentIds }
       )
       result += op.insertion
     } else if (op instanceof RemoveOp) {
-      file.trackedChanges.applyDelete(result.length, op.length)
       file.comments.applyDelete(new Range(result.length, op.length))
       inputCursor += op.length
     } else {
@@ -352,6 +345,8 @@ class TextOperation extends EditOperation {
       throw new TextOperation.TooLongError(operation, result.length)
     }
 
+    file.trackedChanges.applyTextOperation(this)
+
     file.content = result
   }
 
@@ -400,44 +395,36 @@ class TextOperation extends EditOperation {
     for (let i = 0, l = ops.length; i < l; i++) {
       const op = ops[i]
       if (op instanceof RetainOp) {
-        // Where we need to end up after the retains
-        const target = strIndex + op.length
-        // A previous retain could have overriden some tracking info. Now we
-        // need to restore it.
-        const previousRanges = previousState.trackedChanges.inRange(
-          new Range(strIndex, op.length)
-        )
-
-        let removeTrackingInfoIfNeeded
         if (op.tracking) {
-          removeTrackingInfoIfNeeded = new ClearTrackingProps()
-        }
+          // Where we need to end up after the retains
+          const target = strIndex + op.length
+          // A previous retain could have overriden some tracking info. Now we
+          // need to restore it.
+          const previousChanges = previousState.trackedChanges.intersectRange(
+            new Range(strIndex, op.length)
+          )
 
-        for (const trackedChange of previousRanges) {
-          if (strIndex < trackedChange.range.start) {
-            inverse.retain(trackedChange.range.start - strIndex, {
-              tracking: removeTrackingInfoIfNeeded,
+          for (const change of previousChanges) {
+            if (strIndex < change.range.start) {
+              inverse.retain(change.range.start - strIndex, {
+                tracking: new ClearTrackingProps(),
+              })
+              strIndex = change.range.start
+            }
+            inverse.retain(change.range.length, {
+              tracking: change.tracking,
             })
-            strIndex = trackedChange.range.start
+            strIndex += change.range.length
           }
-          if (trackedChange.range.end < strIndex + op.length) {
-            inverse.retain(trackedChange.range.length, {
-              tracking: trackedChange.tracking,
+          if (strIndex < target) {
+            inverse.retain(target - strIndex, {
+              tracking: new ClearTrackingProps(),
             })
-            strIndex = trackedChange.range.end
+            strIndex = target
           }
-          if (trackedChange.range.end !== strIndex) {
-            // No need to split the range at the end
-            const [left] = trackedChange.range.splitAt(strIndex)
-            inverse.retain(left.length, { tracking: trackedChange.tracking })
-            strIndex = left.end
-          }
-        }
-        if (strIndex < target) {
-          inverse.retain(target - strIndex, {
-            tracking: removeTrackingInfoIfNeeded,
-          })
-          strIndex = target
+        } else {
+          inverse.retain(op.length)
+          strIndex += op.length
         }
       } else if (op instanceof InsertOp) {
         inverse.remove(op.insertion.length)
@@ -86,10 +86,32 @@ class Range {
   }
 
   /**
-   * @param {Range} range
+   * Does this range overlap another range?
+   *
+   * Overlapping means that the two ranges have at least one character in common
+   *
+   * @param {Range} other - the other range
    */
-  overlaps(range) {
-    return this.start < range.end && this.end > range.start
+  overlaps(other) {
+    return this.start < other.end && this.end > other.start
+  }
+
+  /**
+   * Does this range overlap the start of another range?
+   *
+   * @param {Range} other - the other range
+   */
+  overlapsStart(other) {
+    return this.start <= other.start && this.end > other.start
+  }
+
+  /**
+   * Does this range overlap the end of another range?
+   *
+   * @param {Range} other - the other range
+   */
+  overlapsEnd(other) {
+    return this.start < other.end && this.end >= other.end
   }
 
   /**
@@ -227,6 +249,26 @@ class Range {
     )
     return [rangeUpToCursor, rangeAfterCursor]
   }
+
+  /**
+   * Returns the intersection of this range with another range
+   *
+   * @param {Range} other - the other range
+   * @return {Range | null} the intersection or null if the intersection is empty
+   */
+  intersect(other) {
+    if (this.contains(other)) {
+      return other
+    } else if (other.contains(this)) {
+      return this
+    } else if (other.overlapsStart(this)) {
+      return new Range(this.pos, other.end - this.start)
+    } else if (other.overlapsEnd(this)) {
+      return new Range(other.pos, this.end - other.start)
+    } else {
+      return null
+    }
+  }
 }
 
 module.exports = Range
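`Range.intersect` covers four cases: containment in either direction, partial overlap from either side, and disjoint ranges (which return `null`). A brief sketch of the semantics, assuming `Range(pos, length)` as in this diff (require path assumed):

```js
// Hypothetical sketch: Range(5, 10) spans characters 5..14.
const Range = require('../range')

new Range(5, 10).intersect(new Range(3, 6))   // Range(5, 4): partial overlap
new Range(5, 10).intersect(new Range(7, 2))   // Range(7, 2): nested range
new Range(5, 10).intersect(new Range(20, 30)) // null: disjoint ranges
```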
@@ -193,4 +193,13 @@ describe('LazyStringFileData', function () {
     expect(fileData.getStringLength()).to.equal(longString.length)
     expect(fileData.getOperations()).to.have.length(1)
   })
+
+  it('truncates its operations after being stored', async function () {
+    const testHash = File.EMPTY_FILE_HASH
+    const fileData = new LazyStringFileData(testHash, undefined, 0)
+    fileData.edit(new TextOperation().insert('abc'))
+    const stored = await fileData.store(this.blobStore)
+    expect(fileData.hash).to.equal(stored.hash)
+    expect(fileData.operations).to.deep.equal([])
+  })
 })
@@ -1,4 +1,3 @@
-// @ts-check
 'use strict'
 
 const { expect } = require('chai')
@@ -449,4 +448,44 @@ describe('Range', function () {
       expect(() => range.insertAt(16, 3)).to.throw()
     })
   })
+
+  describe('intersect', function () {
+    it('should handle partially overlapping ranges', function () {
+      const range1 = new Range(5, 10)
+      const range2 = new Range(3, 6)
+      const intersection1 = range1.intersect(range2)
+      expect(intersection1.pos).to.equal(5)
+      expect(intersection1.length).to.equal(4)
+      const intersection2 = range2.intersect(range1)
+      expect(intersection2.pos).to.equal(5)
+      expect(intersection2.length).to.equal(4)
+    })
+
+    it('should intersect with itself', function () {
+      const range = new Range(5, 10)
+      const intersection = range.intersect(range)
+      expect(intersection.pos).to.equal(5)
+      expect(intersection.length).to.equal(10)
+    })
+
+    it('should handle nested ranges', function () {
+      const range1 = new Range(5, 10)
+      const range2 = new Range(7, 2)
+      const intersection1 = range1.intersect(range2)
+      expect(intersection1.pos).to.equal(7)
+      expect(intersection1.length).to.equal(2)
+      const intersection2 = range2.intersect(range1)
+      expect(intersection2.pos).to.equal(7)
+      expect(intersection2.length).to.equal(2)
+    })
+
+    it('should handle disconnected ranges', function () {
+      const range1 = new Range(5, 10)
+      const range2 = new Range(20, 30)
+      const intersection1 = range1.intersect(range2)
+      expect(intersection1).to.be.null
+      const intersection2 = range2.intersect(range1)
+      expect(intersection2).to.be.null
+    })
+  })
 })
@@ -107,7 +107,7 @@ describe('RetainOp', function () {
     expect(op1.equals(new RetainOp(3))).to.be.true
   })
 
-  it('cannot merge with another RetainOp if tracking info is different', function () {
+  it('cannot merge with another RetainOp if the tracking user is different', function () {
     const op1 = new RetainOp(
       4,
       new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
@@ -120,14 +120,14 @@ describe('RetainOp', function () {
     expect(() => op1.mergeWith(op2)).to.throw(Error)
   })
 
-  it('can merge with another RetainOp if tracking info is the same', function () {
+  it('can merge with another RetainOp if the tracking user is the same', function () {
     const op1 = new RetainOp(
       4,
       new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
     )
     const op2 = new RetainOp(
       4,
-      new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
+      new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:01.000Z'))
     )
     op1.mergeWith(op2)
     expect(
@@ -310,7 +310,7 @@ describe('InsertOp', function () {
     expect(() => op1.mergeWith(op2)).to.throw(Error)
   })
 
-  it('cannot merge with another InsertOp if tracking info is different', function () {
+  it('cannot merge with another InsertOp if tracking user is different', function () {
     const op1 = new InsertOp(
       'a',
       new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
@@ -323,7 +323,7 @@ describe('InsertOp', function () {
     expect(() => op1.mergeWith(op2)).to.throw(Error)
   })
 
-  it('can merge with another InsertOp if tracking and comment info is the same', function () {
+  it('can merge with another InsertOp if tracking user and comment info is the same', function () {
     const op1 = new InsertOp(
       'a',
       new TrackingProps(
@@ -338,7 +338,7 @@ describe('InsertOp', function () {
       new TrackingProps(
         'insert',
         'user1',
-        new Date('2024-01-01T00:00:00.000Z')
+        new Date('2024-01-01T00:00:01.000Z')
       ),
       ['1', '2']
     )
@@ -322,6 +322,47 @@ describe('TextOperation', function () {
         new TextOperation().retain(4).remove(4).retain(3)
       )
     })
+
+    it('undoing a tracked delete restores the tracked changes', function () {
+      expectInverseToLeadToInitialState(
+        new StringFileData(
+          'the quick brown fox jumps over the lazy dog',
+          undefined,
+          [
+            {
+              range: { pos: 5, length: 5 },
+              tracking: {
+                ts: '2023-01-01T00:00:00.000Z',
+                type: 'insert',
+                userId: 'user1',
+              },
+            },
+            {
+              range: { pos: 12, length: 3 },
+              tracking: {
+                ts: '2023-01-01T00:00:00.000Z',
+                type: 'delete',
+                userId: 'user1',
+              },
+            },
+            {
+              range: { pos: 18, length: 5 },
+              tracking: {
+                ts: '2023-01-01T00:00:00.000Z',
+                type: 'insert',
+                userId: 'user1',
+              },
+            },
+          ]
+        ),
+        new TextOperation()
+          .retain(7)
+          .retain(13, {
+            tracking: new TrackingProps('delete', 'user1', new Date()),
+          })
+          .retain(23)
+      )
+    })
   })
 
   describe('compose', function () {
@@ -1,6 +1,6 @@
 promise-utils
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False
@@ -1,6 +1,6 @@
 ranges-tracker
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False
@@ -1,6 +1,6 @@
 redis-wrapper
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False
@@ -1,6 +1,6 @@
 settings
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False
@@ -1,6 +1,6 @@
 stream-utils
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False
package-lock.json (generated, 1717 lines changed): file diff suppressed because it is too large.
@@ -37,7 +37,7 @@
   },
   "swagger-tools": {
     "body-parser": "1.20.3",
-    "multer": "2.0.0",
+    "multer": "2.0.1",
     "path-to-regexp": "3.3.0",
     "qs": "6.13.0"
   }
patches/@node-saml+node-saml+4.0.5.patch (new file, 23 lines)
@@ -0,0 +1,23 @@
+diff --git a/node_modules/@node-saml/node-saml/lib/saml.js b/node_modules/@node-saml/node-saml/lib/saml.js
+index fba15b9..a5778cb 100644
+--- a/node_modules/@node-saml/node-saml/lib/saml.js
++++ b/node_modules/@node-saml/node-saml/lib/saml.js
+@@ -336,7 +336,8 @@ class SAML {
+         const requestOrResponse = request || response;
+         (0, utility_1.assertRequired)(requestOrResponse, "either request or response is required");
+         let buffer;
+-        if (this.options.skipRequestCompression) {
++        // logout requestOrResponse must be compressed anyway
++        if (this.options.skipRequestCompression && operation !== "logout") {
+             buffer = Buffer.from(requestOrResponse, "utf8");
+         }
+         else {
+@@ -495,7 +496,7 @@ class SAML {
+             try {
+                 xml = Buffer.from(container.SAMLResponse, "base64").toString("utf8");
+                 doc = await (0, xml_1.parseDomFromString)(xml);
+-                const inResponseToNodes = xml_1.xpath.selectAttributes(doc, "/*[local-name()='Response']/@InResponseTo");
++                const inResponseToNodes = xml_1.xpath.selectAttributes(doc, "/*[local-name()='Response' or local-name()='LogoutResponse']/@InResponseTo");
+                 if (inResponseToNodes) {
+                     inResponseTo = inResponseToNodes.length ? inResponseToNodes[0].nodeValue : null;
+                     await this.validateInResponseTo(inResponseTo);
patches/ldapauth-fork+4.3.3.patch (new file, 64 lines)
@@ -0,0 +1,64 @@
+diff --git a/node_modules/ldapauth-fork/lib/ldapauth.js b/node_modules/ldapauth-fork/lib/ldapauth.js
+index 85ecf36a8b..a7d07e0f78 100644
+--- a/node_modules/ldapauth-fork/lib/ldapauth.js
++++ b/node_modules/ldapauth-fork/lib/ldapauth.js
+@@ -69,6 +69,7 @@ function LdapAuth(opts) {
+   this.opts.bindProperty || (this.opts.bindProperty = 'dn');
+   this.opts.groupSearchScope || (this.opts.groupSearchScope = 'sub');
+   this.opts.groupDnProperty || (this.opts.groupDnProperty = 'dn');
++  this.opts.tlsStarted = false;
+
+   EventEmitter.call(this);
+
+@@ -108,21 +109,7 @@ function LdapAuth(opts) {
+   this._userClient.on('error', this._handleError.bind(this));
+
+   var self = this;
+-  if (this.opts.starttls) {
+-    // When starttls is enabled, this callback supplants the 'connect' callback
+-    this._adminClient.starttls(this.opts.tlsOptions, this._adminClient.controls, function(err) {
+-      if (err) {
+-        self._handleError(err);
+-      } else {
+-        self._onConnectAdmin();
+-      }
+-    });
+-    this._userClient.starttls(this.opts.tlsOptions, this._userClient.controls, function(err) {
+-      if (err) {
+-        self._handleError(err);
+-      }
+-    });
+-  } else if (opts.reconnect) {
++  if (opts.reconnect && !this.opts.starttls) {
+     this.once('_installReconnectListener', function() {
+       self.log && self.log.trace('install reconnect listener');
+       self._adminClient.on('connect', function() {
+@@ -384,6 +371,28 @@ LdapAuth.prototype._findGroups = function(user, callback) {
+ */
+ LdapAuth.prototype.authenticate = function(username, password, callback) {
+   var self = this;
++  if (this.opts.starttls && !this.opts.tlsStarted) {
++    // When starttls is enabled, this callback supplants the 'connect' callback
++    this._adminClient.starttls(this.opts.tlsOptions, this._adminClient.controls, function (err) {
++      if (err) {
++        self._handleError(err);
++      } else {
++        self._onConnectAdmin(function(){self._handleAuthenticate(username, password, callback);});
++      }
++    });
++    this._userClient.starttls(this.opts.tlsOptions, this._userClient.controls, function (err) {
++      if (err) {
++        self._handleError(err);
++      }
++    });
++  } else {
++    self._handleAuthenticate(username, password, callback);
++  }
++};
++
++LdapAuth.prototype._handleAuthenticate = function (username, password, callback) {
++  this.opts.tlsStarted = true;
++  var self = this;
+
+   if (typeof password === 'undefined' || password === null || password === '') {
+     return callback(new Error('no password given'));
@@ -24,6 +24,7 @@ build-base:
 		--cache-from $(OVERLEAF_BASE_BRANCH) \
 		--tag $(OVERLEAF_BASE_TAG) \
 		--tag $(OVERLEAF_BASE_BRANCH) \
+		--network=host \
 		$(MONOREPO_ROOT)
 
 
@@ -39,6 +40,7 @@ build-community:
 		--file Dockerfile \
 		--tag $(OVERLEAF_TAG) \
 		--tag $(OVERLEAF_BRANCH) \
+		--network=host \
 		$(MONOREPO_ROOT)
 
 SHELLCHECK_OPTS = \
@@ -9,5 +9,6 @@ export HISTORY_V1_HOST=127.0.0.1
 export NOTIFICATIONS_HOST=127.0.0.1
 export PROJECT_HISTORY_HOST=127.0.0.1
 export REALTIME_HOST=127.0.0.1
+export REFERENCES_HOST=127.0.0.1
 export WEB_HOST=127.0.0.1
 export WEB_API_HOST=127.0.0.1
server-ce/hotfix/5.5.1/Dockerfile (new file, 28 lines)
@@ -0,0 +1,28 @@
+FROM sharelatex/sharelatex:5.5.0
+
+
+# fix tls configuration in redis for history-v1
+COPY pr_25168.patch .
+RUN patch -p1 < pr_25168.patch && rm pr_25168.patch
+
+# improve logging in history system
+COPY pr_26086.patch .
+RUN patch -p1 < pr_26086.patch && rm pr_26086.patch
+
+# fix create-user.mjs script
+COPY pr_26152.patch .
+RUN patch -p1 < pr_26152.patch && rm pr_26152.patch
+
+# check mongo featureCompatibilityVersion
+COPY pr_26091.patch .
+RUN patch -p1 < pr_26091.patch && rm pr_26091.patch
+
+# update multer and tar-fs
+RUN sed -i 's/"multer": "2.0.0"/"multer": "2.0.1"/g' package.json
+RUN sed -i 's/"dockerode": "^4.0.5"/"dockerode": "^4.0.7"/g' services/clsi/package.json
+RUN sed -i 's/"tar-fs": "^3.0.4"/"tar-fs": "^3.0.9"/g' services/clsi/package.json
+RUN sed -i 's/199c5ff05bd375c508f4074498237baead7f5148/4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23/g' services/web/package.json
+COPY package-lock.json.diff .
+RUN patch package-lock.json < package-lock.json.diff
+RUN npm install --omit=dev
+RUN npm install @paralleldrive/cuid2@2.2.2 -w services/history-v1
server-ce/hotfix/5.5.1/package-lock.json.diff (new file, 2202 lines): file diff suppressed because it is too large.
server-ce/hotfix/5.5.1/pr_25168.patch (new file, 19 lines)
@@ -0,0 +1,19 @@
+--- a/services/history-v1/config/custom-environment-variables.json
++++ b/services/history-v1/config/custom-environment-variables.json
+@@ -50,12 +50,14 @@
+     "history": {
+       "host": "OVERLEAF_REDIS_HOST",
+       "password": "OVERLEAF_REDIS_PASS",
+-      "port": "OVERLEAF_REDIS_PORT"
++      "port": "OVERLEAF_REDIS_PORT",
++      "tls": "OVERLEAF_REDIS_TLS"
+     },
+     "lock": {
+       "host": "OVERLEAF_REDIS_HOST",
+       "password": "OVERLEAF_REDIS_PASS",
+-      "port": "OVERLEAF_REDIS_PORT"
++      "port": "OVERLEAF_REDIS_PORT",
++      "tls": "OVERLEAF_REDIS_TLS"
+     }
+   }
+ }
server-ce/hotfix/5.5.1/pr_26086.patch (new file, 200 lines)
@ -0,0 +1,200 @@
|
||||||
|
--- a/services/history-v1/api/controllers/project_import.js
|
||||||
|
+++ b/services/history-v1/api/controllers/project_import.js
|
||||||
|
@@ -35,6 +35,7 @@ async function importSnapshot(req, res) {
|
||||||
|
try {
|
||||||
|
snapshot = Snapshot.fromRaw(rawSnapshot)
|
||||||
|
} catch (err) {
|
||||||
|
+ logger.warn({ err, projectId }, 'failed to import snapshot')
|
||||||
|
return render.unprocessableEntity(res)
|
||||||
|
}
|
||||||
|
|
||||||
|
@@ -43,6 +44,7 @@ async function importSnapshot(req, res) {
|
||||||
|
historyId = await chunkStore.initializeProject(projectId, snapshot)
|
||||||
|
} catch (err) {
|
||||||
|
if (err instanceof chunkStore.AlreadyInitialized) {
|
||||||
|
+ logger.warn({ err, projectId }, 'already initialized')
|
||||||
|
return render.conflict(res)
|
||||||
|
} else {
|
||||||
|
throw err
|
||||||
|
--- a/services/history-v1/api/controllers/projects.js
|
||||||
|
+++ b/services/history-v1/api/controllers/projects.js
|
||||||
|
@@ -34,6 +34,7 @@ async function initializeProject(req, res, next) {
|
||||||
|
res.status(HTTPStatus.OK).json({ projectId })
|
||||||
|
} catch (err) {
|
||||||
|
if (err instanceof chunkStore.AlreadyInitialized) {
|
||||||
|
+ logger.warn({ err, projectId }, 'failed to initialize')
|
||||||
|
render.conflict(res)
|
||||||
|
} else {
|
||||||
|
throw err
|
||||||
|
@@ -242,11 +243,15 @@ async function createProjectBlob(req, res, next) {
|
||||||
|
const sizeLimit = new StreamSizeLimit(maxUploadSize)
|
||||||
|
await pipeline(req, sizeLimit, fs.createWriteStream(tmpPath))
|
||||||
|
if (sizeLimit.sizeLimitExceeded) {
|
||||||
|
+ logger.warn(
|
||||||
|
+ { projectId, expectedHash, maxUploadSize },
|
||||||
|
+ 'blob exceeds size threshold'
|
||||||
|
+ )
|
||||||
|
return render.requestEntityTooLarge(res)
|
||||||
|
}
|
||||||
|
const hash = await blobHash.fromFile(tmpPath)
|
||||||
|
if (hash !== expectedHash) {
|
||||||
|
- logger.debug({ hash, expectedHash }, 'Hash mismatch')
|
||||||
|
+ logger.warn({ projectId, hash, expectedHash }, 'Hash mismatch')
|
||||||
|
return render.conflict(res, 'File hash mismatch')
|
||||||
|
}
|
||||||
|
|
||||||
|
@@ -343,6 +348,10 @@ async function copyProjectBlob(req, res, next) {
|
||||||
|
targetBlobStore.getBlob(blobHash),
|
||||||
|
])
|
||||||
|
if (!sourceBlob) {
|
||||||
|
+ logger.warn(
|
||||||
|
+ { sourceProjectId, targetProjectId, blobHash },
|
||||||
|
+ 'missing source blob when copying across projects'
|
||||||
|
+ )
|
||||||
|
return render.notFound(res)
|
||||||
|
}
|
||||||
|
// Exit early if the blob exists in the target project.
|
||||||
|
--- a/services/history-v1/app.js
|
||||||
|
+++ b/services/history-v1/app.js
|
||||||
|
@@ -100,11 +100,13 @@ function setupErrorHandling() {
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if (err.code === 'ENUM_MISMATCH') {
|
||||||
|
+ logger.warn({ err, projectId }, err.message)
|
||||||
|
return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({
|
||||||
|
message: 'invalid enum value: ' + err.paramName,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if (err.code === 'REQUIRED') {
|
||||||
|
+ logger.warn({ err, projectId }, err.message)
|
||||||
|
return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({
|
||||||
|
message: err.message,
|
||||||
|
})
|
||||||
|
--- a/services/project-history/app/js/HistoryStoreManager.js
|
||||||
|
+++ b/services/project-history/app/js/HistoryStoreManager.js
|
||||||
|
@@ -35,7 +35,10 @@ class StringStream extends stream.Readable {
|
||||||
|
_mocks.getMostRecentChunk = (projectId, historyId, callback) => {
|
||||||
|
const path = `projects/${historyId}/latest/history`
|
||||||
|
logger.debug({ projectId, historyId }, 'getting chunk from history service')
|
||||||
|
- _requestChunk({ path, json: true }, callback)
|
||||||
|
+ _requestChunk({ path, json: true }, (err, chunk) => {
|
||||||
|
+ if (err) return callback(OError.tag(err))
|
||||||
|
+ callback(null, chunk)
|
||||||
|
+ })
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
@@ -54,7 +57,10 @@ export function getChunkAtVersion(projectId, historyId, version, callback) {
|
||||||
|
{ projectId, historyId, version },
|
||||||
|
'getting chunk from history service for version'
|
||||||
|
)
|
||||||
|
- _requestChunk({ path, json: true }, callback)
|
||||||
|
+ _requestChunk({ path, json: true }, (err, chunk) => {
|
||||||
|
+ if (err) return callback(OError.tag(err))
|
||||||
|
+ callback(null, chunk)
|
||||||
|
+ })
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getMostRecentVersion(projectId, historyId, callback) {
|
||||||
|
@@ -68,8 +74,10 @@ export function getMostRecentVersion(projectId, historyId, callback) {
|
||||||
|
_.sortBy(chunk.chunk.history.changes || [], x => x.timestamp)
|
||||||
|
)
|
||||||
|
// find the latest project and doc versions in the chunk
|
||||||
|
- _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) =>
|
||||||
|
+ _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) => {
|
||||||
|
+ if (err1) err1 = OError.tag(err1)
|
||||||
|
_getLatestV2DocVersions(projectId, chunk, (err2, v2DocVersions) => {
|
||||||
|
+ if (err2) err2 = OError.tag(err2)
|
||||||
|
// return the project and doc versions
|
||||||
|
const projectStructureAndDocVersions = {
|
||||||
|
project: projectVersion,
|
||||||
|
@@ -83,7 +91,7 @@ export function getMostRecentVersion(projectId, historyId, callback) {
|
||||||
|
chunk
|
||||||
|
)
|
||||||
|
})
|
||||||
|
- )
|
||||||
|
+ })
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
@@ -211,7 +219,10 @@ export function getProjectBlob(historyId, blobHash, callback) {
|
||||||
|
logger.debug({ historyId, blobHash }, 'getting blob from history service')
|
||||||
|
_requestHistoryService(
|
||||||
|
{ path: `projects/${historyId}/blobs/${blobHash}` },
|
||||||
|
- callback
|
||||||
|
+ (err, blob) => {
|
||||||
|
+ if (err) return callback(OError.tag(err))
|
||||||
|
+ callback(null, blob)
|
||||||
|
+ }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
@@ -277,7 +288,10 @@ function createBlobFromString(historyId, data, fileId, callback) {
|
||||||
|
(fsPath, cb) => {
|
||||||
|
_createBlob(historyId, fsPath, cb)
|
||||||
|
},
|
||||||
|
- callback
|
||||||
|
+ (err, hash) => {
|
||||||
|
+ if (err) return callback(OError.tag(err))
|
||||||
|
+ callback(null, hash)
|
||||||
|
+ }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
@@ -330,7 +344,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
|
||||||
|
try {
|
||||||
|
ranges = HistoryBlobTranslator.createRangeBlobDataFromUpdate(update)
|
||||||
|
} catch (error) {
|
||||||
|
- return callback(error)
|
||||||
|
+ return callback(OError.tag(error))
|
||||||
|
}
|
||||||
|
createBlobFromString(
|
||||||
|
historyId,
|
||||||
|
@@ -338,7 +352,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
|
||||||
|
`project-${projectId}-doc-${update.doc}`,
|
||||||
|
(err, fileHash) => {
|
||||||
|
if (err) {
|
||||||
|
- return callback(err)
|
||||||
|
+ return callback(OError.tag(err))
|
||||||
|
}
|
||||||
|
if (ranges) {
|
||||||
|
createBlobFromString(
|
||||||
|
@@ -347,7 +361,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
|
||||||
|
`project-${projectId}-doc-${update.doc}-ranges`,
|
||||||
|
(err, rangesHash) => {
|
||||||
|
if (err) {
|
||||||
|
- return callback(err)
|
||||||
|
+ return callback(OError.tag(err))
|
||||||
|
}
|
||||||
|
logger.debug(
|
||||||
|
{ fileHash, rangesHash },
|
||||||
|
@@ -415,7 +429,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
|
||||||
|
},
|
||||||
|
(err, fileHash) => {
|
||||||
|
if (err) {
|
||||||
|
- return callback(err)
|
||||||
|
+ return callback(OError.tag(err))
|
||||||
|
}
|
||||||
|
if (update.hash && update.hash !== fileHash) {
|
||||||
|
logger.warn(
|
||||||
|
@@ -447,7 +461,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
|
||||||
|
},
|
||||||
|
(err, fileHash) => {
|
||||||
|
if (err) {
|
||||||
|
- return callback(err)
|
||||||
|
+ return callback(OError.tag(err))
|
||||||
|
}
|
||||||
|
logger.debug({ fileHash }, 'created empty blob for file')
|
||||||
|
callback(null, { file: fileHash })
|
||||||
|
@@ -520,7 +534,10 @@ export function initializeProject(historyId, callback) {
|
||||||
|
export function deleteProject(projectId, callback) {
|
||||||
|
_requestHistoryService(
|
||||||
|
{ method: 'DELETE', path: `projects/${projectId}` },
|
||||||
|
- callback
|
||||||
|
+ err => {
|
||||||
|
+ if (err) return callback(OError.tag(err))
|
||||||
|
+ callback(null)
|
||||||
|
+ }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
server-ce/hotfix/5.5.1/pr_26091.patch (new file, 60 lines)
@@ -0,0 +1,60 @@
--- a/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
+++ b/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
@@ -7,6 +7,7 @@ import {
 const { ObjectId } = mongodb

 const MIN_MONGO_VERSION = [6, 0]
+const MIN_MONGO_FEATURE_COMPATIBILITY_VERSION = [6, 0]

 async function main() {
   let mongoClient
@@ -18,6 +19,7 @@ async function main() {
   }

   await checkMongoVersion(mongoClient)
+  await checkFeatureCompatibilityVersion(mongoClient)

   try {
     await testTransactions(mongoClient)
@@ -53,6 +55,41 @@ async function checkMongoVersion(mongoClient) {
   }
 }

+async function checkFeatureCompatibilityVersion(mongoClient) {
+  const {
+    featureCompatibilityVersion: { version },
+  } = await mongoClient
+    .db()
+    .admin()
+    .command({ getParameter: 1, featureCompatibilityVersion: 1 })
+  const [major, minor] = version.split('.').map(v => parseInt(v))
+  const [minMajor, minMinor] = MIN_MONGO_FEATURE_COMPATIBILITY_VERSION
+
+  if (major < minMajor || (major === minMajor && minor < minMinor)) {
+    const minVersion = MIN_MONGO_FEATURE_COMPATIBILITY_VERSION.join('.')
+    console.error(`
+The MongoDB server has featureCompatibilityVersion=${version}, but Overleaf requires at least version ${minVersion}.
+
+Open a mongo shell:
+- Overleaf Toolkit deployments: $ bin/mongo
+- Legacy docker-compose.yml deployments: $ docker exec -it mongo mongosh localhost/sharelatex
+
+In the mongo shell:
+> db.adminCommand( { setFeatureCompatibilityVersion: "${minMajor}.${minMinor}" } )
+
+Verify the new value:
+> db.adminCommand( { getParameter: 1, featureCompatibilityVersion: 1 } )
+  ...
+  {
+    featureCompatibilityVersion: { version: '${minMajor}.${minMinor}' },
+...
+
+Aborting.
+`)
+    process.exit(1)
+  }
+}
+
 main()
   .then(() => {
     console.error('Mongodb is up.')
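Note: to see what the new check will report before upgrading, the current featureCompatibilityVersion can be read with the same admin command the patch uses. A sketch for a docker-compose deployment (the container name "mongo" is an assumption; adjust for your setup):

    docker exec mongo mongosh --quiet --eval \
      'db.adminCommand({ getParameter: 1, featureCompatibilityVersion: 1 })'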
server-ce/hotfix/5.5.1/pr_26152.patch (new file, 16 lines)
@@ -0,0 +1,16 @@
--- a/services/web/modules/server-ce-scripts/scripts/create-user.mjs
+++ b/services/web/modules/server-ce-scripts/scripts/create-user.mjs
@@ -48,3 +48,13 @@ Please visit the following URL to set a password for ${email} and log in:
     )
   })
 }
+
+if (filename === process.argv[1]) {
+  try {
+    await main()
+    process.exit(0)
+  } catch (error) {
+    console.error({ error })
+    process.exit(1)
+  }
+}
server-ce/runit/references-overleaf/run (new executable file, 12 lines)
@@ -0,0 +1,12 @@
#!/bin/bash

NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then
  echo "running debug - references"
  NODE_PARAMS="--inspect=0.0.0.0:30560"
fi

source /etc/overleaf/env.sh
export LISTEN_ADDRESS=127.0.0.1

exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/references/app.js >> /var/log/overleaf/references.log 2>&1
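Note: with DEBUG_NODE=true the script exposes the Node inspector on port 30560; a debugger could then attach from the host, e.g. (assuming the port is reachable from where you run this):

    node inspect 127.0.0.1:30560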
@@ -29,6 +29,9 @@ module.exports = [
   {
     name: 'project-history',
   },
+  {
+    name: 'references',
+  },
   {
     name: 'history-v1',
   },
@@ -6,8 +6,8 @@ all: test-e2e
 # Notable the container labels com.docker.compose.project.working_dir and com.docker.compose.project.config_files need to match when creating containers from the docker host (how you started things) and from host-admin (how tests reconfigure the instance).
 export PWD = $(shell pwd)

-export TEX_LIVE_DOCKER_IMAGE ?= gcr.io/overleaf-ops/texlive-full:2023.1
-export ALL_TEX_LIVE_DOCKER_IMAGES ?= gcr.io/overleaf-ops/texlive-full:2023.1,gcr.io/overleaf-ops/texlive-full:2022.1
+export TEX_LIVE_DOCKER_IMAGE ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2023.1
+export ALL_TEX_LIVE_DOCKER_IMAGES ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2023.1,us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2022.1
 export IMAGE_TAG_PRO ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/pro:latest
 export CYPRESS_SHARD ?=
 export COMPOSE_PROJECT_NAME ?= test
@@ -20,6 +20,7 @@ test-e2e-native:
 	npm run cypress:open

 test-e2e:
+	docker compose build host-admin
 	docker compose up --no-log-prefix --exit-code-from=e2e e2e

 test-e2e-open:
@@ -45,7 +46,7 @@ prefetch_custom_compose_pull:
 prefetch_custom: prefetch_custom_texlive
 prefetch_custom_texlive:
 	echo "$$ALL_TEX_LIVE_DOCKER_IMAGES" | tr ',' '\n' | xargs -I% \
-		sh -exc 'tag=%; re_tag=quay.io/sharelatex/$${tag#*/*/}; docker pull $$tag; docker tag $$tag $$re_tag'
+		sh -exc 'tag=%; re_tag=quay.io/sharelatex/$${tag#*/*/*/}; docker pull $$tag; docker tag $$tag $$re_tag'

 prefetch_custom: prefetch_old
 prefetch_old:
@@ -179,6 +179,21 @@ describe('admin panel', function () {
     cy.get('nav').findByText('Manage Users').click()
   })

+  it('displays expected tabs', () => {
+    const tabs = ['Users', 'License Usage']
+    cy.get('[role="tab"]').each((el, index) => {
+      cy.wrap(el).findByText(tabs[index]).click()
+    })
+    cy.get('[role="tab"]').should('have.length', tabs.length)
+  })
+
+  it('license usage tab', () => {
+    cy.get('a').contains('License Usage').click()
+    cy.findByText(
+      'An active user is one who has opened a project in this Server Pro instance in the last 12 months.'
+    )
+  })
+
   describe('create users', () => {
     beforeEach(() => {
       cy.get('a').contains('New User').click()
@@ -20,7 +20,7 @@ services:
       OVERLEAF_EMAIL_SMTP_HOST: 'mailtrap'
       OVERLEAF_EMAIL_SMTP_PORT: '25'
       OVERLEAF_EMAIL_SMTP_IGNORE_TLS: 'true'
-      ENABLED_LINKED_FILE_TYPES: 'project_file,project_output_file'
+      ENABLED_LINKED_FILE_TYPES: 'project_file,project_output_file,url'
       ENABLE_CONVERSIONS: 'true'
      EMAIL_CONFIRMATION_DISABLED: 'true'
     healthcheck:
@@ -131,7 +131,7 @@ services:

   saml:
     restart: always
-    image: gcr.io/overleaf-ops/saml-test
+    image: us-east1-docker.pkg.dev/overleaf-ops/ol-docker/saml-test
     environment:
       SAML_TEST_SP_ENTITY_ID: 'sharelatex-test-saml'
       SAML_BASE_URL_PATH: 'http://saml/simplesaml/'
@@ -24,10 +24,13 @@ services:
       MOCHA_GREP: ${MOCHA_GREP}
       NODE_ENV: test
       NODE_OPTIONS: "--unhandled-rejections=strict"
+    volumes:
+      - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
     depends_on:
       mongo:
         condition: service_started
     user: node
+    entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
     command: npm run test:acceptance
@@ -26,6 +26,7 @@ services:
       - .:/overleaf/services/chat
       - ../../node_modules:/overleaf/node_modules
       - ../../libraries:/overleaf/libraries
+      - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
     working_dir: /overleaf/services/chat
     environment:
       ELASTIC_SEARCH_DSN: es:9200
@@ -39,6 +40,7 @@ services:
     depends_on:
       mongo:
         condition: service_started
+    entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
     command: npm run --silent test:acceptance

   mongo:
@@ -19,18 +19,18 @@ The CLSI can be configured through the following environment variables:
 * `ALLOWED_IMAGES` - Space separated list of allowed Docker TeX Live images
 * `CATCH_ERRORS` - Set to `true` to log uncaught exceptions
 * `COMPILE_GROUP_DOCKER_CONFIGS` - JSON string of Docker configs for compile groups
-* `COMPILES_HOST_DIR` - Working directory for LaTeX compiles
-* `OUTPUT_HOST_DIR` - Output directory for LaTeX compiles
+* `SANDBOXED_COMPILES` - Set to true to use sibling containers
+* `SANDBOXED_COMPILES_HOST_DIR_COMPILES` - Working directory for LaTeX compiles
+* `SANDBOXED_COMPILES_HOST_DIR_OUTPUT` - Output directory for LaTeX compiles
 * `COMPILE_SIZE_LIMIT` - Sets the body-parser [limit](https://github.com/expressjs/body-parser#limit)
-* `DOCKER_RUNNER` - Set to true to use sibling containers
 * `DOCKER_RUNTIME` -
 * `FILESTORE_DOMAIN_OVERRIDE` - The url for the filestore service e.g.`http://$FILESTORE_HOST:3009`
 * `FILESTORE_PARALLEL_FILE_DOWNLOADS` - Number of parallel file downloads
 * `LISTEN_ADDRESS` - The address for the RESTful service to listen on. Set to `0.0.0.0` to listen on all network interfaces
 * `PROCESS_LIFE_SPAN_LIMIT_MS` - Process life span limit in milliseconds
 * `SMOKE_TEST` - Whether to run smoke tests
-* `TEXLIVE_IMAGE` - The TeX Live Docker image to use for sibling containers, e.g. `gcr.io/overleaf-ops/texlive-full:2017.1`
+* `TEXLIVE_IMAGE` - The TeX Live Docker image to use for sibling containers, e.g. `us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2017.1`
-* `TEX_LIVE_IMAGE_NAME_OVERRIDE` - The name of the registry for the Docker image e.g. `gcr.io/overleaf-ops`
+* `TEX_LIVE_IMAGE_NAME_OVERRIDE` - The name of the registry for the Docker image e.g. `us-east1-docker.pkg.dev/overleaf-ops/ol-docker`
 * `TEXLIVE_IMAGE_USER` - When using sibling containers, the user to run as in the TeX Live image. Defaults to `tex`
 * `TEXLIVE_OPENOUT_ANY` - Sets the `openout_any` environment variable for TeX Live (see the `\openout` primitive [documentation](http://tug.org/texinfohtml/web2c.html#tex-invocation))
@@ -63,10 +63,10 @@ Then start the Docker container:
 docker run --rm \
   -p 127.0.0.1:3013:3013 \
   -e LISTEN_ADDRESS=0.0.0.0 \
-  -e DOCKER_RUNNER=true \
+  -e SANDBOXED_COMPILES=true \
   -e TEXLIVE_IMAGE=texlive/texlive \
   -e TEXLIVE_IMAGE_USER=root \
-  -e COMPILES_HOST_DIR="$PWD/compiles" \
+  -e SANDBOXED_COMPILES_HOST_DIR_COMPILES="$PWD/compiles" \
   -v "$PWD/compiles:/overleaf/services/clsi/compiles" \
   -v "$PWD/cache:/overleaf/services/clsi/cache" \
   -v /var/run/docker.sock:/var/run/docker.sock \
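Note: a sketch of migrating an existing CLSI environment from the removed names to the new ones (values illustrative; the settings check further down still accepts DOCKER_RUNNER alongside SANDBOXED_COMPILES):

    export SANDBOXED_COMPILES=true                                 # was DOCKER_RUNNER=true
    export SANDBOXED_COMPILES_HOST_DIR_COMPILES="$PWD/compiles"    # was COMPILES_HOST_DIR
    export SANDBOXED_COMPILES_HOST_DIR_OUTPUT="$PWD/output"        # was OUTPUT_HOST_DIR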
@@ -232,8 +232,8 @@ const DockerRunner = {
       }
     }
     // set the path based on the image year
-    const match = image.match(/:([0-9]+)\.[0-9]+/)
-    const year = match ? match[1] : '2014'
+    const match = image.match(/:([0-9]+)\.[0-9]+|:TL([0-9]+)/)
+    const year = match ? match[1] || match[2] : '2014'
     env.PATH = `/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/${year}/bin/x86_64-linux/`
     const options = {
       Cmd: command,
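Note: the widened regex additionally recognises TeX Live images tagged like :TL2023 next to the existing :2023.1 form. A quick standalone illustration of the new match and fallback behaviour (not from the repo):

    node -e '
    const re = /:([0-9]+)\.[0-9]+|:TL([0-9]+)/
    for (const img of ["texlive-full:2023.1", "texlive:TL2023", "texlive:latest"]) {
      const m = img.match(re)
      console.log(img, "->", m ? m[1] || m[2] : "2014 (fallback)")
    }'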
@@ -2,7 +2,7 @@ clsi
 --data-dirs=cache,compiles,output
 --dependencies=
 --docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
---env-add=ENABLE_PDF_CACHING="true",PDF_CACHING_ENABLE_WORKER_POOL="true",ALLOWED_IMAGES=quay.io/sharelatex/texlive-full:2017.1,TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1,TEX_LIVE_IMAGE_NAME_OVERRIDE=us-east1-docker.pkg.dev/overleaf-ops/ol-docker,TEXLIVE_IMAGE_USER="tex",DOCKER_RUNNER="true",COMPILES_HOST_DIR=$PWD/compiles,OUTPUT_HOST_DIR=$PWD/output
+--env-add=ENABLE_PDF_CACHING="true",PDF_CACHING_ENABLE_WORKER_POOL="true",ALLOWED_IMAGES=quay.io/sharelatex/texlive-full:2017.1,TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1,TEX_LIVE_IMAGE_NAME_OVERRIDE=us-east1-docker.pkg.dev/overleaf-ops/ol-docker,TEXLIVE_IMAGE_USER="tex",SANDBOXED_COMPILES="true",SANDBOXED_COMPILES_HOST_DIR_COMPILES=$PWD/compiles,SANDBOXED_COMPILES_HOST_DIR_OUTPUT=$PWD/output
 --env-pass-through=
 --esmock-loader=False
 --node-version=22.15.1
@@ -107,7 +107,7 @@ if ((process.env.DOCKER_RUNNER || process.env.SANDBOXED_COMPILES) === 'true') {
       CLSI: 1,
     },
     socketPath: '/var/run/docker.sock',
-    user: process.env.TEXLIVE_IMAGE_USER || 'tex',
+    user: process.env.TEXLIVE_IMAGE_USER || 'www-data',
   },
   optimiseInDocker: true,
   expireProjectAfterIdleMs: 24 * 60 * 60 * 1000,
@@ -29,9 +29,9 @@ services:
       TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
       TEX_LIVE_IMAGE_NAME_OVERRIDE: us-east1-docker.pkg.dev/overleaf-ops/ol-docker
       TEXLIVE_IMAGE_USER: "tex"
-      DOCKER_RUNNER: "true"
-      COMPILES_HOST_DIR: $PWD/compiles
-      OUTPUT_HOST_DIR: $PWD/output
+      SANDBOXED_COMPILES: "true"
+      SANDBOXED_COMPILES_HOST_DIR_COMPILES: $PWD/compiles
+      SANDBOXED_COMPILES_HOST_DIR_OUTPUT: $PWD/output
     volumes:
       - ./compiles:/overleaf/services/clsi/compiles
       - /var/run/docker.sock:/var/run/docker.sock
@@ -47,8 +47,8 @@ services:
       TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
       TEX_LIVE_IMAGE_NAME_OVERRIDE: us-east1-docker.pkg.dev/overleaf-ops/ol-docker
       TEXLIVE_IMAGE_USER: "tex"
-      DOCKER_RUNNER: "true"
-      COMPILES_HOST_DIR: $PWD/compiles
-      OUTPUT_HOST_DIR: $PWD/output
+      SANDBOXED_COMPILES: "true"
+      SANDBOXED_COMPILES_HOST_DIR_COMPILES: $PWD/compiles
+      SANDBOXED_COMPILES_HOST_DIR_OUTPUT: $PWD/output
     command: npm run --silent test:acceptance
@@ -27,13 +27,13 @@
     "async": "^3.2.5",
     "body-parser": "^1.20.3",
     "bunyan": "^1.8.15",
-    "dockerode": "^4.0.5",
+    "dockerode": "^4.0.7",
     "express": "^4.21.2",
     "lodash": "^4.17.21",
     "p-limit": "^3.1.0",
     "request": "^2.88.2",
     "send": "^0.19.0",
-    "tar-fs": "^3.0.4",
+    "tar-fs": "^3.0.9",
     "workerpool": "^6.1.5"
   },
   "devDependencies": {
@@ -829,13 +829,19 @@
       "args": []
     },
     {
       "name": "gettimeofday",
       "action": "SCMP_ACT_ALLOW",
       "args": []
-    }, {
+    },
+    {
       "name": "epoll_pwait",
       "action": "SCMP_ACT_ALLOW",
       "args": []
+    },
+    {
+      "name": "poll",
+      "action": "SCMP_ACT_ALLOW",
+      "args": []
     }
   ]
 }
@@ -24,10 +24,13 @@ services:
       MOCHA_GREP: ${MOCHA_GREP}
       NODE_ENV: test
       NODE_OPTIONS: "--unhandled-rejections=strict"
+    volumes:
+      - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
     depends_on:
       mongo:
         condition: service_started
     user: node
+    entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
     command: npm run test:acceptance
@@ -26,6 +26,7 @@ services:
       - .:/overleaf/services/contacts
       - ../../node_modules:/overleaf/node_modules
       - ../../libraries:/overleaf/libraries
+      - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
     working_dir: /overleaf/services/contacts
     environment:
       ELASTIC_SEARCH_DSN: es:9200
@@ -39,6 +40,7 @@ services:
     depends_on:
       mongo:
         condition: service_started
+    entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
     command: npm run --silent test:acceptance

   mongo:
@@ -6,9 +6,9 @@
   "main": "app.js",
   "scripts": {
     "start": "node app.js",
-    "test:acceptance:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js",
+    "test:acceptance:_run": "mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js",
     "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
-    "test:unit:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec $@ test/unit/js",
+    "test:unit:_run": "mocha --loader=esmock --recursive --reporter spec $@ test/unit/js",
     "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
     "nodemon": "node --watch app.js",
     "lint": "eslint --max-warnings 0 --format unix .",
@@ -50,6 +50,14 @@ app.param('doc_id', function (req, res, next, docId) {
 app.get('/project/:project_id/doc-deleted', HttpController.getAllDeletedDocs)
 app.get('/project/:project_id/doc', HttpController.getAllDocs)
 app.get('/project/:project_id/ranges', HttpController.getAllRanges)
+app.get(
+  '/project/:project_id/comment-thread-ids',
+  HttpController.getCommentThreadIds
+)
+app.get(
+  '/project/:project_id/tracked-changes-user-ids',
+  HttpController.getTrackedChangesUserIds
+)
 app.get('/project/:project_id/has-ranges', HttpController.projectHasRanges)
 app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc)
 app.get('/project/:project_id/doc/:doc_id/deleted', HttpController.isDocDeleted)
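Note: the two new routes are plain GETs, so they could be exercised against a local docstore with curl. In this sketch both the port (3016) and the project id are assumptions, not values from the diff:

    curl http://127.0.0.1:3016/project/5f0c9d0e0000000000000000/comment-thread-ids
    curl http://127.0.0.1:3016/project/5f0c9d0e0000000000000000/tracked-changes-user-ids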
@@ -1,5 +1,4 @@
-const { callbackify } = require('node:util')
-const MongoManager = require('./MongoManager').promises
+const MongoManager = require('./MongoManager')
 const Errors = require('./Errors')
 const logger = require('@overleaf/logger')
 const Settings = require('@overleaf/settings')
@@ -8,29 +7,12 @@ const { ReadableString } = require('@overleaf/stream-utils')
 const RangeManager = require('./RangeManager')
 const PersistorManager = require('./PersistorManager')
 const pMap = require('p-map')
-const { streamToBuffer } = require('./StreamToBuffer').promises
+const { streamToBuffer } = require('./StreamToBuffer')
 const { BSON } = require('mongodb-legacy')

 const PARALLEL_JOBS = Settings.parallelArchiveJobs
 const UN_ARCHIVE_BATCH_SIZE = Settings.unArchiveBatchSize

-module.exports = {
-  archiveAllDocs: callbackify(archiveAllDocs),
-  archiveDoc: callbackify(archiveDoc),
-  unArchiveAllDocs: callbackify(unArchiveAllDocs),
-  unarchiveDoc: callbackify(unarchiveDoc),
-  destroyProject: callbackify(destroyProject),
-  getDoc: callbackify(getDoc),
-  promises: {
-    archiveAllDocs,
-    archiveDoc,
-    unArchiveAllDocs,
-    unarchiveDoc,
-    destroyProject,
-    getDoc,
-  },
-}
-
 async function archiveAllDocs(projectId) {
   if (!_isArchivingEnabled()) {
     return
@@ -62,6 +44,8 @@ async function archiveDoc(projectId, docId) {
     throw new Error('doc has no lines')
   }

+  RangeManager.fixCommentIds(doc)
+
   // warn about any oversized docs already in mongo
   const linesSize = BSON.calculateObjectSize(doc.lines || {})
   const rangesSize = BSON.calculateObjectSize(doc.ranges || {})
@@ -225,3 +209,12 @@ function _isArchivingEnabled() {

   return true
 }
+
+module.exports = {
+  archiveAllDocs,
+  archiveDoc,
+  unArchiveAllDocs,
+  unarchiveDoc,
+  destroyProject,
+  getDoc,
+}
@@ -5,7 +5,6 @@ const _ = require('lodash')
 const DocArchive = require('./DocArchiveManager')
 const RangeManager = require('./RangeManager')
 const Settings = require('@overleaf/settings')
-const { callbackifyAll } = require('@overleaf/promise-utils')
 const { setTimeout } = require('node:timers/promises')

 /**
@@ -29,7 +28,7 @@ const DocManager = {
       throw new Error('must include inS3 when getting doc')
     }

-    const doc = await MongoManager.promises.findDoc(projectId, docId, filter)
+    const doc = await MongoManager.findDoc(projectId, docId, filter)

     if (doc == null) {
       throw new Errors.NotFoundError(
@@ -38,15 +37,19 @@ const DocManager = {
     }

     if (doc.inS3) {
-      await DocArchive.promises.unarchiveDoc(projectId, docId)
+      await DocArchive.unarchiveDoc(projectId, docId)
       return await DocManager._getDoc(projectId, docId, filter)
     }

+    if (filter.ranges) {
+      RangeManager.fixCommentIds(doc)
+    }
+
     return doc
   },

   async isDocDeleted(projectId, docId) {
-    const doc = await MongoManager.promises.findDoc(projectId, docId, {
+    const doc = await MongoManager.findDoc(projectId, docId, {
       deleted: true,
     })

@@ -74,7 +77,7 @@ const DocManager = {

   // returns the doc without any version information
   async _peekRawDoc(projectId, docId) {
-    const doc = await MongoManager.promises.findDoc(projectId, docId, {
+    const doc = await MongoManager.findDoc(projectId, docId, {
       lines: true,
       rev: true,
       deleted: true,
@@ -91,7 +94,7 @@ const DocManager = {

     if (doc.inS3) {
       // skip the unarchiving to mongo when getting a doc
-      const archivedDoc = await DocArchive.promises.getDoc(projectId, docId)
+      const archivedDoc = await DocArchive.getDoc(projectId, docId)
       Object.assign(doc, archivedDoc)
     }

@@ -102,7 +105,7 @@ const DocManager = {
   // without unarchiving it (avoids unnecessary writes to mongo)
   async peekDoc(projectId, docId) {
     const doc = await DocManager._peekRawDoc(projectId, docId)
-    await MongoManager.promises.checkRevUnchanged(doc)
+    await MongoManager.checkRevUnchanged(doc)
     return doc
   },

@@ -111,16 +114,18 @@ const DocManager = {
       lines: true,
       inS3: true,
     })
-    return doc
+    if (!doc) throw new Errors.NotFoundError()
+    if (!Array.isArray(doc.lines)) throw new Errors.DocWithoutLinesError()
+    return doc.lines.join('\n')
   },

   async getAllDeletedDocs(projectId, filter) {
-    return await MongoManager.promises.getProjectsDeletedDocs(projectId, filter)
+    return await MongoManager.getProjectsDeletedDocs(projectId, filter)
   },

   async getAllNonDeletedDocs(projectId, filter) {
-    await DocArchive.promises.unArchiveAllDocs(projectId)
-    const docs = await MongoManager.promises.getProjectsDocs(
+    await DocArchive.unArchiveAllDocs(projectId)
+    const docs = await MongoManager.getProjectsDocs(
       projectId,
       { include_deleted: false },
       filter
@@ -128,15 +133,46 @@ const DocManager = {
     if (docs == null) {
       throw new Errors.NotFoundError(`No docs for project ${projectId}`)
     }
+    if (filter.ranges) {
+      for (const doc of docs) {
+        RangeManager.fixCommentIds(doc)
+      }
+    }
     return docs
   },

+  async getCommentThreadIds(projectId) {
+    const docs = await DocManager.getAllNonDeletedDocs(projectId, {
+      _id: true,
+      ranges: true,
+    })
+    const byDoc = new Map()
+    for (const doc of docs) {
+      const ids = new Set()
+      for (const comment of doc.ranges?.comments || []) {
+        ids.add(comment.op.t)
+      }
+      if (ids.size > 0) byDoc.set(doc._id.toString(), Array.from(ids))
+    }
+    return Object.fromEntries(byDoc.entries())
+  },
+
+  async getTrackedChangesUserIds(projectId) {
+    const docs = await DocManager.getAllNonDeletedDocs(projectId, {
+      ranges: true,
+    })
+    const userIds = new Set()
+    for (const doc of docs) {
+      for (const change of doc.ranges?.changes || []) {
+        if (change.metadata.user_id === 'anonymous-user') continue
+        userIds.add(change.metadata.user_id)
+      }
+    }
+    return Array.from(userIds)
+  },
+
   async projectHasRanges(projectId) {
-    const docs = await MongoManager.promises.getProjectsDocs(
-      projectId,
-      {},
-      { _id: 1 }
-    )
+    const docs = await MongoManager.getProjectsDocs(projectId, {}, { _id: 1 })
     const docIds = docs.map(doc => doc._id)
     for (const docId of docIds) {
       const doc = await DocManager.peekDoc(projectId, docId)
@@ -247,7 +283,7 @@ const DocManager = {
     }

     modified = true
-    await MongoManager.promises.upsertIntoDocCollection(
+    await MongoManager.upsertIntoDocCollection(
       projectId,
       docId,
       doc?.rev,
@@ -262,11 +298,7 @@ const DocManager = {

   async patchDoc(projectId, docId, meta) {
     const projection = { _id: 1, deleted: true }
-    const doc = await MongoManager.promises.findDoc(
-      projectId,
-      docId,
-      projection
-    )
+    const doc = await MongoManager.findDoc(projectId, docId, projection)
     if (!doc) {
       throw new Errors.NotFoundError(
         `No such project/doc to delete: ${projectId}/${docId}`
@@ -275,7 +307,7 @@ const DocManager = {

     if (meta.deleted && Settings.docstore.archiveOnSoftDelete) {
       // The user will not read this doc anytime soon. Flush it out of mongo.
-      DocArchive.promises.archiveDoc(projectId, docId).catch(err => {
+      DocArchive.archiveDoc(projectId, docId).catch(err => {
         logger.warn(
           { projectId, docId, err },
           'archiving a single doc in the background failed'
@@ -283,15 +315,8 @@ const DocManager = {
       })
     }

-    await MongoManager.promises.patchDoc(projectId, docId, meta)
+    await MongoManager.patchDoc(projectId, docId, meta)
   },
 }

-module.exports = {
-  ...callbackifyAll(DocManager, {
-    multiResult: {
-      updateDoc: ['modified', 'rev'],
-    },
-  }),
-  promises: DocManager,
-}
+module.exports = DocManager
@@ -10,10 +10,13 @@ class DocRevValueError extends OError {}

 class DocVersionDecrementedError extends OError {}

+class DocWithoutLinesError extends OError {}
+
 module.exports = {
   Md5MismatchError,
   DocModifiedError,
   DocRevValueError,
   DocVersionDecrementedError,
+  DocWithoutLinesError,
   ...Errors,
 }
@@ -1,67 +1,35 @@
-// TODO: This file was created by bulk-decaffeinate.
-// Fix any style issues and re-enable lint.
-/*
- * decaffeinate suggestions:
- * DS102: Remove unnecessary code created because of implicit returns
- * DS207: Consider shorter variations of null checks
- * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
- */
 const { db, ObjectId } = require('./mongodb')
-const request = require('request')
-const async = require('async')
 const _ = require('lodash')
 const crypto = require('node:crypto')
 const settings = require('@overleaf/settings')
 const { port } = settings.internal.docstore
 const logger = require('@overleaf/logger')
+const { fetchNothing, fetchJson } = require('@overleaf/fetch-utils')

-module.exports = {
-  check(callback) {
-    const docId = new ObjectId()
-    const projectId = new ObjectId(settings.docstore.healthCheck.project_id)
-    const url = `http://127.0.0.1:${port}/project/${projectId}/doc/${docId}`
-    const lines = [
-      'smoke test - delete me',
-      `${crypto.randomBytes(32).toString('hex')}`,
-    ]
-    const getOpts = () => ({
-      url,
-      timeout: 3000,
-    })
-    logger.debug({ lines, url, docId, projectId }, 'running health check')
-    const jobs = [
-      function (cb) {
-        const opts = getOpts()
-        opts.json = { lines, version: 42, ranges: {} }
-        return request.post(opts, cb)
-      },
-      function (cb) {
-        const opts = getOpts()
-        opts.json = true
-        return request.get(opts, function (err, res, body) {
-          if (err != null) {
-            logger.err({ err }, 'docstore returned a error in health check get')
-            return cb(err)
-          } else if (res == null) {
-            return cb(new Error('no response from docstore with get check'))
-          } else if ((res != null ? res.statusCode : undefined) !== 200) {
-            return cb(new Error(`status code not 200, its ${res.statusCode}`))
-          } else if (
-            _.isEqual(body != null ? body.lines : undefined, lines) &&
-            (body != null ? body._id : undefined) === docId.toString()
-          ) {
-            return cb()
-          } else {
-            return cb(
-              new Error(`health check lines not equal ${body.lines} != ${lines}`)
-            )
-          }
-        })
-      },
-      cb => db.docs.deleteOne({ _id: docId, project_id: projectId }, cb),
-    ]
-    return async.series(jobs, callback)
-  },
-}
+async function check() {
+  const docId = new ObjectId()
+  const projectId = new ObjectId(settings.docstore.healthCheck.project_id)
+  const url = `http://127.0.0.1:${port}/project/${projectId}/doc/${docId}`
+  const lines = [
+    'smoke test - delete me',
+    `${crypto.randomBytes(32).toString('hex')}`,
+  ]
+  logger.debug({ lines, url, docId, projectId }, 'running health check')
+  let body
+  try {
+    await fetchNothing(url, {
+      method: 'POST',
+      json: { lines, version: 42, ranges: {} },
+      signal: AbortSignal.timeout(3_000),
+    })
+    body = await fetchJson(url, { signal: AbortSignal.timeout(3_000) })
+  } finally {
+    await db.docs.deleteOne({ _id: docId, project_id: projectId })
+  }
+  if (!_.isEqual(body?.lines, lines)) {
+    throw new Error(`health check lines not equal ${body.lines} != ${lines}`)
+  }
+}
+module.exports = {
+  check,
+}
@@ -4,143 +4,104 @@ const DocArchive = require('./DocArchiveManager')
 const HealthChecker = require('./HealthChecker')
 const Errors = require('./Errors')
 const Settings = require('@overleaf/settings')
+const { expressify } = require('@overleaf/promise-utils')

-function getDoc(req, res, next) {
+async function getDoc(req, res) {
   const { doc_id: docId, project_id: projectId } = req.params
   const includeDeleted = req.query.include_deleted === 'true'
   logger.debug({ projectId, docId }, 'getting doc')
-  DocManager.getFullDoc(projectId, docId, function (error, doc) {
-    if (error) {
-      return next(error)
-    }
-    logger.debug({ docId, projectId }, 'got doc')
-    if (doc == null) {
-      res.sendStatus(404)
-    } else if (doc.deleted && !includeDeleted) {
-      res.sendStatus(404)
-    } else {
-      res.json(_buildDocView(doc))
-    }
-  })
+  const doc = await DocManager.getFullDoc(projectId, docId)
+  logger.debug({ docId, projectId }, 'got doc')
+  if (doc.deleted && !includeDeleted) {
+    res.sendStatus(404)
+  } else {
+    res.json(_buildDocView(doc))
+  }
 }

-function peekDoc(req, res, next) {
+async function peekDoc(req, res) {
   const { doc_id: docId, project_id: projectId } = req.params
   logger.debug({ projectId, docId }, 'peeking doc')
-  DocManager.peekDoc(projectId, docId, function (error, doc) {
-    if (error) {
-      return next(error)
-    }
-    if (doc == null) {
-      res.sendStatus(404)
-    } else {
-      res.setHeader('x-doc-status', doc.inS3 ? 'archived' : 'active')
-      res.json(_buildDocView(doc))
-    }
-  })
+  const doc = await DocManager.peekDoc(projectId, docId)
+  res.setHeader('x-doc-status', doc.inS3 ? 'archived' : 'active')
+  res.json(_buildDocView(doc))
 }

-function isDocDeleted(req, res, next) {
+async function isDocDeleted(req, res) {
   const { doc_id: docId, project_id: projectId } = req.params
-  DocManager.isDocDeleted(projectId, docId, function (error, deleted) {
-    if (error) {
-      return next(error)
-    }
-    res.json({ deleted })
-  })
+  const deleted = await DocManager.isDocDeleted(projectId, docId)
+  res.json({ deleted })
 }

-function getRawDoc(req, res, next) {
+async function getRawDoc(req, res) {
   const { doc_id: docId, project_id: projectId } = req.params
   logger.debug({ projectId, docId }, 'getting raw doc')
-  DocManager.getDocLines(projectId, docId, function (error, doc) {
-    if (error) {
-      return next(error)
-    }
-    if (doc == null) {
-      res.sendStatus(404)
-    } else {
-      res.setHeader('content-type', 'text/plain')
-      res.send(_buildRawDocView(doc))
-    }
-  })
+  const content = await DocManager.getDocLines(projectId, docId)
+  res.setHeader('content-type', 'text/plain')
+  res.send(content)
 }

-function getAllDocs(req, res, next) {
+async function getAllDocs(req, res) {
   const { project_id: projectId } = req.params
   logger.debug({ projectId }, 'getting all docs')
-  DocManager.getAllNonDeletedDocs(
-    projectId,
-    { lines: true, rev: true },
-    function (error, docs) {
-      if (docs == null) {
-        docs = []
-      }
-      if (error) {
-        return next(error)
-      }
-      const docViews = _buildDocsArrayView(projectId, docs)
-      for (const docView of docViews) {
-        if (!docView.lines) {
-          logger.warn({ projectId, docId: docView._id }, 'missing doc lines')
-          docView.lines = []
-        }
-      }
-      res.json(docViews)
-    }
-  )
+  const docs = await DocManager.getAllNonDeletedDocs(projectId, {
+    lines: true,
+    rev: true,
+  })
+  const docViews = _buildDocsArrayView(projectId, docs)
+  for (const docView of docViews) {
+    if (!docView.lines) {
+      logger.warn({ projectId, docId: docView._id }, 'missing doc lines')
+      docView.lines = []
+    }
+  }
+  res.json(docViews)
 }

-function getAllDeletedDocs(req, res, next) {
+async function getAllDeletedDocs(req, res) {
   const { project_id: projectId } = req.params
   logger.debug({ projectId }, 'getting all deleted docs')
-  DocManager.getAllDeletedDocs(
-    projectId,
-    { name: true, deletedAt: true },
-    function (error, docs) {
-      if (error) {
-        return next(error)
-      }
-      res.json(
-        docs.map(doc => ({
-          _id: doc._id.toString(),
-          name: doc.name,
-          deletedAt: doc.deletedAt,
-        }))
-      )
-    }
-  )
+  const docs = await DocManager.getAllDeletedDocs(projectId, {
+    name: true,
+    deletedAt: true,
+  })
+  res.json(
+    docs.map(doc => ({
+      _id: doc._id.toString(),
+      name: doc.name,
+      deletedAt: doc.deletedAt,
+    }))
+  )
 }

-function getAllRanges(req, res, next) {
+async function getAllRanges(req, res) {
   const { project_id: projectId } = req.params
   logger.debug({ projectId }, 'getting all ranges')
-  DocManager.getAllNonDeletedDocs(
-    projectId,
-    { ranges: true },
-    function (error, docs) {
-      if (docs == null) {
-        docs = []
-      }
-      if (error) {
-        return next(error)
-      }
-      res.json(_buildDocsArrayView(projectId, docs))
-    }
-  )
-}
-
-function projectHasRanges(req, res, next) {
-  const { project_id: projectId } = req.params
-  DocManager.projectHasRanges(projectId, (err, projectHasRanges) => {
-    if (err) {
-      return next(err)
-    }
-    res.json({ projectHasRanges })
-  })
+  const docs = await DocManager.getAllNonDeletedDocs(projectId, {
+    ranges: true,
+  })
+  res.json(_buildDocsArrayView(projectId, docs))
 }

-function updateDoc(req, res, next) {
+async function getCommentThreadIds(req, res) {
+  const { project_id: projectId } = req.params
+  const threadIds = await DocManager.getCommentThreadIds(projectId)
+  res.json(threadIds)
+}
+
+async function getTrackedChangesUserIds(req, res) {
+  const { project_id: projectId } = req.params
+  const userIds = await DocManager.getTrackedChangesUserIds(projectId)
+  res.json(userIds)
+}
+
+async function projectHasRanges(req, res) {
+  const { project_id: projectId } = req.params
+  const projectHasRanges = await DocManager.projectHasRanges(projectId)
+  res.json({ projectHasRanges })
+}
+
+async function updateDoc(req, res) {
   const { doc_id: docId, project_id: projectId } = req.params
   const lines = req.body?.lines
   const version = req.body?.version
@@ -172,25 +133,20 @@ async function updateDoc(req, res) {
   }

   logger.debug({ projectId, docId }, 'got http request to update doc')
-  DocManager.updateDoc(
+  const { modified, rev } = await DocManager.updateDoc(
     projectId,
     docId,
     lines,
     version,
-    ranges,
-    function (error, modified, rev) {
-      if (error) {
-        return next(error)
-      }
-      res.json({
-        modified,
-        rev,
-      })
-    }
+    ranges
   )
+  res.json({
+    modified,
+    rev,
+  })
 }

-function patchDoc(req, res, next) {
+async function patchDoc(req, res) {
   const { doc_id: docId, project_id: projectId } = req.params
   logger.debug({ projectId, docId }, 'patching doc')

@@ -203,12 +159,8 @@ async function patchDoc(req, res) {
       logger.fatal({ field }, 'joi validation for pathDoc is broken')
     }
   })
-  DocManager.patchDoc(projectId, docId, meta, function (error) {
-    if (error) {
-      return next(error)
-    }
-    res.sendStatus(204)
-  })
+  await DocManager.patchDoc(projectId, docId, meta)
+  res.sendStatus(204)
 }

 function _buildDocView(doc) {
@@ -221,10 +173,6 @@ function _buildDocView(doc) {
   return docView
 }

-function _buildRawDocView(doc) {
-  return (doc?.lines ?? []).join('\n')
-}
-
 function _buildDocsArrayView(projectId, docs) {
   const docViews = []
   for (const doc of docs) {
@@ -241,79 +189,69 @@ function _buildDocsArrayView(projectId, docs) {
   return docViews
 }

-function archiveAllDocs(req, res, next) {
+async function archiveAllDocs(req, res) {
   const { project_id: projectId } = req.params
   logger.debug({ projectId }, 'archiving all docs')
-  DocArchive.archiveAllDocs(projectId, function (error) {
-    if (error) {
-      return next(error)
-    }
-    res.sendStatus(204)
-  })
+  await DocArchive.archiveAllDocs(projectId)
+  res.sendStatus(204)
 }

-function archiveDoc(req, res, next) {
+async function archiveDoc(req, res) {
   const { doc_id: docId, project_id: projectId } = req.params
   logger.debug({ projectId, docId }, 'archiving a doc')
-  DocArchive.archiveDoc(projectId, docId, function (error) {
-    if (error) {
-      return next(error)
-    }
-    res.sendStatus(204)
-  })
+  await DocArchive.archiveDoc(projectId, docId)
+  res.sendStatus(204)
 }

-function unArchiveAllDocs(req, res, next) {
+async function unArchiveAllDocs(req, res) {
   const { project_id: projectId } = req.params
   logger.debug({ projectId }, 'unarchiving all docs')
-  DocArchive.unArchiveAllDocs(projectId, function (err) {
-    if (err) {
-      if (err instanceof Errors.DocRevValueError) {
-        logger.warn({ err }, 'Failed to unarchive doc')
-        return res.sendStatus(409)
-      }
-      return next(err)
-    }
-    res.sendStatus(200)
+  try {
+    await DocArchive.unArchiveAllDocs(projectId)
+  } catch (err) {
+    if (err instanceof Errors.DocRevValueError) {
+      logger.warn({ err }, 'Failed to unarchive doc')
+      return res.sendStatus(409)
+    }
+    throw err
|
||||||
})
|
}
|
||||||
|
res.sendStatus(200)
|
||||||
}
|
}
|
||||||
|
|
||||||
function destroyProject(req, res, next) {
|
async function destroyProject(req, res) {
|
||||||
const { project_id: projectId } = req.params
|
const { project_id: projectId } = req.params
|
||||||
logger.debug({ projectId }, 'destroying all docs')
|
logger.debug({ projectId }, 'destroying all docs')
|
||||||
DocArchive.destroyProject(projectId, function (error) {
|
await DocArchive.destroyProject(projectId)
|
||||||
if (error) {
|
res.sendStatus(204)
|
||||||
return next(error)
|
|
||||||
}
|
|
||||||
res.sendStatus(204)
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function healthCheck(req, res) {
|
async function healthCheck(req, res) {
|
||||||
HealthChecker.check(function (err) {
|
try {
|
||||||
if (err) {
|
await HealthChecker.check()
|
||||||
logger.err({ err }, 'error performing health check')
|
} catch (err) {
|
||||||
res.sendStatus(500)
|
logger.err({ err }, 'error performing health check')
|
||||||
} else {
|
res.sendStatus(500)
|
||||||
res.sendStatus(200)
|
return
|
||||||
}
|
}
|
||||||
})
|
res.sendStatus(200)
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
getDoc,
|
getDoc: expressify(getDoc),
|
||||||
peekDoc,
|
peekDoc: expressify(peekDoc),
|
||||||
isDocDeleted,
|
isDocDeleted: expressify(isDocDeleted),
|
||||||
getRawDoc,
|
getRawDoc: expressify(getRawDoc),
|
||||||
getAllDocs,
|
getAllDocs: expressify(getAllDocs),
|
||||||
getAllDeletedDocs,
|
getAllDeletedDocs: expressify(getAllDeletedDocs),
|
||||||
getAllRanges,
|
getAllRanges: expressify(getAllRanges),
|
||||||
projectHasRanges,
|
getTrackedChangesUserIds: expressify(getTrackedChangesUserIds),
|
||||||
updateDoc,
|
getCommentThreadIds: expressify(getCommentThreadIds),
|
||||||
patchDoc,
|
projectHasRanges: expressify(projectHasRanges),
|
||||||
archiveAllDocs,
|
updateDoc: expressify(updateDoc),
|
||||||
archiveDoc,
|
patchDoc: expressify(patchDoc),
|
||||||
unArchiveAllDocs,
|
archiveAllDocs: expressify(archiveAllDocs),
|
||||||
destroyProject,
|
archiveDoc: expressify(archiveDoc),
|
||||||
healthCheck,
|
unArchiveAllDocs: expressify(unArchiveAllDocs),
|
||||||
|
destroyProject: expressify(destroyProject),
|
||||||
|
healthCheck: expressify(healthCheck),
|
||||||
}
|
}
|
||||||
|
|
|
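The new module.exports above depends on an expressify wrapper whose definition is not shown in this compare view. A minimal sketch of such a wrapper (Overleaf ships one in its promise-utils library; the body below is illustrative, not the exact library source):

// Adapt an async (req, res) handler to Express's callback-style
// error handling: a rejected promise is forwarded to next(), so the
// normal Express error middleware still sees failures.
function expressify(handler) {
  return function (req, res, next) {
    handler(req, res, next).catch(next)
  }
}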
services/docstore/app/js/MongoManager.js

@@ -1,7 +1,6 @@
 const { db, ObjectId } = require('./mongodb')
 const Settings = require('@overleaf/settings')
 const Errors = require('./Errors')
-const { callbackify } = require('node:util')
 
 const ARCHIVING_LOCK_DURATION_MS = Settings.archivingLockDurationMs

@@ -241,34 +240,17 @@ async function destroyProject(projectId) {
 }
 
 module.exports = {
-  findDoc: callbackify(findDoc),
-  getProjectsDeletedDocs: callbackify(getProjectsDeletedDocs),
-  getProjectsDocs: callbackify(getProjectsDocs),
-  getArchivedProjectDocs: callbackify(getArchivedProjectDocs),
-  getNonArchivedProjectDocIds: callbackify(getNonArchivedProjectDocIds),
-  getNonDeletedArchivedProjectDocs: callbackify(
-    getNonDeletedArchivedProjectDocs
-  ),
-  upsertIntoDocCollection: callbackify(upsertIntoDocCollection),
-  restoreArchivedDoc: callbackify(restoreArchivedDoc),
-  patchDoc: callbackify(patchDoc),
-  getDocForArchiving: callbackify(getDocForArchiving),
-  markDocAsArchived: callbackify(markDocAsArchived),
-  checkRevUnchanged: callbackify(checkRevUnchanged),
-  destroyProject: callbackify(destroyProject),
-  promises: {
-    findDoc,
-    getProjectsDeletedDocs,
-    getProjectsDocs,
-    getArchivedProjectDocs,
-    getNonArchivedProjectDocIds,
-    getNonDeletedArchivedProjectDocs,
-    upsertIntoDocCollection,
-    restoreArchivedDoc,
-    patchDoc,
-    getDocForArchiving,
-    markDocAsArchived,
-    checkRevUnchanged,
-    destroyProject,
-  },
+  findDoc,
+  getProjectsDeletedDocs,
+  getProjectsDocs,
+  getArchivedProjectDocs,
+  getNonArchivedProjectDocIds,
+  getNonDeletedArchivedProjectDocs,
+  upsertIntoDocCollection,
+  restoreArchivedDoc,
+  patchDoc,
+  getDocForArchiving,
+  markDocAsArchived,
+  checkRevUnchanged,
+  destroyProject,
 }
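With callbackify removed, every MongoManager function is exported async-only, so callers that previously went through the promises sub-object (or passed callbacks) now await the top-level export directly. An illustrative call site, inside an async function, with projectId, docId, and filter assumed in scope:

// before: const doc = await MongoManager.promises.findDoc(projectId, docId, filter)
// after:
const doc = await MongoManager.findDoc(projectId, docId, filter)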
services/docstore/app/js/RangeManager.js

@@ -49,15 +49,25 @@ module.exports = RangeManager = {
       updateMetadata(change.metadata)
     }
     for (const comment of Array.from(ranges.comments || [])) {
-      comment.id = RangeManager._safeObjectId(comment.id)
-      if ((comment.op != null ? comment.op.t : undefined) != null) {
-        comment.op.t = RangeManager._safeObjectId(comment.op.t)
-      }
+      // Two bugs resulted in mismatched ids, prefer the thread id from the op: https://github.com/overleaf/internal/issues/23272
+      comment.id = RangeManager._safeObjectId(comment.op?.t || comment.id)
+      if (comment.op) comment.op.t = comment.id
+
+      // resolved property is added to comments when they are obtained from history, but this state doesn't belong in mongo docs collection
+      // more info: https://github.com/overleaf/internal/issues/24371#issuecomment-2913095174
+      delete comment.op?.resolved
       updateMetadata(comment.metadata)
     }
     return ranges
   },
 
+  fixCommentIds(doc) {
+    for (const comment of doc?.ranges?.comments || []) {
+      // Two bugs resulted in mismatched ids, prefer the thread id from the op: https://github.com/overleaf/internal/issues/23272
+      if (comment.op?.t) comment.id = comment.op.t
+    }
+  },
+
   _safeObjectId(data) {
     try {
       return new ObjectId(data)
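For reference, the new fixCommentIds only rewrites a comment's id when its op carries a thread id. A small illustrative before/after, with object shapes taken from the diff above and the id values invented:

// before: doc.ranges.comments[0] is { id: 'mongo-id', op: { c: 'x', p: 1, t: 'thread-id' } }
RangeManager.fixCommentIds(doc)
// after:  doc.ranges.comments[0] is { id: 'thread-id', op: { c: 'x', p: 1, t: 'thread-id' } }
// a comment whose op has no t property is left untouched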
services/docstore/app/js/StreamToBuffer.js

@@ -2,13 +2,9 @@ const { LoggerStream, WritableBuffer } = require('@overleaf/stream-utils')
 const Settings = require('@overleaf/settings')
 const logger = require('@overleaf/logger/logging-manager')
 const { pipeline } = require('node:stream/promises')
-const { callbackify } = require('node:util')
 
 module.exports = {
-  streamToBuffer: callbackify(streamToBuffer),
-  promises: {
-    streamToBuffer,
-  },
+  streamToBuffer,
 }
 
 async function streamToBuffer(projectId, docId, stream) {
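streamToBuffer is now exported only as an async function, so consumers such as DocArchiveManager await it directly. An illustrative call site, inside an async function, where stream is assumed to be a readable stream of the archived doc JSON:

const { streamToBuffer } = require('./StreamToBuffer')
// collect the whole stream into a single Buffer
const buffer = await streamToBuffer(projectId, docId, stream)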
services/docstore/docker-compose.ci.yml

@@ -27,12 +27,15 @@ services:
       MOCHA_GREP: ${MOCHA_GREP}
       NODE_ENV: test
       NODE_OPTIONS: "--unhandled-rejections=strict"
+    volumes:
+      - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
     depends_on:
       mongo:
         condition: service_started
       gcs:
         condition: service_healthy
     user: node
+    entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
     command: npm run test:acceptance
services/docstore/docker-compose.yml

@@ -26,6 +26,7 @@ services:
       - .:/overleaf/services/docstore
       - ../../node_modules:/overleaf/node_modules
       - ../../libraries:/overleaf/libraries
+      - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
     working_dir: /overleaf/services/docstore
     environment:
       ELASTIC_SEARCH_DSN: es:9200

@@ -44,6 +45,7 @@ services:
         condition: service_started
       gcs:
         condition: service_healthy
+    entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
     command: npm run --silent test:acceptance
 
   mongo:
services/docstore/package.json

@@ -17,6 +17,7 @@
     "types:check": "tsc --noEmit"
   },
   "dependencies": {
+    "@overleaf/fetch-utils": "*",
     "@overleaf/logger": "*",
     "@overleaf/metrics": "*",
     "@overleaf/o-error": "*",
services/docstore/test/acceptance/js/ArchivingTests.js

@@ -1001,6 +1001,15 @@ describe('Archiving', function () {
       },
       version: 2,
     }
+    this.fixedRanges = {
+      ...this.doc.ranges,
+      comments: [
+        {
+          ...this.doc.ranges.comments[0],
+          id: this.doc.ranges.comments[0].op.t,
+        },
+      ],
+    }
     return DocstoreClient.createDoc(
       this.project_id,
       this.doc._id,

@@ -1048,7 +1057,7 @@ describe('Archiving', function () {
           throw error
         }
         s3Doc.lines.should.deep.equal(this.doc.lines)
-        const ranges = JSON.parse(JSON.stringify(this.doc.ranges)) // ObjectId -> String
+        const ranges = JSON.parse(JSON.stringify(this.fixedRanges)) // ObjectId -> String
         s3Doc.ranges.should.deep.equal(ranges)
         return done()
       }

@@ -1075,7 +1084,7 @@ describe('Archiving', function () {
           throw error
         }
         doc.lines.should.deep.equal(this.doc.lines)
-        doc.ranges.should.deep.equal(this.doc.ranges)
+        doc.ranges.should.deep.equal(this.fixedRanges)
         expect(doc.inS3).not.to.exist
         return done()
       })
services/docstore/test/acceptance/js/GettingAllDocsTests.js

@@ -20,30 +20,73 @@ const DocstoreClient = require('./helpers/DocstoreClient')
 describe('Getting all docs', function () {
   beforeEach(function (done) {
     this.project_id = new ObjectId()
+    this.threadId1 = new ObjectId().toString()
+    this.threadId2 = new ObjectId().toString()
     this.docs = [
       {
         _id: new ObjectId(),
         lines: ['one', 'two', 'three'],
-        ranges: { mock: 'one' },
+        ranges: {
+          comments: [
+            { id: new ObjectId().toString(), op: { t: this.threadId1 } },
+          ],
+          changes: [
+            {
+              id: new ObjectId().toString(),
+              metadata: { user_id: 'user-id-1' },
+            },
+          ],
+        },
         rev: 2,
       },
       {
         _id: new ObjectId(),
         lines: ['aaa', 'bbb', 'ccc'],
-        ranges: { mock: 'two' },
+        ranges: {
+          changes: [
+            {
+              id: new ObjectId().toString(),
+              metadata: { user_id: 'user-id-2' },
+            },
+          ],
+        },
         rev: 4,
       },
       {
         _id: new ObjectId(),
         lines: ['111', '222', '333'],
-        ranges: { mock: 'three' },
+        ranges: {
+          comments: [
+            { id: new ObjectId().toString(), op: { t: this.threadId2 } },
+          ],
+          changes: [
+            {
+              id: new ObjectId().toString(),
+              metadata: { user_id: 'anonymous-user' },
+            },
+          ],
+        },
         rev: 6,
       },
     ]
+    this.fixedRanges = this.docs.map(doc => {
+      if (!doc.ranges?.comments?.length) return doc.ranges
+      return {
+        ...doc.ranges,
+        comments: [
+          { ...doc.ranges.comments[0], id: doc.ranges.comments[0].op.t },
+        ],
+      }
+    })
     this.deleted_doc = {
       _id: new ObjectId(),
       lines: ['deleted'],
-      ranges: { mock: 'four' },
+      ranges: {
+        comments: [{ id: new ObjectId().toString(), op: { t: 'thread-id-3' } }],
+        changes: [
+          { id: new ObjectId().toString(), metadata: { user_id: 'user-id-3' } },
+        ],
+      },
       rev: 8,
     }
     const version = 42

@@ -96,7 +139,7 @@ describe('Getting all docs', function () {
     })
   })
 
-  return it('getAllRanges should return all the (non-deleted) doc ranges', function (done) {
+  it('getAllRanges should return all the (non-deleted) doc ranges', function (done) {
     return DocstoreClient.getAllRanges(this.project_id, (error, res, docs) => {
       if (error != null) {
         throw error

@@ -104,9 +147,38 @@ describe('Getting all docs', function () {
       docs.length.should.equal(this.docs.length)
       for (let i = 0; i < docs.length; i++) {
         const doc = docs[i]
-        doc.ranges.should.deep.equal(this.docs[i].ranges)
+        doc.ranges.should.deep.equal(this.fixedRanges[i])
       }
       return done()
     })
   })
+
+  it('getTrackedChangesUserIds should return all the user ids from (non-deleted) ranges', function (done) {
+    DocstoreClient.getTrackedChangesUserIds(
+      this.project_id,
+      (error, res, userIds) => {
+        if (error != null) {
+          throw error
+        }
+        userIds.should.deep.equal(['user-id-1', 'user-id-2'])
+        done()
+      }
+    )
+  })
+
+  it('getCommentThreadIds should return all the thread ids from (non-deleted) ranges', function (done) {
+    DocstoreClient.getCommentThreadIds(
+      this.project_id,
+      (error, res, threadIds) => {
+        if (error != null) {
+          throw error
+        }
+        threadIds.should.deep.equal({
+          [this.docs[0]._id.toString()]: [this.threadId1],
+          [this.docs[2]._id.toString()]: [this.threadId2],
+        })
+        done()
+      }
+    )
+  })
 })
services/docstore/test/acceptance/js/GettingDocsTests.js

@@ -28,10 +28,26 @@ describe('Getting a doc', function () {
           op: { i: 'foo', p: 3 },
           meta: {
             user_id: new ObjectId().toString(),
-            ts: new Date().toString(),
+            ts: new Date().toJSON(),
           },
         },
       ],
+      comments: [
+        {
+          id: new ObjectId().toString(),
+          op: { c: 'comment', p: 1, t: new ObjectId().toString() },
+          metadata: {
+            user_id: new ObjectId().toString(),
+            ts: new Date().toJSON(),
+          },
+        },
+      ],
+    }
+    this.fixedRanges = {
+      ...this.ranges,
+      comments: [
+        { ...this.ranges.comments[0], id: this.ranges.comments[0].op.t },
+      ],
     }
     return DocstoreApp.ensureRunning(() => {
       return DocstoreClient.createDoc(

@@ -60,7 +76,7 @@ describe('Getting a doc', function () {
           if (error) return done(error)
           doc.lines.should.deep.equal(this.lines)
           doc.version.should.equal(this.version)
-          doc.ranges.should.deep.equal(this.ranges)
+          doc.ranges.should.deep.equal(this.fixedRanges)
           return done()
         }
       )

@@ -114,7 +130,7 @@ describe('Getting a doc', function () {
           if (error) return done(error)
           doc.lines.should.deep.equal(this.lines)
           doc.version.should.equal(this.version)
-          doc.ranges.should.deep.equal(this.ranges)
+          doc.ranges.should.deep.equal(this.fixedRanges)
           doc.deleted.should.equal(true)
           return done()
         }
services/docstore/test/acceptance/js/HealthCheckerTest.js (new file, 28 lines)

@@ -0,0 +1,28 @@
+const { db } = require('../../../app/js/mongodb')
+const DocstoreApp = require('./helpers/DocstoreApp')
+const DocstoreClient = require('./helpers/DocstoreClient')
+const { expect } = require('chai')
+
+describe('HealthChecker', function () {
+  beforeEach('start', function (done) {
+    DocstoreApp.ensureRunning(done)
+  })
+  beforeEach('clear docs collection', async function () {
+    await db.docs.deleteMany({})
+  })
+  let res
+  beforeEach('run health check', function (done) {
+    DocstoreClient.healthCheck((err, _res) => {
+      res = _res
+      done(err)
+    })
+  })
+
+  it('should return 200', function () {
+    res.statusCode.should.equal(200)
+  })
+
+  it('should not leave any cruft behind', async function () {
+    expect(await db.docs.find({}).toArray()).to.deep.equal([])
+  })
+})
services/docstore/test/acceptance/js/helpers/DocstoreClient.js

@@ -100,6 +100,26 @@ module.exports = DocstoreClient = {
     )
   },
 
+  getCommentThreadIds(projectId, callback) {
+    request.get(
+      {
+        url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/comment-thread-ids`,
+        json: true,
+      },
+      callback
+    )
+  },
+
+  getTrackedChangesUserIds(projectId, callback) {
+    request.get(
+      {
+        url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/tracked-changes-user-ids`,
+        json: true,
+      },
+      callback
+    )
+  },
+
   updateDoc(projectId, docId, lines, version, ranges, callback) {
     return request.post(
       {

@@ -181,6 +201,13 @@ module.exports = DocstoreClient = {
     )
   },
 
+  healthCheck(callback) {
+    request.get(
+      `http://127.0.0.1:${settings.internal.docstore.port}/health_check`,
+      callback
+    )
+  },
+
   getS3Doc(projectId, docId, callback) {
     getStringFromPersistor(
       Persistor,
services/docstore/test/unit/js/DocArchiveManagerTests.js

@@ -4,7 +4,7 @@ const modulePath = '../../../app/js/DocArchiveManager.js'
 const SandboxedModule = require('sandboxed-module')
 const { ObjectId } = require('mongodb-legacy')
 const Errors = require('../../../app/js/Errors')
-const StreamToBuffer = require('../../../app/js/StreamToBuffer').promises
+const StreamToBuffer = require('../../../app/js/StreamToBuffer')
 
 describe('DocArchiveManager', function () {
   let DocArchiveManager,

@@ -31,6 +31,7 @@ describe('DocArchiveManager', function () {
 
     RangeManager = {
       jsonRangesToMongo: sinon.stub().returns({ mongo: 'ranges' }),
+      fixCommentIds: sinon.stub(),
     }
     Settings = {
       docstore: {

@@ -142,37 +143,33 @@ describe('DocArchiveManager', function () {
     }
 
     MongoManager = {
-      promises: {
-        markDocAsArchived: sinon.stub().resolves(),
-        restoreArchivedDoc: sinon.stub().resolves(),
-        upsertIntoDocCollection: sinon.stub().resolves(),
-        getProjectsDocs: sinon.stub().resolves(mongoDocs),
-        getNonDeletedArchivedProjectDocs: getArchivedProjectDocs,
-        getNonArchivedProjectDocIds,
-        getArchivedProjectDocs,
-        findDoc: sinon.stub().callsFake(fakeGetDoc),
-        getDocForArchiving: sinon.stub().callsFake(fakeGetDoc),
-        destroyProject: sinon.stub().resolves(),
-      },
+      markDocAsArchived: sinon.stub().resolves(),
+      restoreArchivedDoc: sinon.stub().resolves(),
+      upsertIntoDocCollection: sinon.stub().resolves(),
+      getProjectsDocs: sinon.stub().resolves(mongoDocs),
+      getNonDeletedArchivedProjectDocs: getArchivedProjectDocs,
+      getNonArchivedProjectDocIds,
+      getArchivedProjectDocs,
+      findDoc: sinon.stub().callsFake(fakeGetDoc),
+      getDocForArchiving: sinon.stub().callsFake(fakeGetDoc),
+      destroyProject: sinon.stub().resolves(),
     }
 
     // Wrap streamToBuffer so that we can pass in something that it expects (in
     // this case, a Promise) rather than a stubbed stream object
     streamToBuffer = {
-      promises: {
-        streamToBuffer: async () => {
-          const inputStream = new Promise(resolve => {
-            stream.on('data', data => resolve(data))
-          })
-
-          const value = await StreamToBuffer.streamToBuffer(
-            'testProjectId',
-            'testDocId',
-            inputStream
-          )
-
-          return value
-        },
-      },
+      streamToBuffer: async () => {
+        const inputStream = new Promise(resolve => {
+          stream.on('data', data => resolve(data))
+        })
+
+        const value = await StreamToBuffer.streamToBuffer(
+          'testProjectId',
+          'testDocId',
+          inputStream
+        )
+
+        return value
+      },
     }

@@ -192,9 +189,13 @@ describe('DocArchiveManager', function () {
 
   describe('archiveDoc', function () {
     it('should resolve when passed a valid document', async function () {
-      await expect(
-        DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
-      ).to.eventually.be.fulfilled
+      await expect(DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)).to
+        .eventually.be.fulfilled
+    })
+
+    it('should fix comment ids', async function () {
+      await DocArchiveManager.archiveDoc(projectId, mongoDocs[1]._id)
+      expect(RangeManager.fixCommentIds).to.have.been.called
     })
 
     it('should throw an error if the doc has no lines', async function () {

@@ -202,26 +203,26 @@ describe('DocArchiveManager', function () {
       doc.lines = null
 
       await expect(
-        DocArchiveManager.promises.archiveDoc(projectId, doc._id)
+        DocArchiveManager.archiveDoc(projectId, doc._id)
       ).to.eventually.be.rejectedWith('doc has no lines')
     })
 
     it('should add the schema version', async function () {
-      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[1]._id)
+      await DocArchiveManager.archiveDoc(projectId, mongoDocs[1]._id)
       expect(StreamUtils.ReadableString).to.have.been.calledWith(
         sinon.match(/"schema_v":1/)
       )
     })
 
     it('should calculate the hex md5 sum of the content', async function () {
-      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
+      await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
       expect(Crypto.createHash).to.have.been.calledWith('md5')
       expect(HashUpdate).to.have.been.calledWith(archivedDocJson)
      expect(HashDigest).to.have.been.calledWith('hex')
     })
 
     it('should pass the md5 hash to the object persistor for verification', async function () {
-      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
+      await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
 
       expect(PersistorManager.sendStream).to.have.been.calledWith(
         sinon.match.any,

@@ -232,7 +233,7 @@ describe('DocArchiveManager', function () {
     })
 
     it('should pass the correct bucket and key to the persistor', async function () {
-      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
+      await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
 
       expect(PersistorManager.sendStream).to.have.been.calledWith(
         Settings.docstore.bucket,

@@ -241,7 +242,7 @@ describe('DocArchiveManager', function () {
     })
 
     it('should create a stream from the encoded json and send it', async function () {
-      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
+      await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
       expect(StreamUtils.ReadableString).to.have.been.calledWith(
         archivedDocJson
       )

@@ -253,8 +254,8 @@ describe('DocArchiveManager', function () {
     })
 
     it('should mark the doc as archived', async function () {
-      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
-      expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
+      await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
+      expect(MongoManager.markDocAsArchived).to.have.been.calledWith(
         projectId,
         mongoDocs[0]._id,
         mongoDocs[0].rev

@@ -267,8 +268,8 @@ describe('DocArchiveManager', function () {
     })
 
     it('should bail out early', async function () {
-      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
-      expect(MongoManager.promises.getDocForArchiving).to.not.have.been.called
+      await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
+      expect(MongoManager.getDocForArchiving).to.not.have.been.called
     })
   })

@@ -285,7 +286,7 @@ describe('DocArchiveManager', function () {
 
     it('should return an error', async function () {
       await expect(
-        DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
+        DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
       ).to.eventually.be.rejectedWith('null bytes detected')
     })
   })

@@ -296,21 +297,19 @@ describe('DocArchiveManager', function () {
 
   describe('when the doc is in S3', function () {
     beforeEach(function () {
-      MongoManager.promises.findDoc = sinon
-        .stub()
-        .resolves({ inS3: true, rev })
+      MongoManager.findDoc = sinon.stub().resolves({ inS3: true, rev })
       docId = mongoDocs[0]._id
       lines = ['doc', 'lines']
       rev = 123
     })
 
     it('should resolve when passed a valid document', async function () {
-      await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId))
-        .to.eventually.be.fulfilled
+      await expect(DocArchiveManager.unarchiveDoc(projectId, docId)).to
+        .eventually.be.fulfilled
     })
 
     it('should test md5 validity with the raw buffer', async function () {
-      await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
+      await DocArchiveManager.unarchiveDoc(projectId, docId)
       expect(HashUpdate).to.have.been.calledWith(
         sinon.match.instanceOf(Buffer)
       )

@@ -319,15 +318,17 @@ describe('DocArchiveManager', function () {
     it('should throw an error if the md5 does not match', async function () {
       PersistorManager.getObjectMd5Hash.resolves('badf00d')
       await expect(
-        DocArchiveManager.promises.unarchiveDoc(projectId, docId)
+        DocArchiveManager.unarchiveDoc(projectId, docId)
       ).to.eventually.be.rejected.and.be.instanceof(Errors.Md5MismatchError)
     })
 
     it('should restore the doc in Mongo', async function () {
-      await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
-      expect(
-        MongoManager.promises.restoreArchivedDoc
-      ).to.have.been.calledWith(projectId, docId, archivedDoc)
+      await DocArchiveManager.unarchiveDoc(projectId, docId)
+      expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith(
+        projectId,
+        docId,
+        archivedDoc
+      )
     })
 
     describe('when archiving is not configured', function () {

@@ -337,15 +338,15 @@ describe('DocArchiveManager', function () {
 
       it('should error out on archived doc', async function () {
         await expect(
-          DocArchiveManager.promises.unarchiveDoc(projectId, docId)
+          DocArchiveManager.unarchiveDoc(projectId, docId)
         ).to.eventually.be.rejected.and.match(
           /found archived doc, but archiving backend is not configured/
         )
       })
 
       it('should return early on non-archived doc', async function () {
-        MongoManager.promises.findDoc = sinon.stub().resolves({ rev })
-        await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
+        MongoManager.findDoc = sinon.stub().resolves({ rev })
+        await DocArchiveManager.unarchiveDoc(projectId, docId)
         expect(PersistorManager.getObjectMd5Hash).to.not.have.been.called
       })
     })

@@ -363,10 +364,12 @@ describe('DocArchiveManager', function () {
       })
 
       it('should return the docs lines', async function () {
-        await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
-        expect(
-          MongoManager.promises.restoreArchivedDoc
-        ).to.have.been.calledWith(projectId, docId, { lines, rev })
+        await DocArchiveManager.unarchiveDoc(projectId, docId)
+        expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith(
+          projectId,
+          docId,
+          { lines, rev }
+        )
       })
     })

@@ -385,14 +388,16 @@ describe('DocArchiveManager', function () {
       })
 
       it('should return the doc lines and ranges', async function () {
-        await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
-        expect(
-          MongoManager.promises.restoreArchivedDoc
-        ).to.have.been.calledWith(projectId, docId, {
-          lines,
-          ranges: { mongo: 'ranges' },
-          rev: 456,
-        })
+        await DocArchiveManager.unarchiveDoc(projectId, docId)
+        expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith(
+          projectId,
+          docId,
+          {
+            lines,
+            ranges: { mongo: 'ranges' },
+            rev: 456,
+          }
+        )
       })
     })

@@ -406,10 +411,12 @@ describe('DocArchiveManager', function () {
       })
 
       it('should return only the doc lines', async function () {
-        await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
-        expect(
-          MongoManager.promises.restoreArchivedDoc
-        ).to.have.been.calledWith(projectId, docId, { lines, rev: 456 })
+        await DocArchiveManager.unarchiveDoc(projectId, docId)
+        expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith(
+          projectId,
+          docId,
+          { lines, rev: 456 }
+        )
       })
     })

@@ -423,10 +430,12 @@ describe('DocArchiveManager', function () {
      })
 
       it('should use the rev obtained from Mongo', async function () {
-        await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
-        expect(
-          MongoManager.promises.restoreArchivedDoc
-        ).to.have.been.calledWith(projectId, docId, { lines, rev })
+        await DocArchiveManager.unarchiveDoc(projectId, docId)
+        expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith(
+          projectId,
+          docId,
+          { lines, rev }
+        )
       })
     })

@@ -441,7 +450,7 @@ describe('DocArchiveManager', function () {
 
       it('should throw an error', async function () {
         await expect(
-          DocArchiveManager.promises.unarchiveDoc(projectId, docId)
+          DocArchiveManager.unarchiveDoc(projectId, docId)
         ).to.eventually.be.rejectedWith(
           "I don't understand the doc format in s3"
         )

@@ -451,8 +460,8 @@ describe('DocArchiveManager', function () {
     })
 
     it('should not do anything if the file is already unarchived', async function () {
-      MongoManager.promises.findDoc.resolves({ inS3: false })
-      await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
+      MongoManager.findDoc.resolves({ inS3: false })
+      await DocArchiveManager.unarchiveDoc(projectId, docId)
       expect(PersistorManager.getObjectStream).not.to.have.been.called
     })

@@ -461,7 +470,7 @@ describe('DocArchiveManager', function () {
         .stub()
         .rejects(new Errors.NotFoundError())
       await expect(
-        DocArchiveManager.promises.unarchiveDoc(projectId, docId)
+        DocArchiveManager.unarchiveDoc(projectId, docId)
       ).to.eventually.be.rejected.and.be.instanceof(Errors.NotFoundError)
     })
   })

@@ -469,13 +478,11 @@ describe('DocArchiveManager', function () {
   describe('destroyProject', function () {
     describe('when archiving is enabled', function () {
       beforeEach(async function () {
-        await DocArchiveManager.promises.destroyProject(projectId)
+        await DocArchiveManager.destroyProject(projectId)
       })
 
       it('should delete the project in Mongo', function () {
-        expect(MongoManager.promises.destroyProject).to.have.been.calledWith(
-          projectId
-        )
+        expect(MongoManager.destroyProject).to.have.been.calledWith(projectId)
       })
 
       it('should delete the project in the persistor', function () {

@@ -489,13 +496,11 @@ describe('DocArchiveManager', function () {
     describe('when archiving is disabled', function () {
       beforeEach(async function () {
         Settings.docstore.backend = ''
-        await DocArchiveManager.promises.destroyProject(projectId)
+        await DocArchiveManager.destroyProject(projectId)
       })
 
       it('should delete the project in Mongo', function () {
-        expect(MongoManager.promises.destroyProject).to.have.been.calledWith(
-          projectId
-        )
+        expect(MongoManager.destroyProject).to.have.been.calledWith(projectId)
       })
 
       it('should not delete the project in the persistor', function () {

@@ -506,33 +511,35 @@ describe('DocArchiveManager', function () {
 
   describe('archiveAllDocs', function () {
     it('should resolve with valid arguments', async function () {
-      await expect(DocArchiveManager.promises.archiveAllDocs(projectId)).to
-        .eventually.be.fulfilled
+      await expect(DocArchiveManager.archiveAllDocs(projectId)).to.eventually.be
+        .fulfilled
     })
 
     it('should archive all project docs which are not in s3', async function () {
-      await DocArchiveManager.promises.archiveAllDocs(projectId)
+      await DocArchiveManager.archiveAllDocs(projectId)
       // not inS3
-      expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
+      expect(MongoManager.markDocAsArchived).to.have.been.calledWith(
         projectId,
         mongoDocs[0]._id
       )
-      expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
+      expect(MongoManager.markDocAsArchived).to.have.been.calledWith(
        projectId,
         mongoDocs[1]._id
       )
-      expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
+      expect(MongoManager.markDocAsArchived).to.have.been.calledWith(
         projectId,
         mongoDocs[4]._id
       )
 
       // inS3
-      expect(
-        MongoManager.promises.markDocAsArchived
-      ).not.to.have.been.calledWith(projectId, mongoDocs[2]._id)
-      expect(
-        MongoManager.promises.markDocAsArchived
-      ).not.to.have.been.calledWith(projectId, mongoDocs[3]._id)
+      expect(MongoManager.markDocAsArchived).not.to.have.been.calledWith(
+        projectId,
+        mongoDocs[2]._id
+      )
+      expect(MongoManager.markDocAsArchived).not.to.have.been.calledWith(
+        projectId,
+        mongoDocs[3]._id
+      )
     })
 
     describe('when archiving is not configured', function () {

@@ -541,21 +548,20 @@ describe('DocArchiveManager', function () {
      })
 
       it('should bail out early', async function () {
-        await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
-        expect(MongoManager.promises.getNonArchivedProjectDocIds).to.not.have
-          .been.called
+        await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
+        expect(MongoManager.getNonArchivedProjectDocIds).to.not.have.been.called
       })
     })
   })
 
   describe('unArchiveAllDocs', function () {
     it('should resolve with valid arguments', async function () {
-      await expect(DocArchiveManager.promises.unArchiveAllDocs(projectId)).to
-        .eventually.be.fulfilled
+      await expect(DocArchiveManager.unArchiveAllDocs(projectId)).to.eventually
+        .be.fulfilled
     })
 
     it('should unarchive all inS3 docs', async function () {
-      await DocArchiveManager.promises.unArchiveAllDocs(projectId)
+      await DocArchiveManager.unArchiveAllDocs(projectId)
 
       for (const doc of archivedDocs) {
        expect(PersistorManager.getObjectStream).to.have.been.calledWith(

@@ -571,9 +577,9 @@ describe('DocArchiveManager', function () {
     })
 
     it('should bail out early', async function () {
-      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
-      expect(MongoManager.promises.getNonDeletedArchivedProjectDocs).to.not
-        .have.been.called
+      await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
+      expect(MongoManager.getNonDeletedArchivedProjectDocs).to.not.have.been
+        .called
     })
   })
 })
@ -17,25 +17,22 @@ describe('DocManager', function () {
|
||||||
this.version = 42
|
this.version = 42
|
||||||
|
|
||||||
this.MongoManager = {
|
this.MongoManager = {
|
||||||
promises: {
|
findDoc: sinon.stub(),
|
||||||
findDoc: sinon.stub(),
|
getProjectsDocs: sinon.stub(),
|
||||||
getProjectsDocs: sinon.stub(),
|
patchDoc: sinon.stub().resolves(),
|
||||||
patchDoc: sinon.stub().resolves(),
|
upsertIntoDocCollection: sinon.stub().resolves(),
|
||||||
upsertIntoDocCollection: sinon.stub().resolves(),
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
this.DocArchiveManager = {
|
this.DocArchiveManager = {
|
||||||
promises: {
|
unarchiveDoc: sinon.stub(),
|
||||||
unarchiveDoc: sinon.stub(),
|
unArchiveAllDocs: sinon.stub(),
|
||||||
unArchiveAllDocs: sinon.stub(),
|
archiveDoc: sinon.stub().resolves(),
|
||||||
archiveDoc: sinon.stub().resolves(),
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
this.RangeManager = {
|
this.RangeManager = {
|
||||||
jsonRangesToMongo(r) {
|
jsonRangesToMongo(r) {
|
||||||
return r
|
return r
|
||||||
},
|
},
|
||||||
shouldUpdateRanges: sinon.stub().returns(false),
|
shouldUpdateRanges: sinon.stub().returns(false),
|
||||||
|
fixCommentIds: sinon.stub(),
|
||||||
}
|
}
|
||||||
this.settings = { docstore: {} }
|
this.settings = { docstore: {} }
|
||||||
|
|
||||||
|
@ -52,7 +49,7 @@ describe('DocManager', function () {
|
||||||
|
|
||||||
describe('getFullDoc', function () {
|
describe('getFullDoc', function () {
|
||||||
beforeEach(function () {
|
beforeEach(function () {
|
||||||
this.DocManager.promises._getDoc = sinon.stub()
|
this.DocManager._getDoc = sinon.stub()
|
||||||
this.doc = {
|
this.doc = {
|
||||||
_id: this.doc_id,
|
_id: this.doc_id,
|
||||||
lines: ['2134'],
|
lines: ['2134'],
|
||||||
|
@ -60,13 +57,10 @@ describe('DocManager', function () {
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should call get doc with a quick filter', async function () {
|
it('should call get doc with a quick filter', async function () {
|
||||||
this.DocManager.promises._getDoc.resolves(this.doc)
|
this.DocManager._getDoc.resolves(this.doc)
|
||||||
const doc = await this.DocManager.promises.getFullDoc(
|
const doc = await this.DocManager.getFullDoc(this.project_id, this.doc_id)
|
||||||
this.project_id,
|
|
||||||
this.doc_id
|
|
||||||
)
|
|
||||||
doc.should.equal(this.doc)
|
doc.should.equal(this.doc)
|
||||||
this.DocManager.promises._getDoc
|
this.DocManager._getDoc
|
||||||
.calledWith(this.project_id, this.doc_id, {
|
.calledWith(this.project_id, this.doc_id, {
|
||||||
lines: true,
|
lines: true,
|
||||||
rev: true,
|
rev: true,
|
||||||
|
@ -79,27 +73,27 @@ describe('DocManager', function () {
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should return error when get doc errors', async function () {
|
it('should return error when get doc errors', async function () {
|
||||||
this.DocManager.promises._getDoc.rejects(this.stubbedError)
|
this.DocManager._getDoc.rejects(this.stubbedError)
|
||||||
await expect(
|
await expect(
|
||||||
this.DocManager.promises.getFullDoc(this.project_id, this.doc_id)
|
this.DocManager.getFullDoc(this.project_id, this.doc_id)
|
||||||
).to.be.rejectedWith(this.stubbedError)
|
).to.be.rejectedWith(this.stubbedError)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('getRawDoc', function () {
|
describe('getRawDoc', function () {
|
||||||
beforeEach(function () {
|
beforeEach(function () {
|
||||||
this.DocManager.promises._getDoc = sinon.stub()
|
this.DocManager._getDoc = sinon.stub()
|
||||||
this.doc = { lines: ['2134'] }
|
this.doc = { lines: ['2134'] }
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should call get doc with a quick filter', async function () {
|
it('should call get doc with a quick filter', async function () {
|
||||||
this.DocManager.promises._getDoc.resolves(this.doc)
|
this.DocManager._getDoc.resolves(this.doc)
|
||||||
const doc = await this.DocManager.promises.getDocLines(
|
const content = await this.DocManager.getDocLines(
|
||||||
this.project_id,
|
this.project_id,
|
||||||
this.doc_id
|
this.doc_id
|
||||||
)
|
)
|
||||||
doc.should.equal(this.doc)
|
content.should.equal(this.doc.lines.join('\n'))
|
||||||
this.DocManager.promises._getDoc
|
this.DocManager._getDoc
|
||||||
.calledWith(this.project_id, this.doc_id, {
|
.calledWith(this.project_id, this.doc_id, {
|
||||||
lines: true,
|
lines: true,
|
||||||
inS3: true,
|
inS3: true,
|
||||||
|
@ -108,11 +102,46 @@ describe('DocManager', function () {
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should return error when get doc errors', async function () {
|
it('should return error when get doc errors', async function () {
|
||||||
this.DocManager.promises._getDoc.rejects(this.stubbedError)
|
this.DocManager._getDoc.rejects(this.stubbedError)
|
||||||
await expect(
|
await expect(
|
||||||
this.DocManager.promises.getDocLines(this.project_id, this.doc_id)
|
this.DocManager.getDocLines(this.project_id, this.doc_id)
|
||||||
).to.be.rejectedWith(this.stubbedError)
|
).to.be.rejectedWith(this.stubbedError)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
it('should return error when get doc does not exist', async function () {
|
||||||
|
this.DocManager._getDoc.resolves(null)
|
||||||
|
await expect(
|
||||||
|
this.DocManager.getDocLines(this.project_id, this.doc_id)
|
||||||
|
).to.be.rejectedWith(Errors.NotFoundError)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should return error when get doc has no lines', async function () {
|
||||||
|
this.DocManager._getDoc.resolves({})
|
||||||
|
await expect(
|
||||||
|
this.DocManager.getDocLines(this.project_id, this.doc_id)
|
||||||
|
).to.be.rejectedWith(Errors.DocWithoutLinesError)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('_getDoc', function () {
|
||||||
|
it('should return error when get doc does not exist', async function () {
|
||||||
|
this.MongoManager.findDoc.resolves(null)
|
||||||
|
await expect(
|
||||||
|
this.DocManager._getDoc(this.project_id, this.doc_id, { inS3: true })
|
||||||
|
).to.be.rejectedWith(Errors.NotFoundError)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should fix comment ids', async function () {
|
||||||
|
this.MongoManager.findDoc.resolves({
|
||||||
|
_id: this.doc_id,
|
||||||
|
ranges: {},
|
||||||
|
})
|
||||||
|
await this.DocManager._getDoc(this.project_id, this.doc_id, {
|
||||||
|
inS3: true,
|
||||||
|
ranges: true,
|
||||||
|
})
|
||||||
|
expect(this.RangeManager.fixCommentIds).to.have.been.called
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
 describe('getDoc', function () {

@@ -128,26 +157,25 @@ describe('DocManager', function () {

     describe('when using a filter', function () {
       beforeEach(function () {
-        this.MongoManager.promises.findDoc.resolves(this.doc)
+        this.MongoManager.findDoc.resolves(this.doc)
       })

       it('should error if inS3 is not set to true', async function () {
         await expect(
-          this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
+          this.DocManager._getDoc(this.project_id, this.doc_id, {
             inS3: false,
           })
         ).to.be.rejected
       })

       it('should always get inS3 even when no filter is passed', async function () {
-        await expect(
-          this.DocManager.promises._getDoc(this.project_id, this.doc_id)
-        ).to.be.rejected
-        this.MongoManager.promises.findDoc.called.should.equal(false)
+        await expect(this.DocManager._getDoc(this.project_id, this.doc_id)).to
+          .be.rejected
+        this.MongoManager.findDoc.called.should.equal(false)
       })

       it('should not error if inS3 is set to true', async function () {
-        await this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
+        await this.DocManager._getDoc(this.project_id, this.doc_id, {
           inS3: true,
         })
       })

@@ -155,8 +183,8 @@ describe('DocManager', function () {

     describe('when the doc is in the doc collection', function () {
       beforeEach(async function () {
-        this.MongoManager.promises.findDoc.resolves(this.doc)
-        this.result = await this.DocManager.promises._getDoc(
+        this.MongoManager.findDoc.resolves(this.doc)
+        this.result = await this.DocManager._getDoc(
           this.project_id,
           this.doc_id,
           { version: true, inS3: true }

@@ -164,7 +192,7 @@ describe('DocManager', function () {
       })

       it('should get the doc from the doc collection', function () {
-        this.MongoManager.promises.findDoc
+        this.MongoManager.findDoc
           .calledWith(this.project_id, this.doc_id)
           .should.equal(true)
       })

@@ -177,9 +205,9 @@ describe('DocManager', function () {

     describe('when MongoManager.findDoc errors', function () {
       it('should return the error', async function () {
-        this.MongoManager.promises.findDoc.rejects(this.stubbedError)
+        this.MongoManager.findDoc.rejects(this.stubbedError)
         await expect(
-          this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
+          this.DocManager._getDoc(this.project_id, this.doc_id, {
             version: true,
             inS3: true,
           })

@@ -202,15 +230,15 @@ describe('DocManager', function () {
           version: 2,
           inS3: false,
         }
-        this.MongoManager.promises.findDoc.resolves(this.doc)
-        this.DocArchiveManager.promises.unarchiveDoc.callsFake(
+        this.MongoManager.findDoc.resolves(this.doc)
+        this.DocArchiveManager.unarchiveDoc.callsFake(
           async (projectId, docId) => {
-            this.MongoManager.promises.findDoc.resolves({
+            this.MongoManager.findDoc.resolves({
               ...this.unarchivedDoc,
             })
           }
         )
-        this.result = await this.DocManager.promises._getDoc(
+        this.result = await this.DocManager._getDoc(
           this.project_id,
           this.doc_id,
           {

@@ -221,13 +249,13 @@ describe('DocManager', function () {
       })

       it('should call the DocArchive to unarchive the doc', function () {
-        this.DocArchiveManager.promises.unarchiveDoc
+        this.DocArchiveManager.unarchiveDoc
           .calledWith(this.project_id, this.doc_id)
           .should.equal(true)
       })

       it('should look up the doc twice', function () {
-        this.MongoManager.promises.findDoc.calledTwice.should.equal(true)
+        this.MongoManager.findDoc.calledTwice.should.equal(true)
       })

       it('should return the doc', function () {

@@ -239,9 +267,9 @@ describe('DocManager', function () {

     describe('when the doc does not exist in the docs collection', function () {
       it('should return a NotFoundError', async function () {
-        this.MongoManager.promises.findDoc.resolves(null)
+        this.MongoManager.findDoc.resolves(null)
         await expect(
-          this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
+          this.DocManager._getDoc(this.project_id, this.doc_id, {
             version: true,
             inS3: true,
           })

@@ -262,23 +290,27 @@ describe('DocManager', function () {
             lines: ['mock-lines'],
           },
         ]
-        this.MongoManager.promises.getProjectsDocs.resolves(this.docs)
-        this.DocArchiveManager.promises.unArchiveAllDocs.resolves(this.docs)
-        this.filter = { lines: true }
-        this.result = await this.DocManager.promises.getAllNonDeletedDocs(
+        this.MongoManager.getProjectsDocs.resolves(this.docs)
+        this.DocArchiveManager.unArchiveAllDocs.resolves(this.docs)
+        this.filter = { lines: true, ranges: true }
+        this.result = await this.DocManager.getAllNonDeletedDocs(
           this.project_id,
           this.filter
         )
       })

       it('should get the project from the database', function () {
-        this.MongoManager.promises.getProjectsDocs.should.have.been.calledWith(
+        this.MongoManager.getProjectsDocs.should.have.been.calledWith(
           this.project_id,
           { include_deleted: false },
           this.filter
         )
       })

+      it('should fix comment ids', async function () {
+        expect(this.RangeManager.fixCommentIds).to.have.been.called
+      })
+
       it('should return the docs', function () {
         expect(this.result).to.deep.equal(this.docs)
       })

@@ -286,13 +318,10 @@ describe('DocManager', function () {

     describe('when there are no docs for the project', function () {
       it('should return a NotFoundError', async function () {
-        this.MongoManager.promises.getProjectsDocs.resolves(null)
-        this.DocArchiveManager.promises.unArchiveAllDocs.resolves(null)
+        this.MongoManager.getProjectsDocs.resolves(null)
+        this.DocArchiveManager.unArchiveAllDocs.resolves(null)
         await expect(
-          this.DocManager.promises.getAllNonDeletedDocs(
-            this.project_id,
-            this.filter
-          )
+          this.DocManager.getAllNonDeletedDocs(this.project_id, this.filter)
         ).to.be.rejectedWith(`No docs for project ${this.project_id}`)
       })
     })
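These tests lean on chai-as-promised for the rejection assertions. The setup and pattern, as a generic sketch (the failing stub here is invented for illustration):

    const chai = require('chai')
    chai.use(require('chai-as-promised'))
    const { expect } = chai

    it('rejects with the expected message', async function () {
      const failingCall = async () => {
        throw new Error('No docs for project 123') // invented example error
      }
      // rejectedWith accepts an error class or a message string
      await expect(failingCall()).to.be.rejectedWith('No docs for project 123')
    })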
@@ -303,7 +332,7 @@ describe('DocManager', function () {
       beforeEach(function () {
         this.lines = ['mock', 'doc', 'lines']
         this.rev = 77
-        this.MongoManager.promises.findDoc.resolves({
+        this.MongoManager.findDoc.resolves({
           _id: new ObjectId(this.doc_id),
         })
         this.meta = {}

@@ -311,7 +340,7 @@ describe('DocManager', function () {

     describe('standard path', function () {
       beforeEach(async function () {
-        await this.DocManager.promises.patchDoc(
+        await this.DocManager.patchDoc(
           this.project_id,
           this.doc_id,
           this.meta

@@ -319,14 +348,14 @@ describe('DocManager', function () {
       })

       it('should get the doc', function () {
-        expect(this.MongoManager.promises.findDoc).to.have.been.calledWith(
+        expect(this.MongoManager.findDoc).to.have.been.calledWith(
           this.project_id,
           this.doc_id
         )
       })

       it('should persist the meta', function () {
-        expect(this.MongoManager.promises.patchDoc).to.have.been.calledWith(
+        expect(this.MongoManager.patchDoc).to.have.been.calledWith(
           this.project_id,
           this.doc_id,
           this.meta

@@ -339,7 +368,7 @@ describe('DocManager', function () {
         this.settings.docstore.archiveOnSoftDelete = false
         this.meta.deleted = true

-        await this.DocManager.promises.patchDoc(
+        await this.DocManager.patchDoc(
           this.project_id,
           this.doc_id,
           this.meta

@@ -347,8 +376,7 @@ describe('DocManager', function () {
       })

       it('should not flush the doc out of mongo', function () {
-        expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been
-          .called
+        expect(this.DocArchiveManager.archiveDoc).to.not.have.been.called
       })
     })

@@ -356,7 +384,7 @@ describe('DocManager', function () {
       beforeEach(async function () {
         this.settings.docstore.archiveOnSoftDelete = false
         this.meta.deleted = false
-        await this.DocManager.promises.patchDoc(
+        await this.DocManager.patchDoc(
           this.project_id,
           this.doc_id,
           this.meta

@@ -364,8 +392,7 @@ describe('DocManager', function () {
       })

       it('should not flush the doc out of mongo', function () {
-        expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been
-          .called
+        expect(this.DocArchiveManager.archiveDoc).to.not.have.been.called
       })
     })

@@ -377,7 +404,7 @@ describe('DocManager', function () {

       describe('when the background flush succeeds', function () {
         beforeEach(async function () {
-          await this.DocManager.promises.patchDoc(
+          await this.DocManager.patchDoc(
             this.project_id,
             this.doc_id,
             this.meta

@@ -389,17 +416,18 @@ describe('DocManager', function () {
         })

         it('should flush the doc out of mongo', function () {
-          expect(
-            this.DocArchiveManager.promises.archiveDoc
-          ).to.have.been.calledWith(this.project_id, this.doc_id)
+          expect(this.DocArchiveManager.archiveDoc).to.have.been.calledWith(
+            this.project_id,
+            this.doc_id
+          )
         })
       })

       describe('when the background flush fails', function () {
         beforeEach(async function () {
           this.err = new Error('foo')
-          this.DocArchiveManager.promises.archiveDoc.rejects(this.err)
-          await this.DocManager.promises.patchDoc(
+          this.DocArchiveManager.archiveDoc.rejects(this.err)
+          await this.DocManager.patchDoc(
             this.project_id,
             this.doc_id,
             this.meta

@@ -422,9 +450,9 @@ describe('DocManager', function () {

     describe('when the doc does not exist', function () {
       it('should return a NotFoundError', async function () {
-        this.MongoManager.promises.findDoc.resolves(null)
+        this.MongoManager.findDoc.resolves(null)
         await expect(
-          this.DocManager.promises.patchDoc(this.project_id, this.doc_id, {})
+          this.DocManager.patchDoc(this.project_id, this.doc_id, {})
         ).to.be.rejectedWith(
           `No such project/doc to delete: ${this.project_id}/${this.doc_id}`
         )
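Taken together, the patchDoc tests above fix the archive-on-soft-delete policy. Roughly, as a sketch reconstructed from the assertions (the real control flow and failure handling are not shown in this diff):

    // Reconstructed sketch, not the actual DocManager.patchDoc.
    async function patchDoc(projectId, docId, meta) {
      const doc = await MongoManager.findDoc(projectId, docId) // lookup assumed
      if (doc == null) {
        throw new Errors.NotFoundError(
          `No such project/doc to delete: ${projectId}/${docId}`
        )
      }
      await MongoManager.patchDoc(projectId, docId, meta)
      if (meta.deleted && settings.docstore.archiveOnSoftDelete) {
        // the tests call this a "background flush"; error handling is assumed
        await DocArchiveManager.archiveDoc(projectId, docId)
      }
    }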
@@ -470,13 +498,13 @@ describe('DocManager', function () {
           ranges: this.originalRanges,
         }

-        this.DocManager.promises._getDoc = sinon.stub()
+        this.DocManager._getDoc = sinon.stub()
       })

       describe('when only the doc lines have changed', function () {
         beforeEach(async function () {
-          this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
-          this.result = await this.DocManager.promises.updateDoc(
+          this.DocManager._getDoc = sinon.stub().resolves(this.doc)
+          this.result = await this.DocManager.updateDoc(
             this.project_id,
             this.doc_id,
             this.newDocLines,

@@ -486,7 +514,7 @@ describe('DocManager', function () {
         })

         it('should get the existing doc', function () {
-          this.DocManager.promises._getDoc
+          this.DocManager._getDoc
             .calledWith(this.project_id, this.doc_id, {
               version: true,
               rev: true,

@@ -498,7 +526,7 @@ describe('DocManager', function () {
         })

         it('should upsert the document to the doc collection', function () {
-          this.MongoManager.promises.upsertIntoDocCollection
+          this.MongoManager.upsertIntoDocCollection
             .calledWith(this.project_id, this.doc_id, this.rev, {
               lines: this.newDocLines,
             })

@@ -512,9 +540,9 @@ describe('DocManager', function () {

       describe('when the doc ranges have changed', function () {
         beforeEach(async function () {
-          this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
+          this.DocManager._getDoc = sinon.stub().resolves(this.doc)
           this.RangeManager.shouldUpdateRanges.returns(true)
-          this.result = await this.DocManager.promises.updateDoc(
+          this.result = await this.DocManager.updateDoc(
             this.project_id,
             this.doc_id,
             this.oldDocLines,

@@ -524,7 +552,7 @@ describe('DocManager', function () {
         })

         it('should upsert the ranges', function () {
-          this.MongoManager.promises.upsertIntoDocCollection
+          this.MongoManager.upsertIntoDocCollection
             .calledWith(this.project_id, this.doc_id, this.rev, {
               ranges: this.newRanges,
             })

@@ -538,8 +566,8 @@ describe('DocManager', function () {

       describe('when only the version has changed', function () {
         beforeEach(async function () {
-          this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
-          this.result = await this.DocManager.promises.updateDoc(
+          this.DocManager._getDoc = sinon.stub().resolves(this.doc)
+          this.result = await this.DocManager.updateDoc(
             this.project_id,
             this.doc_id,
             this.oldDocLines,

@@ -549,7 +577,7 @@ describe('DocManager', function () {
         })

         it('should update the version', function () {
-          this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
+          this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith(
             this.project_id,
             this.doc_id,
             this.rev,

@@ -564,8 +592,8 @@ describe('DocManager', function () {

       describe('when the doc has not changed at all', function () {
         beforeEach(async function () {
-          this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
-          this.result = await this.DocManager.promises.updateDoc(
+          this.DocManager._getDoc = sinon.stub().resolves(this.doc)
+          this.result = await this.DocManager.updateDoc(
             this.project_id,
             this.doc_id,
             this.oldDocLines,

@@ -575,9 +603,7 @@ describe('DocManager', function () {
         })

         it('should not update the ranges or lines or version', function () {
-          this.MongoManager.promises.upsertIntoDocCollection.called.should.equal(
-            false
-          )
+          this.MongoManager.upsertIntoDocCollection.called.should.equal(false)
         })

         it('should return the old rev and modified == false', function () {

@@ -588,7 +614,7 @@ describe('DocManager', function () {
       describe('when the version is null', function () {
         it('should return an error', async function () {
           await expect(
-            this.DocManager.promises.updateDoc(
+            this.DocManager.updateDoc(
               this.project_id,
               this.doc_id,
               this.newDocLines,

@@ -602,7 +628,7 @@ describe('DocManager', function () {
       describe('when the lines are null', function () {
         it('should return an error', async function () {
           await expect(
-            this.DocManager.promises.updateDoc(
+            this.DocManager.updateDoc(
               this.project_id,
               this.doc_id,
               null,

@@ -616,7 +642,7 @@ describe('DocManager', function () {
       describe('when the ranges are null', function () {
         it('should return an error', async function () {
           await expect(
-            this.DocManager.promises.updateDoc(
+            this.DocManager.updateDoc(
               this.project_id,
               this.doc_id,
               this.newDocLines,

@@ -630,9 +656,9 @@ describe('DocManager', function () {
       describe('when there is a generic error getting the doc', function () {
         beforeEach(async function () {
           this.error = new Error('doc could not be found')
-          this.DocManager.promises._getDoc = sinon.stub().rejects(this.error)
+          this.DocManager._getDoc = sinon.stub().rejects(this.error)
           await expect(
-            this.DocManager.promises.updateDoc(
+            this.DocManager.updateDoc(
               this.project_id,
               this.doc_id,
               this.newDocLines,

@@ -643,16 +669,15 @@ describe('DocManager', function () {
         })

         it('should not upsert the document to the doc collection', function () {
-          this.MongoManager.promises.upsertIntoDocCollection.should.not.have.been
-            .called
+          this.MongoManager.upsertIntoDocCollection.should.not.have.been.called
         })
       })

       describe('when the version was decremented', function () {
         it('should return an error', async function () {
-          this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
+          this.DocManager._getDoc = sinon.stub().resolves(this.doc)
           await expect(
-            this.DocManager.promises.updateDoc(
+            this.DocManager.updateDoc(
               this.project_id,
               this.doc_id,
               this.newDocLines,

@@ -665,8 +690,8 @@ describe('DocManager', function () {

       describe('when the doc lines have not changed', function () {
         beforeEach(async function () {
-          this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
-          this.result = await this.DocManager.promises.updateDoc(
+          this.DocManager._getDoc = sinon.stub().resolves(this.doc)
+          this.result = await this.DocManager.updateDoc(
             this.project_id,
             this.doc_id,
             this.oldDocLines.slice(),

@@ -676,9 +701,7 @@ describe('DocManager', function () {
         })

         it('should not update the doc', function () {
-          this.MongoManager.promises.upsertIntoDocCollection.called.should.equal(
-            false
-          )
+          this.MongoManager.upsertIntoDocCollection.called.should.equal(false)
         })

         it('should return the existing rev', function () {

@@ -688,8 +711,8 @@ describe('DocManager', function () {

       describe('when the doc does not exist', function () {
         beforeEach(async function () {
-          this.DocManager.promises._getDoc = sinon.stub().resolves(null)
-          this.result = await this.DocManager.promises.updateDoc(
+          this.DocManager._getDoc = sinon.stub().resolves(null)
+          this.result = await this.DocManager.updateDoc(
             this.project_id,
             this.doc_id,
             this.newDocLines,

@@ -699,7 +722,7 @@ describe('DocManager', function () {
         })

         it('should upsert the document to the doc collection', function () {
-          this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
+          this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith(
             this.project_id,
             this.doc_id,
             undefined,

@@ -718,12 +741,12 @@ describe('DocManager', function () {

       describe('when another update is racing', function () {
         beforeEach(async function () {
-          this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
-          this.MongoManager.promises.upsertIntoDocCollection
+          this.DocManager._getDoc = sinon.stub().resolves(this.doc)
+          this.MongoManager.upsertIntoDocCollection
             .onFirstCall()
             .rejects(new Errors.DocRevValueError())
           this.RangeManager.shouldUpdateRanges.returns(true)
-          this.result = await this.DocManager.promises.updateDoc(
+          this.result = await this.DocManager.updateDoc(
             this.project_id,
             this.doc_id,
             this.newDocLines,

@@ -733,7 +756,7 @@ describe('DocManager', function () {
         })

         it('should upsert the doc twice', function () {
-          this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
+          this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith(
             this.project_id,
             this.doc_id,
             this.rev,

@@ -743,8 +766,7 @@ describe('DocManager', function () {
               version: this.version + 1,
             }
           )
-          this.MongoManager.promises.upsertIntoDocCollection.should.have.been
-            .calledTwice
+          this.MongoManager.upsertIntoDocCollection.should.have.been.calledTwice
         })

         it('should return the new rev', function () {
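Every DocManager hunk above applies the same mechanical migration: callers stop reaching through the promisified `.promises` sub-namespace and call the now-async methods directly, and sinon stubs swap `.callsArgWith`/`.yields` for `.resolves`/`.rejects`. In outline (require path assumed):

    const MongoManager = require('./MongoManager') // path assumed

    async function example(projectId, docId, filter) {
      // before: const doc = await MongoManager.promises.findDoc(projectId, docId, filter)
      // after: the exported methods are async functions themselves
      return await MongoManager.findDoc(projectId, docId, filter)
    }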
@@ -14,7 +14,7 @@ describe('HttpController', function () {
       max_doc_length: 2 * 1024 * 1024,
     }
     this.DocArchiveManager = {
-      unArchiveAllDocs: sinon.stub().yields(),
+      unArchiveAllDocs: sinon.stub().returns(),
     }
     this.DocManager = {}
     this.HttpController = SandboxedModule.require(modulePath, {

@@ -54,15 +54,13 @@ describe('HttpController', function () {

   describe('getDoc', function () {
     describe('without deleted docs', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
         this.req.params = {
           project_id: this.projectId,
           doc_id: this.docId,
         }
-        this.DocManager.getFullDoc = sinon
-          .stub()
-          .callsArgWith(2, null, this.doc)
-        this.HttpController.getDoc(this.req, this.res, this.next)
+        this.DocManager.getFullDoc = sinon.stub().resolves(this.doc)
+        await this.HttpController.getDoc(this.req, this.res, this.next)
       })

       it('should get the document with the version (including deleted)', function () {

@@ -89,26 +87,24 @@ describe('HttpController', function () {
           project_id: this.projectId,
           doc_id: this.docId,
         }
-        this.DocManager.getFullDoc = sinon
-          .stub()
-          .callsArgWith(2, null, this.deletedDoc)
+        this.DocManager.getFullDoc = sinon.stub().resolves(this.deletedDoc)
       })

-      it('should get the doc from the doc manager', function () {
-        this.HttpController.getDoc(this.req, this.res, this.next)
+      it('should get the doc from the doc manager', async function () {
+        await this.HttpController.getDoc(this.req, this.res, this.next)
         this.DocManager.getFullDoc
           .calledWith(this.projectId, this.docId)
           .should.equal(true)
       })

-      it('should return 404 if the query string delete is not set ', function () {
-        this.HttpController.getDoc(this.req, this.res, this.next)
+      it('should return 404 if the query string delete is not set ', async function () {
+        await this.HttpController.getDoc(this.req, this.res, this.next)
         this.res.sendStatus.calledWith(404).should.equal(true)
       })

-      it('should return the doc as JSON if include_deleted is set to true', function () {
+      it('should return the doc as JSON if include_deleted is set to true', async function () {
         this.req.query.include_deleted = 'true'
-        this.HttpController.getDoc(this.req, this.res, this.next)
+        await this.HttpController.getDoc(this.req, this.res, this.next)
         this.res.json
           .calledWith({
             _id: this.docId,

@@ -123,13 +119,15 @@ describe('HttpController', function () {
   })

   describe('getRawDoc', function () {
-    beforeEach(function () {
+    beforeEach(async function () {
       this.req.params = {
         project_id: this.projectId,
         doc_id: this.docId,
       }
-      this.DocManager.getDocLines = sinon.stub().callsArgWith(2, null, this.doc)
-      this.HttpController.getRawDoc(this.req, this.res, this.next)
+      this.DocManager.getDocLines = sinon
+        .stub()
+        .resolves(this.doc.lines.join('\n'))
+      await this.HttpController.getRawDoc(this.req, this.res, this.next)
     })

     it('should get the document without the version', function () {

@@ -154,7 +152,7 @@ describe('HttpController', function () {

   describe('getAllDocs', function () {
     describe('normally', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
         this.req.params = { project_id: this.projectId }
         this.docs = [
           {

@@ -168,10 +166,8 @@ describe('HttpController', function () {
             rev: 4,
           },
         ]
-        this.DocManager.getAllNonDeletedDocs = sinon
-          .stub()
-          .callsArgWith(2, null, this.docs)
-        this.HttpController.getAllDocs(this.req, this.res, this.next)
+        this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs)
+        await this.HttpController.getAllDocs(this.req, this.res, this.next)
       })

       it('should get all the (non-deleted) docs', function () {

@@ -199,7 +195,7 @@ describe('HttpController', function () {
     })

     describe('with null lines', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
         this.req.params = { project_id: this.projectId }
         this.docs = [
           {

@@ -213,10 +209,8 @@ describe('HttpController', function () {
             rev: 4,
           },
         ]
-        this.DocManager.getAllNonDeletedDocs = sinon
-          .stub()
-          .callsArgWith(2, null, this.docs)
-        this.HttpController.getAllDocs(this.req, this.res, this.next)
+        this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs)
+        await this.HttpController.getAllDocs(this.req, this.res, this.next)
       })

       it('should return the doc with fallback lines', function () {

@@ -238,7 +232,7 @@ describe('HttpController', function () {
     })

     describe('with a null doc', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
         this.req.params = { project_id: this.projectId }
         this.docs = [
           {

@@ -253,10 +247,8 @@ describe('HttpController', function () {
             rev: 4,
           },
         ]
-        this.DocManager.getAllNonDeletedDocs = sinon
-          .stub()
-          .callsArgWith(2, null, this.docs)
-        this.HttpController.getAllDocs(this.req, this.res, this.next)
+        this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs)
+        await this.HttpController.getAllDocs(this.req, this.res, this.next)
       })

       it('should return the non null docs as JSON', function () {

@@ -292,7 +284,7 @@ describe('HttpController', function () {

   describe('getAllRanges', function () {
     describe('normally', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
         this.req.params = { project_id: this.projectId }
         this.docs = [
           {

@@ -304,10 +296,8 @@ describe('HttpController', function () {
             ranges: { mock_ranges: 'two' },
           },
         ]
-        this.DocManager.getAllNonDeletedDocs = sinon
-          .stub()
-          .callsArgWith(2, null, this.docs)
-        this.HttpController.getAllRanges(this.req, this.res, this.next)
+        this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs)
+        await this.HttpController.getAllRanges(this.req, this.res, this.next)
       })

       it('should get all the (non-deleted) doc ranges', function () {
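From here the updateDoc stubs also change shape: the old callback signature (err, modified, rev) becomes a single resolved object, matching the hunks below:

    // before: this.DocManager.updateDoc = sinon.stub().yields(null, true, 5)
    // after:
    this.DocManager.updateDoc = sinon.stub().resolves({ modified: true, rev: 5 })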
@@ -342,16 +332,17 @@ describe('HttpController', function () {
     })

     describe('when the doc lines exist and were updated', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
         this.req.body = {
           lines: (this.lines = ['hello', 'world']),
           version: (this.version = 42),
           ranges: (this.ranges = { changes: 'mock' }),
         }
+        this.rev = 5
         this.DocManager.updateDoc = sinon
           .stub()
-          .yields(null, true, (this.rev = 5))
-        this.HttpController.updateDoc(this.req, this.res, this.next)
+          .resolves({ modified: true, rev: this.rev })
+        await this.HttpController.updateDoc(this.req, this.res, this.next)
       })

       it('should update the document', function () {

@@ -374,16 +365,17 @@ describe('HttpController', function () {
     })

     describe('when the doc lines exist and were not updated', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
         this.req.body = {
           lines: (this.lines = ['hello', 'world']),
           version: (this.version = 42),
           ranges: {},
         }
+        this.rev = 5
         this.DocManager.updateDoc = sinon
           .stub()
-          .yields(null, false, (this.rev = 5))
-        this.HttpController.updateDoc(this.req, this.res, this.next)
+          .resolves({ modified: false, rev: this.rev })
+        await this.HttpController.updateDoc(this.req, this.res, this.next)
       })

       it('should return a modified status', function () {

@@ -394,10 +386,12 @@ describe('HttpController', function () {
     })

     describe('when the doc lines are not provided', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
         this.req.body = { version: 42, ranges: {} }
-        this.DocManager.updateDoc = sinon.stub().yields(null, false)
-        this.HttpController.updateDoc(this.req, this.res, this.next)
+        this.DocManager.updateDoc = sinon
+          .stub()
+          .resolves({ modified: false, rev: 0 })
+        await this.HttpController.updateDoc(this.req, this.res, this.next)
       })

       it('should not update the document', function () {

@@ -410,10 +404,12 @@ describe('HttpController', function () {
     })

     describe('when the doc version are not provided', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
         this.req.body = { version: 42, lines: ['hello world'] }
-        this.DocManager.updateDoc = sinon.stub().yields(null, false)
-        this.HttpController.updateDoc(this.req, this.res, this.next)
+        this.DocManager.updateDoc = sinon
+          .stub()
+          .resolves({ modified: false, rev: 0 })
+        await this.HttpController.updateDoc(this.req, this.res, this.next)
       })

       it('should not update the document', function () {

@@ -426,10 +422,12 @@ describe('HttpController', function () {
     })

     describe('when the doc ranges is not provided', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
         this.req.body = { lines: ['foo'], version: 42 }
-        this.DocManager.updateDoc = sinon.stub().yields(null, false)
-        this.HttpController.updateDoc(this.req, this.res, this.next)
+        this.DocManager.updateDoc = sinon
+          .stub()
+          .resolves({ modified: false, rev: 0 })
+        await this.HttpController.updateDoc(this.req, this.res, this.next)
       })

       it('should not update the document', function () {

@@ -442,13 +440,20 @@ describe('HttpController', function () {
     })

     describe('when the doc body is too large', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
         this.req.body = {
           lines: (this.lines = Array(2049).fill('a'.repeat(1024))),
           version: (this.version = 42),
           ranges: (this.ranges = { changes: 'mock' }),
         }
-        this.HttpController.updateDoc(this.req, this.res, this.next)
+        this.DocManager.updateDoc = sinon
+          .stub()
+          .resolves({ modified: false, rev: 0 })
+        await this.HttpController.updateDoc(this.req, this.res, this.next)
+      })
+
+      it('should not update the document', function () {
+        this.DocManager.updateDoc.called.should.equal(false)
       })

       it('should return a 413 (too large) response', function () {

@@ -462,14 +467,14 @@ describe('HttpController', function () {
   })

   describe('patchDoc', function () {
-    beforeEach(function () {
+    beforeEach(async function () {
       this.req.params = {
         project_id: this.projectId,
         doc_id: this.docId,
       }
       this.req.body = { name: 'foo.tex' }
-      this.DocManager.patchDoc = sinon.stub().yields(null)
-      this.HttpController.patchDoc(this.req, this.res, this.next)
+      this.DocManager.patchDoc = sinon.stub().resolves()
+      await this.HttpController.patchDoc(this.req, this.res, this.next)
     })

     it('should delete the document', function () {

@@ -484,11 +489,11 @@ describe('HttpController', function () {
     })

     describe('with an invalid payload', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
         this.req.body = { cannot: 'happen' }

-        this.DocManager.patchDoc = sinon.stub().yields(null)
-        this.HttpController.patchDoc(this.req, this.res, this.next)
+        this.DocManager.patchDoc = sinon.stub().resolves()
+        await this.HttpController.patchDoc(this.req, this.res, this.next)
       })

       it('should log a message', function () {

@@ -509,10 +514,10 @@ describe('HttpController', function () {
   })

   describe('archiveAllDocs', function () {
-    beforeEach(function () {
+    beforeEach(async function () {
       this.req.params = { project_id: this.projectId }
-      this.DocArchiveManager.archiveAllDocs = sinon.stub().callsArg(1)
-      this.HttpController.archiveAllDocs(this.req, this.res, this.next)
+      this.DocArchiveManager.archiveAllDocs = sinon.stub().resolves()
+      await this.HttpController.archiveAllDocs(this.req, this.res, this.next)
     })

     it('should archive the project', function () {

@@ -532,9 +537,12 @@ describe('HttpController', function () {
     })

     describe('on success', function () {
-      beforeEach(function (done) {
-        this.res.sendStatus.callsFake(() => done())
-        this.HttpController.unArchiveAllDocs(this.req, this.res, this.next)
+      beforeEach(async function () {
+        await this.HttpController.unArchiveAllDocs(
+          this.req,
+          this.res,
+          this.next
+        )
       })

       it('returns a 200', function () {

@@ -543,12 +551,15 @@ describe('HttpController', function () {
     })

     describe("when the archived rev doesn't match", function () {
-      beforeEach(function (done) {
-        this.res.sendStatus.callsFake(() => done())
-        this.DocArchiveManager.unArchiveAllDocs.yields(
+      beforeEach(async function () {
+        this.DocArchiveManager.unArchiveAllDocs.rejects(
           new Errors.DocRevValueError('bad rev')
         )
-        this.HttpController.unArchiveAllDocs(this.req, this.res, this.next)
+        await this.HttpController.unArchiveAllDocs(
+          this.req,
+          this.res,
+          this.next
+        )
       })

       it('returns a 409', function () {

@@ -558,10 +569,10 @@ describe('HttpController', function () {
   })

   describe('destroyProject', function () {
-    beforeEach(function () {
+    beforeEach(async function () {
       this.req.params = { project_id: this.projectId }
-      this.DocArchiveManager.destroyProject = sinon.stub().callsArg(1)
-      this.HttpController.destroyProject(this.req, this.res, this.next)
+      this.DocArchiveManager.destroyProject = sinon.stub().resolves()
+      await this.HttpController.destroyProject(this.req, this.res, this.next)
     })

     it('should destroy the docs', function () {
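The HttpController tests end up with one uniform recipe for driving an async Express handler: stub the collaborator with a promise, await the handler, then assert on res. As a generic sketch (the collaborator and handler names here are hypothetical):

    it('responds once the async handler settles', async function () {
      this.DocManager.doThing = sinon.stub().resolves() // hypothetical collaborator
      await this.HttpController.doThing(this.req, this.res, this.next) // hypothetical handler
      this.res.sendStatus.called.should.equal(true)
    })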
@@ -41,7 +41,7 @@ describe('MongoManager', function () {
       this.doc = { name: 'mock-doc' }
       this.db.docs.findOne = sinon.stub().resolves(this.doc)
       this.filter = { lines: true }
-      this.result = await this.MongoManager.promises.findDoc(
+      this.result = await this.MongoManager.findDoc(
         this.projectId,
         this.docId,
         this.filter

@@ -70,11 +70,7 @@ describe('MongoManager', function () {
   describe('patchDoc', function () {
     beforeEach(async function () {
       this.meta = { name: 'foo.tex' }
-      await this.MongoManager.promises.patchDoc(
-        this.projectId,
-        this.docId,
-        this.meta
-      )
+      await this.MongoManager.patchDoc(this.projectId, this.docId, this.meta)
     })

     it('should pass the parameter along', function () {

@@ -104,7 +100,7 @@ describe('MongoManager', function () {

     describe('with included_deleted = false', function () {
       beforeEach(async function () {
-        this.result = await this.MongoManager.promises.getProjectsDocs(
+        this.result = await this.MongoManager.getProjectsDocs(
           this.projectId,
           { include_deleted: false },
           this.filter

@@ -132,7 +128,7 @@ describe('MongoManager', function () {

     describe('with included_deleted = true', function () {
       beforeEach(async function () {
-        this.result = await this.MongoManager.promises.getProjectsDocs(
+        this.result = await this.MongoManager.getProjectsDocs(
           this.projectId,
           { include_deleted: true },
           this.filter

@@ -167,7 +163,7 @@ describe('MongoManager', function () {
       this.db.docs.find = sinon.stub().returns({
         toArray: sinon.stub().resolves([this.doc1, this.doc2, this.doc3]),
       })
-      this.result = await this.MongoManager.promises.getProjectsDeletedDocs(
+      this.result = await this.MongoManager.getProjectsDeletedDocs(
         this.projectId,
         this.filter
       )

@@ -203,7 +199,7 @@ describe('MongoManager', function () {
     })

     it('should upsert the document', async function () {
-      await this.MongoManager.promises.upsertIntoDocCollection(
+      await this.MongoManager.upsertIntoDocCollection(
         this.projectId,
         this.docId,
         this.oldRev,

@@ -223,7 +219,7 @@ describe('MongoManager', function () {
     it('should handle update error', async function () {
       this.db.docs.updateOne.rejects(this.stubbedErr)
       await expect(
-        this.MongoManager.promises.upsertIntoDocCollection(
+        this.MongoManager.upsertIntoDocCollection(
           this.projectId,
           this.docId,
           this.rev,

@@ -235,7 +231,7 @@ describe('MongoManager', function () {
     })

     it('should insert without a previous rev', async function () {
-      await this.MongoManager.promises.upsertIntoDocCollection(
+      await this.MongoManager.upsertIntoDocCollection(
         this.projectId,
         this.docId,
         null,

@@ -254,7 +250,7 @@ describe('MongoManager', function () {
     it('should handle generic insert error', async function () {
       this.db.docs.insertOne.rejects(this.stubbedErr)
       await expect(
-        this.MongoManager.promises.upsertIntoDocCollection(
+        this.MongoManager.upsertIntoDocCollection(
           this.projectId,
           this.docId,
           null,

@@ -266,7 +262,7 @@ describe('MongoManager', function () {
     it('should handle duplicate insert error', async function () {
       this.db.docs.insertOne.rejects({ code: 11000 })
       await expect(
-        this.MongoManager.promises.upsertIntoDocCollection(
+        this.MongoManager.upsertIntoDocCollection(
           this.projectId,
           this.docId,
           null,

@@ -280,7 +276,7 @@ describe('MongoManager', function () {
     beforeEach(async function () {
       this.projectId = new ObjectId()
       this.db.docs.deleteMany = sinon.stub().resolves()
-      await this.MongoManager.promises.destroyProject(this.projectId)
+      await this.MongoManager.destroyProject(this.projectId)
     })

     it('should destroy all docs', function () {

@@ -297,13 +293,13 @@ describe('MongoManager', function () {

     it('should not error when the rev has not changed', async function () {
       this.db.docs.findOne = sinon.stub().resolves({ rev: 1 })
-      await this.MongoManager.promises.checkRevUnchanged(this.doc)
+      await this.MongoManager.checkRevUnchanged(this.doc)
     })

     it('should return an error when the rev has changed', async function () {
       this.db.docs.findOne = sinon.stub().resolves({ rev: 2 })
       await expect(
-        this.MongoManager.promises.checkRevUnchanged(this.doc)
+        this.MongoManager.checkRevUnchanged(this.doc)
       ).to.be.rejectedWith(Errors.DocModifiedError)
     })

@@ -311,14 +307,14 @@ describe('MongoManager', function () {
       this.db.docs.findOne = sinon.stub().resolves({ rev: 2 })
       this.doc = { _id: new ObjectId(), name: 'mock-doc', rev: NaN }
       await expect(
-        this.MongoManager.promises.checkRevUnchanged(this.doc)
+        this.MongoManager.checkRevUnchanged(this.doc)
       ).to.be.rejectedWith(Errors.DocRevValueError)
     })

     it('should return a value error if checked doc rev is NaN', async function () {
       this.db.docs.findOne = sinon.stub().resolves({ rev: NaN })
       await expect(
-        this.MongoManager.promises.checkRevUnchanged(this.doc)
+        this.MongoManager.checkRevUnchanged(this.doc)
       ).to.be.rejectedWith(Errors.DocRevValueError)
     })
   })

@@ -334,7 +330,7 @@ describe('MongoManager', function () {

     describe('complete doc', function () {
       beforeEach(async function () {
-        await this.MongoManager.promises.restoreArchivedDoc(
+        await this.MongoManager.restoreArchivedDoc(
           this.projectId,
           this.docId,
           this.archivedDoc

@@ -364,7 +360,7 @@ describe('MongoManager', function () {
     describe('without ranges', function () {
       beforeEach(async function () {
         delete this.archivedDoc.ranges
-        await this.MongoManager.promises.restoreArchivedDoc(
+        await this.MongoManager.restoreArchivedDoc(
           this.projectId,
           this.docId,
           this.archivedDoc

@@ -395,7 +391,7 @@ describe('MongoManager', function () {
     it('throws a DocRevValueError', async function () {
       this.db.docs.updateOne.resolves({ matchedCount: 0 })
       await expect(
-        this.MongoManager.promises.restoreArchivedDoc(
+        this.MongoManager.restoreArchivedDoc(
           this.projectId,
           this.docId,
           this.archivedDoc
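Read together, the checkRevUnchanged tests above imply roughly this contract — a sketch reconstructed from the assertions; the query shape and messages are assumptions:

    // Sketch, not the real MongoManager.checkRevUnchanged.
    async function checkRevUnchanged(doc) {
      const freshDoc = await db.docs.findOne({ _id: doc._id }) // query shape assumed
      if (!Number.isInteger(doc.rev) || !Number.isInteger(freshDoc.rev)) {
        throw new Errors.DocRevValueError('doc rev is NaN') // message assumed
      }
      if (doc.rev !== freshDoc.rev) {
        throw new Errors.DocModifiedError('doc rev has changed') // message assumed
      }
    }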
@ -30,7 +30,7 @@ describe('RangeManager', function () {
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('jsonRangesToMongo', function () {
|
describe('jsonRangesToMongo', function () {
|
||||||
it('should convert ObjectIds and dates to proper objects', function () {
|
it('should convert ObjectIds and dates to proper objects and fix comment id', function () {
|
||||||
const changeId = new ObjectId().toString()
|
const changeId = new ObjectId().toString()
|
||||||
const commentId = new ObjectId().toString()
|
const commentId = new ObjectId().toString()
|
||||||
const userId = new ObjectId().toString()
|
const userId = new ObjectId().toString()
|
||||||
|
@ -66,7 +66,7 @@ describe('RangeManager', function () {
|
||||||
],
|
],
|
||||||
comments: [
|
comments: [
|
||||||
{
|
{
|
||||||
id: new ObjectId(commentId),
|
id: new ObjectId(threadId),
|
||||||
op: { c: 'foo', p: 3, t: new ObjectId(threadId) },
|
op: { c: 'foo', p: 3, t: new ObjectId(threadId) },
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
|
@ -110,7 +110,6 @@ describe('RangeManager', function () {
|
||||||
|
|
||||||
return it('should be consistent when transformed through json -> mongo -> json', function () {
|
return it('should be consistent when transformed through json -> mongo -> json', function () {
|
||||||
const changeId = new ObjectId().toString()
|
const changeId = new ObjectId().toString()
|
||||||
const commentId = new ObjectId().toString()
|
|
||||||
const userId = new ObjectId().toString()
|
const userId = new ObjectId().toString()
|
||||||
const threadId = new ObjectId().toString()
|
const threadId = new ObjectId().toString()
|
||||||
const ts = new Date().toJSON()
|
const ts = new Date().toJSON()
|
||||||
|
@@ -127,7 +126,7 @@ describe('RangeManager', function () {
         ],
         comments: [
           {
-            id: commentId,
+            id: threadId,
             op: { c: 'foo', p: 3, t: threadId },
           },
         ],

@@ -142,6 +141,7 @@ describe('RangeManager', function () {

   return describe('shouldUpdateRanges', function () {
     beforeEach(function () {
+      const threadId = new ObjectId()
       this.ranges = {
         changes: [
           {

@@ -155,8 +155,8 @@ describe('RangeManager', function () {
         ],
         comments: [
           {
-            id: new ObjectId(),
-            op: { c: 'foo', p: 3, t: new ObjectId() },
+            id: threadId,
+            op: { c: 'foo', p: 3, t: threadId },
           },
         ],
       }

@@ -1,3 +1,4 @@
+const OError = require('@overleaf/o-error')
 const DMP = require('diff-match-patch')
 const { TextOperation } = require('overleaf-editor-core')
 const dmp = new DMP()

@@ -38,23 +39,62 @@ module.exports = {
     return ops
   },

-  diffAsHistoryV1EditOperation(before, after) {
-    const diffs = dmp.diff_main(before, after)
+  /**
+   * @param {import("overleaf-editor-core").StringFileData} file
+   * @param {string} after
+   * @return {TextOperation}
+   */
+  diffAsHistoryOTEditOperation(file, after) {
+    const beforeWithoutTrackedDeletes = file.getContent({
+      filterTrackedDeletes: true,
+    })
+    const diffs = dmp.diff_main(beforeWithoutTrackedDeletes, after)
     dmp.diff_cleanupSemantic(diffs)

+    const trackedChanges = file.trackedChanges.asSorted()
+    let nextTc = trackedChanges.shift()
+
     const op = new TextOperation()
     for (const diff of diffs) {
-      const [type, content] = diff
+      let [type, content] = diff
       if (type === this.ADDED) {
         op.insert(content)
-      } else if (type === this.REMOVED) {
-        op.remove(content.length)
-      } else if (type === this.UNCHANGED) {
-        op.retain(content.length)
+      } else if (type === this.REMOVED || type === this.UNCHANGED) {
+        while (op.baseLength + content.length > nextTc?.range.start) {
+          if (nextTc.tracking.type === 'delete') {
+            const untilRange = nextTc.range.start - op.baseLength
+            if (type === this.REMOVED) {
+              op.remove(untilRange)
+            } else if (type === this.UNCHANGED) {
+              op.retain(untilRange)
+            }
+            op.retain(nextTc.range.end - nextTc.range.start)
+            content = content.slice(untilRange)
+          }
+          nextTc = trackedChanges.shift()
+        }
+        if (type === this.REMOVED) {
+          op.remove(content.length)
+        } else if (type === this.UNCHANGED) {
+          op.retain(content.length)
+        }
       } else {
         throw new Error('Unknown type')
       }
     }
+    while (nextTc) {
+      if (
+        nextTc.tracking.type !== 'delete' ||
+        nextTc.range.start !== op.baseLength
+      ) {
+        throw new OError(
+          'StringFileData.trackedChanges out of sync: unexpected range after end of diff',
+          { nextTc, baseLength: op.baseLength }
+        )
+      }
+      op.retain(nextTc.range.end - nextTc.range.start)
+      nextTc = trackedChanges.shift()
+    }
     return op
   },
 }

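The new `diffAsHistoryOTEditOperation` diffs against the content with tracked deletes filtered out, then stretches the resulting operation back over the full stored string by retaining every tracked-delete range it passes. A minimal sketch of that invariant, assuming the `StringFileData` raw format used elsewhere in this diff (the require path is illustrative):

```js
const { StringFileData } = require('overleaf-editor-core')
const DiffCodec = require('./app/js/DiffCodec') // illustrative path

// "helloworld" where "world" is a tracked delete: the user sees "hello".
const file = StringFileData.fromRaw({
  content: 'helloworld',
  trackedChanges: [
    {
      range: { pos: 5, length: 5 },
      tracking: { type: 'delete', userId: 'u1', ts: '2025-01-01T00:00:00.000Z' },
    },
  ],
})

// Diffing the visible text "hello" against "hello!" yields retain(5), insert('!');
// the trailing while loop then retains the 5-character tracked delete so the
// operation spans the whole stored string.
const op = DiffCodec.diffAsHistoryOTEditOperation(file, 'hello!')
console.log(op.baseLength) // 10, not 5: tracked deletes stay covered
```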
@@ -194,9 +194,8 @@ const DocumentManager = {
     let op
     if (type === 'history-ot') {
       const file = StringFileData.fromRaw(oldLines)
-      const operation = DiffCodec.diffAsHistoryV1EditOperation(
-        // TODO(24596): tc support for history-ot
-        file.getContent({ filterTrackedDeletes: true }),
+      const operation = DiffCodec.diffAsHistoryOTEditOperation(
+        file,
         newLines.join('\n')
       )
       if (operation.isNoop()) {

@@ -536,11 +535,6 @@ const DocumentManager = {
     if (opts.historyRangesMigration) {
       historyRangesSupport = opts.historyRangesMigration === 'forwards'
     }
-    if (!Array.isArray(lines)) {
-      const file = StringFileData.fromRaw(lines)
-      // TODO(24596): tc support for history-ot
-      lines = file.getLines()
-    }

     await ProjectHistoryRedisManager.promises.queueResyncDocContent(
       projectId,

@@ -28,4 +28,19 @@ module.exports = {
     // since we didn't hit the limit in the loop, the document is within the allowed length
     return false
   },
+
+  /**
+   * @param {StringFileRawData} raw
+   * @param {number} maxDocLength
+   */
+  stringFileDataContentIsTooLarge(raw, maxDocLength) {
+    let n = raw.content.length
+    if (n <= maxDocLength) return false // definitely under the limit, no need to calculate the total size
+    for (const tc of raw.trackedChanges ?? []) {
+      if (tc.tracking.type !== 'delete') continue
+      n -= tc.range.length
+      if (n <= maxDocLength) return false // under the limit now, no need to calculate the exact size
+    }
+    return true
+  },
 }

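`stringFileDataContentIsTooLarge` deliberately avoids computing an exact visible length: it starts from the raw content length and only subtracts tracked-delete ranges until the count drops under the limit, returning early in the common case. A small sketch of the behaviour (the require path is illustrative):

```js
const { stringFileDataContentIsTooLarge } = require('./app/js/Limits') // illustrative path

const raw = {
  content: 'x'.repeat(101),
  trackedChanges: [
    {
      range: { pos: 10, length: 5 },
      tracking: { type: 'delete', userId: 'u1', ts: '2025-01-01T00:00:00.000Z' },
    },
  ],
}

// 101 raw characters, but 5 of them are a tracked delete: 96 <= 100,
// so the early-exit loop returns false on its first iteration.
console.log(stringFileDataContentIsTooLarge(raw, 100)) // false
```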
@@ -8,13 +8,14 @@ const rclient = require('@overleaf/redis-wrapper').createClient(
 )
 const logger = require('@overleaf/logger')
 const metrics = require('./Metrics')
-const { docIsTooLarge } = require('./Limits')
+const { docIsTooLarge, stringFileDataContentIsTooLarge } = require('./Limits')
 const { addTrackedDeletesToContent, extractOriginOrSource } = require('./Utils')
 const HistoryConversions = require('./HistoryConversions')
 const OError = require('@overleaf/o-error')

 /**
  * @import { Ranges } from './types'
+ * @import { StringFileRawData } from 'overleaf-editor-core/lib/types'
  */

 const ProjectHistoryRedisManager = {

@@ -180,7 +181,7 @@ const ProjectHistoryRedisManager = {
   * @param {string} projectId
   * @param {string} projectHistoryId
   * @param {string} docId
-  * @param {string[]} lines
+  * @param {string[] | StringFileRawData} lines
   * @param {Ranges} ranges
   * @param {string[]} resolvedCommentIds
   * @param {number} version

@@ -204,13 +205,8 @@ const ProjectHistoryRedisManager = {
       'queue doc content resync'
     )

-    let content = lines.join('\n')
-    if (historyRangesSupport) {
-      content = addTrackedDeletesToContent(content, ranges.changes ?? [])
-    }
-
     const projectUpdate = {
-      resyncDocContent: { content, version },
+      resyncDocContent: { version },
       projectHistoryId,
       path: pathname,
       doc: docId,

@@ -219,17 +215,38 @@ const ProjectHistoryRedisManager = {
       },
     }

-    if (historyRangesSupport) {
-      projectUpdate.resyncDocContent.ranges =
-        HistoryConversions.toHistoryRanges(ranges)
-      projectUpdate.resyncDocContent.resolvedCommentIds = resolvedCommentIds
+    let content = ''
+    if (Array.isArray(lines)) {
+      content = lines.join('\n')
+      if (historyRangesSupport) {
+        content = addTrackedDeletesToContent(content, ranges.changes ?? [])
+        projectUpdate.resyncDocContent.ranges =
+          HistoryConversions.toHistoryRanges(ranges)
+        projectUpdate.resyncDocContent.resolvedCommentIds = resolvedCommentIds
+      }
+    } else {
+      content = lines.content
+      projectUpdate.resyncDocContent.historyOTRanges = {
+        comments: lines.comments,
+        trackedChanges: lines.trackedChanges,
+      }
     }
+    projectUpdate.resyncDocContent.content = content

     const jsonUpdate = JSON.stringify(projectUpdate)
     // Do an optimised size check on the docLines using the serialised
     // project update length as an upper bound
     const sizeBound = jsonUpdate.length
-    if (docIsTooLarge(sizeBound, lines, Settings.max_doc_length)) {
+    if (Array.isArray(lines)) {
+      if (docIsTooLarge(sizeBound, lines, Settings.max_doc_length)) {
+        throw new OError(
+          'blocking resync doc content insert into project history queue: doc is too large',
+          { projectId, docId, docSize: sizeBound }
+        )
+      }
+    } else if (
+      stringFileDataContentIsTooLarge(lines, Settings.max_doc_length)
+    ) {
       throw new OError(
         'blocking resync doc content insert into project history queue: doc is too large',
         { projectId, docId, docSize: sizeBound }

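After this change `queueResyncDocContent` emits one of two `resyncDocContent` shapes, depending on whether the document is still line-based or already migrated to history-OT. A sketch of the two payloads (field names taken from the code above; values illustrative):

```js
// Classic docs: lines is a string[]; ranges data is only attached when
// historyRangesSupport is enabled.
const classicUpdate = {
  resyncDocContent: {
    version: 42,
    ranges: { /* HistoryConversions.toHistoryRanges(ranges) */ },
    resolvedCommentIds: ['comment-id'],
    content: 'one\ntwo',
  },
  // ...projectHistoryId, path, doc, meta as before
}

// History-OT docs: lines is a StringFileRawData; comments and tracked
// changes travel under historyOTRanges instead.
const historyOTUpdate = {
  resyncDocContent: {
    version: 42,
    historyOTRanges: {
      comments: [/* lines.comments */],
      trackedChanges: [/* lines.trackedChanges */],
    },
    content: 'onedeleted\ntwo',
  },
  // ...projectHistoryId, path, doc, meta as before
}
```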
@@ -28,12 +28,15 @@ services:
       MOCHA_GREP: ${MOCHA_GREP}
       NODE_ENV: test
       NODE_OPTIONS: "--unhandled-rejections=strict"
+    volumes:
+      - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
     depends_on:
       mongo:
         condition: service_started
       redis:
         condition: service_healthy
     user: node
+    entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
     command: npm run test:acceptance

@@ -45,7 +48,7 @@ services:
     command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
     user: root
   redis:
-    image: redis
+    image: redis:7.4.3
     healthcheck:
       test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ]
       interval: 1s

@@ -26,6 +26,7 @@ services:
       - .:/overleaf/services/document-updater
       - ../../node_modules:/overleaf/node_modules
      - ../../libraries:/overleaf/libraries
+      - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
     working_dir: /overleaf/services/document-updater
     environment:
       ELASTIC_SEARCH_DSN: es:9200

@@ -45,10 +46,11 @@ services:
         condition: service_started
       redis:
         condition: service_healthy
+    entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
     command: npm run --silent test:acceptance

   redis:
-    image: redis
+    image: redis:7.4.3
     healthcheck:
       test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ]
       interval: 1s

@@ -15,6 +15,7 @@ const request = require('requestretry').defaults({
   retryDelay: 10,
 })

+const ONLY_PROJECT_ID = process.env.ONLY_PROJECT_ID
 const AUTO_FIX_VERSION_MISMATCH =
   process.env.AUTO_FIX_VERSION_MISMATCH === 'true'
 const AUTO_FIX_PARTIALLY_DELETED_DOC_METADATA =

@@ -319,10 +320,12 @@ async function processProject(projectId) {
  * @return {Promise<{perIterationOutOfSync: number, done: boolean}>}
  */
 async function scanOnce(processed, outOfSync) {
-  const projectIds = await ProjectFlusher.promises.flushAllProjects({
-    limit: LIMIT,
-    dryRun: true,
-  })
+  const projectIds = ONLY_PROJECT_ID
+    ? [ONLY_PROJECT_ID]
+    : await ProjectFlusher.promises.flushAllProjects({
+        limit: LIMIT,
+        dryRun: true,
+      })

   let perIterationOutOfSync = 0
   for (const projectId of projectIds) {

@@ -686,4 +686,285 @@ describe('Setting a document', function () {
       })
     })
   })
+
+  describe('with track changes (history-ot)', function () {
+    const lines = ['one', 'one and a half', 'two', 'three']
+    const userId = DocUpdaterClient.randomId()
+    const ts = new Date().toISOString()
+    beforeEach(function (done) {
+      numberOfReceivedUpdates = 0
+      this.newLines = ['one', 'two', 'three']
+      this.project_id = DocUpdaterClient.randomId()
+      this.doc_id = DocUpdaterClient.randomId()
+      this.historyOTUpdate = {
+        doc: this.doc_id,
+        op: [
+          {
+            textOperation: [
+              4,
+              {
+                r: 'one and a half\n'.length,
+                tracking: {
+                  type: 'delete',
+                  userId,
+                  ts,
+                },
+              },
+              9,
+            ],
+          },
+        ],
+        v: this.version,
+        meta: { source: 'random-publicId' },
+      }
+      MockWebApi.insertDoc(this.project_id, this.doc_id, {
+        lines,
+        version: this.version,
+        otMigrationStage: 1,
+      })
+      DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
+        if (error) {
+          throw error
+        }
+        DocUpdaterClient.sendUpdate(
+          this.project_id,
+          this.doc_id,
+          this.historyOTUpdate,
+          error => {
+            if (error) {
+              throw error
+            }
+            DocUpdaterClient.waitForPendingUpdates(
+              this.project_id,
+              this.doc_id,
+              done
+            )
+          }
+        )
+      })
+    })
+
+    afterEach(function () {
+      MockProjectHistoryApi.flushProject.resetHistory()
+      MockWebApi.setDocument.resetHistory()
+    })
+
+    it('should record tracked changes', function (done) {
+      docUpdaterRedis.get(
+        Keys.docLines({ doc_id: this.doc_id }),
+        (error, data) => {
+          if (error) {
+            throw error
+          }
+          expect(JSON.parse(data)).to.deep.equal({
+            content: lines.join('\n'),
+            trackedChanges: [
+              {
+                range: {
+                  pos: 4,
+                  length: 15,
+                },
+                tracking: {
+                  ts,
+                  type: 'delete',
+                  userId,
+                },
+              },
+            ],
+          })
+          done()
+        }
+      )
+    })
+
+    it('should apply the change', function (done) {
+      DocUpdaterClient.getDoc(
+        this.project_id,
+        this.doc_id,
+        (error, res, data) => {
+          if (error) {
+            throw error
+          }
+          expect(data.lines).to.deep.equal(this.newLines)
+          done()
+        }
+      )
+    })
+
+    const cases = [
+      {
+        name: 'when resetting the content',
+        lines,
+        want: {
+          content: 'one\none and a half\none and a half\ntwo\nthree',
+          trackedChanges: [
+            {
+              range: {
+                pos: 'one and a half\n'.length + 4,
+                length: 15,
+              },
+              tracking: {
+                ts,
+                type: 'delete',
+                userId,
+              },
+            },
+          ],
+        },
+      },
+      {
+        name: 'when adding content before a tracked delete',
+        lines: ['one', 'INSERT', 'two', 'three'],
+        want: {
+          content: 'one\nINSERT\none and a half\ntwo\nthree',
+          trackedChanges: [
+            {
+              range: {
+                pos: 'INSERT\n'.length + 4,
+                length: 15,
+              },
+              tracking: {
+                ts,
+                type: 'delete',
+                userId,
+              },
+            },
+          ],
+        },
+      },
+      {
+        name: 'when adding content after a tracked delete',
+        lines: ['one', 'two', 'INSERT', 'three'],
+        want: {
+          content: 'one\none and a half\ntwo\nINSERT\nthree',
+          trackedChanges: [
+            {
+              range: {
+                pos: 4,
+                length: 15,
+              },
+              tracking: {
+                ts,
+                type: 'delete',
+                userId,
+              },
+            },
+          ],
+        },
+      },
+      {
+        name: 'when deleting content before a tracked delete',
+        lines: ['two', 'three'],
+        want: {
+          content: 'one and a half\ntwo\nthree',
+          trackedChanges: [
+            {
+              range: {
+                pos: 0,
+                length: 15,
+              },
+              tracking: {
+                ts,
+                type: 'delete',
+                userId,
+              },
+            },
+          ],
+        },
+      },
+      {
+        name: 'when deleting content after a tracked delete',
+        lines: ['one', 'two'],
+        want: {
+          content: 'one\none and a half\ntwo',
+          trackedChanges: [
+            {
+              range: {
+                pos: 4,
+                length: 15,
+              },
+              tracking: {
+                ts,
+                type: 'delete',
+                userId,
+              },
+            },
+          ],
+        },
+      },
+      {
+        name: 'when deleting content immediately after a tracked delete',
+        lines: ['one', 'three'],
+        want: {
+          content: 'one\none and a half\nthree',
+          trackedChanges: [
+            {
+              range: {
+                pos: 4,
+                length: 15,
+              },
+              tracking: {
+                ts,
+                type: 'delete',
+                userId,
+              },
+            },
+          ],
+        },
+      },
+      {
+        name: 'when deleting content across a tracked delete',
+        lines: ['onethree'],
+        want: {
+          content: 'oneone and a half\nthree',
+          trackedChanges: [
+            {
+              range: {
+                pos: 3,
+                length: 15,
+              },
+              tracking: {
+                ts,
+                type: 'delete',
+                userId,
+              },
+            },
+          ],
+        },
+      },
+    ]
+
+    for (const { name, lines, want } of cases) {
+      describe(name, function () {
+        beforeEach(function (done) {
+          DocUpdaterClient.setDocLines(
+            this.project_id,
+            this.doc_id,
+            lines,
+            this.source,
+            userId,
+            false,
+            (error, res, body) => {
+              if (error) {
+                return done(error)
+              }
+              this.statusCode = res.statusCode
+              this.body = body
+              done()
+            }
+          )
+        })
+        it('should update accordingly', function (done) {
+          docUpdaterRedis.get(
+            Keys.docLines({ doc_id: this.doc_id }),
+            (error, data) => {
+              if (error) {
+                throw error
+              }
+              expect(JSON.parse(data)).to.deep.equal(want)
+              done()
+            }
+          )
+        })
+      })
+    }
+  })
 })

@@ -81,4 +81,88 @@ describe('Limits', function () {
       })
     })
   })
+
+  describe('stringFileDataContentIsTooLarge', function () {
+    it('should handle small docs', function () {
+      expect(
+        this.Limits.stringFileDataContentIsTooLarge({ content: '' }, 123)
+      ).to.equal(false)
+    })
+    it('should handle docs at the limit', function () {
+      expect(
+        this.Limits.stringFileDataContentIsTooLarge(
+          { content: 'x'.repeat(123) },
+          123
+        )
+      ).to.equal(false)
+    })
+    it('should handle docs above the limit', function () {
+      expect(
+        this.Limits.stringFileDataContentIsTooLarge(
+          { content: 'x'.repeat(123 + 1) },
+          123
+        )
+      ).to.equal(true)
+    })
+    it('should handle docs above the limit and below with tracked-deletes removed', function () {
+      expect(
+        this.Limits.stringFileDataContentIsTooLarge(
+          {
+            content: 'x'.repeat(123 + 1),
+            trackedChanges: [
+              {
+                range: { pos: 1, length: 1 },
+                tracking: {
+                  type: 'delete',
+                  ts: '2025-06-16T14:31:44.910Z',
+                  userId: 'user-id',
+                },
+              },
+            ],
+          },
+          123
+        )
+      ).to.equal(false)
+    })
+    it('should handle docs above the limit and above with tracked-deletes removed', function () {
+      expect(
+        this.Limits.stringFileDataContentIsTooLarge(
+          {
+            content: 'x'.repeat(123 + 2),
+            trackedChanges: [
+              {
+                range: { pos: 1, length: 1 },
+                tracking: {
+                  type: 'delete',
+                  ts: '2025-06-16T14:31:44.910Z',
+                  userId: 'user-id',
+                },
+              },
+            ],
+          },
+          123
+        )
+      ).to.equal(true)
+    })
+    it('should handle docs above the limit and with tracked-inserts', function () {
+      expect(
+        this.Limits.stringFileDataContentIsTooLarge(
+          {
+            content: 'x'.repeat(123 + 1),
+            trackedChanges: [
+              {
+                range: { pos: 1, length: 1 },
+                tracking: {
+                  type: 'insert',
+                  ts: '2025-06-16T14:31:44.910Z',
+                  userId: 'user-id',
+                },
+              },
+            ],
+          },
+          123
+        )
+      ).to.equal(true)
+    })
+  })
 })

@@ -15,6 +15,7 @@ describe('ProjectHistoryRedisManager', function () {

     this.Limits = {
       docIsTooLarge: sinon.stub().returns(false),
+      stringFileDataContentIsTooLarge: sinon.stub().returns(false),
     }

     this.ProjectHistoryRedisManager = SandboxedModule.require(modulePath, {

@@ -61,22 +62,18 @@ describe('ProjectHistoryRedisManager', function () {
     })

     it('should queue an update', function () {
-      this.multi.rpush
-        .calledWithExactly(
-          `ProjectHistory:Ops:${this.project_id}`,
-          this.ops[0],
-          this.ops[1]
-        )
-        .should.equal(true)
+      this.multi.rpush.should.have.been.calledWithExactly(
+        `ProjectHistory:Ops:${this.project_id}`,
+        this.ops[0],
+        this.ops[1]
+      )
     })

     it('should set the queue timestamp if not present', function () {
-      this.multi.setnx
-        .calledWithExactly(
-          `ProjectHistory:FirstOpTimestamp:${this.project_id}`,
-          Date.now()
-        )
-        .should.equal(true)
+      this.multi.setnx.should.have.been.calledWithExactly(
+        `ProjectHistory:FirstOpTimestamp:${this.project_id}`,
+        Date.now()
+      )
     })
   })

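The assertion rewrites in this test file swap sinon's boolean-returning spy checks for sinon-chai matchers, which report the actual and expected call arguments on failure instead of a bare `expected false to equal true`. A minimal sketch of the difference, assuming chai, sinon and sinon-chai are wired up as in these tests:

```js
const sinon = require('sinon')
const chai = require('chai')
chai.use(require('sinon-chai'))
chai.should()

const rpush = sinon.stub()
rpush('ProjectHistory:Ops:p1', 'op-a')

// Old style: on mismatch this only reports `expected false to equal true`.
rpush.calledWithExactly('ProjectHistory:Ops:p1', 'op-a').should.equal(true)

// New style: sinon-chai prints the stub's actual calls when this fails.
rpush.should.have.been.calledWithExactly('ProjectHistory:Ops:p1', 'op-a')
```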
@@ -118,9 +115,10 @@ describe('ProjectHistoryRedisManager', function () {
         file: this.file_id,
       }

-      this.ProjectHistoryRedisManager.promises.queueOps
-        .calledWithExactly(this.project_id, JSON.stringify(update))
-        .should.equal(true)
+      this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
+        this.project_id,
+        JSON.stringify(update)
+      )
     })
   })

@@ -166,9 +164,10 @@ describe('ProjectHistoryRedisManager', function () {
         doc: this.doc_id,
       }

-      this.ProjectHistoryRedisManager.promises.queueOps
-        .calledWithExactly(this.project_id, JSON.stringify(update))
-        .should.equal(true)
+      this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
+        this.project_id,
+        JSON.stringify(update)
+      )
     })

     it('should queue an update with file metadata', async function () {

@@ -350,9 +349,10 @@ describe('ProjectHistoryRedisManager', function () {
         doc: this.doc_id,
       }

-      this.ProjectHistoryRedisManager.promises.queueOps
-        .calledWithExactly(this.project_id, JSON.stringify(update))
-        .should.equal(true)
+      this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
+        this.project_id,
+        JSON.stringify(update)
+      )
     })

     it('should not forward ranges if history ranges support is undefined', async function () {

@@ -402,9 +402,10 @@ describe('ProjectHistoryRedisManager', function () {
         doc: this.doc_id,
       }

-      this.ProjectHistoryRedisManager.promises.queueOps
-        .calledWithExactly(this.project_id, JSON.stringify(update))
-        .should.equal(true)
+      this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
+        this.project_id,
+        JSON.stringify(update)
+      )
     })

     it('should pass "false" as the createdBlob field if not provided', async function () {

@@ -432,9 +433,10 @@ describe('ProjectHistoryRedisManager', function () {
         doc: this.doc_id,
       }

-      this.ProjectHistoryRedisManager.promises.queueOps
-        .calledWithExactly(this.project_id, JSON.stringify(update))
-        .should.equal(true)
+      this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
+        this.project_id,
+        JSON.stringify(update)
+      )
     })

     it('should pass through the value of the createdBlob field', async function () {

|
||||||
doc: this.doc_id,
|
doc: this.doc_id,
|
||||||
}
|
}
|
||||||
|
|
||||||
this.ProjectHistoryRedisManager.promises.queueOps
|
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
|
||||||
.calledWithExactly(this.project_id, JSON.stringify(update))
|
this.project_id,
|
||||||
.should.equal(true)
|
JSON.stringify(update)
|
||||||
|
)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
@@ -493,8 +496,8 @@ describe('ProjectHistoryRedisManager', function () {
     beforeEach(async function () {
       this.update = {
         resyncDocContent: {
-          content: 'one\ntwo',
           version: this.version,
+          content: 'one\ntwo',
         },
         projectHistoryId: this.projectHistoryId,
         path: this.pathname,

@@ -516,19 +519,18 @@ describe('ProjectHistoryRedisManager', function () {
     })

     it('should check if the doc is too large', function () {
-      this.Limits.docIsTooLarge
-        .calledWith(
-          JSON.stringify(this.update).length,
-          this.lines,
-          this.settings.max_doc_length
-        )
-        .should.equal(true)
+      this.Limits.docIsTooLarge.should.have.been.calledWith(
+        JSON.stringify(this.update).length,
+        this.lines,
+        this.settings.max_doc_length
+      )
     })

     it('should queue an update', function () {
-      this.ProjectHistoryRedisManager.promises.queueOps
-        .calledWithExactly(this.project_id, JSON.stringify(this.update))
-        .should.equal(true)
+      this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
+        this.project_id,
+        JSON.stringify(this.update)
+      )
     })
   })

@@ -551,9 +553,8 @@ describe('ProjectHistoryRedisManager', function () {
     })

     it('should not queue an update if the doc is too large', function () {
-      this.ProjectHistoryRedisManager.promises.queueOps.called.should.equal(
-        false
-      )
+      this.ProjectHistoryRedisManager.promises.queueOps.should.not.have.been
+        .called
     })
   })

@@ -561,10 +562,10 @@ describe('ProjectHistoryRedisManager', function () {
     beforeEach(async function () {
       this.update = {
         resyncDocContent: {
-          content: 'onedeleted\ntwo',
           version: this.version,
           ranges: this.ranges,
           resolvedCommentIds: this.resolvedCommentIds,
+          content: 'onedeleted\ntwo',
         },
         projectHistoryId: this.projectHistoryId,
         path: this.pathname,

@@ -601,9 +602,76 @@ describe('ProjectHistoryRedisManager', function () {
     })

     it('should queue an update', function () {
-      this.ProjectHistoryRedisManager.promises.queueOps
-        .calledWithExactly(this.project_id, JSON.stringify(this.update))
-        .should.equal(true)
+      this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
+        this.project_id,
+        JSON.stringify(this.update)
+      )
+    })
+  })
+
+  describe('history-ot', function () {
+    beforeEach(async function () {
+      this.lines = {
+        content: 'onedeleted\ntwo',
+        comments: [{ id: 'id1', ranges: [{ pos: 0, length: 3 }] }],
+        trackedChanges: [
+          {
+            range: { pos: 3, length: 7 },
+            tracking: {
+              type: 'delete',
+              userId: 'user-id',
+              ts: '2025-06-16T14:31:44.910Z',
+            },
+          },
+        ],
+      }
+      this.update = {
+        resyncDocContent: {
+          version: this.version,
+          historyOTRanges: {
+            comments: this.lines.comments,
+            trackedChanges: this.lines.trackedChanges,
+          },
+          content: this.lines.content,
+        },
+        projectHistoryId: this.projectHistoryId,
+        path: this.pathname,
+        doc: this.doc_id,
+        meta: { ts: new Date() },
+      }
+
+      await this.ProjectHistoryRedisManager.promises.queueResyncDocContent(
+        this.project_id,
+        this.projectHistoryId,
+        this.doc_id,
+        this.lines,
+        this.ranges,
+        this.resolvedCommentIds,
+        this.version,
+        this.pathname,
+        true
+      )
+    })
+
+    it('should include tracked deletes in the update', function () {
+      this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
+        this.project_id,
+        JSON.stringify(this.update)
+      )
+    })
+
+    it('should check the doc length without tracked deletes', function () {
+      this.Limits.stringFileDataContentIsTooLarge.should.have.been.calledWith(
+        this.lines,
+        this.settings.max_doc_length
+      )
+    })
+
+    it('should queue an update', function () {
+      this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
+        this.project_id,
+        JSON.stringify(this.update)
+      )
     })
   })
 })

@@ -111,6 +111,11 @@ if (settings.filestore.stores.template_files) {
     keyBuilder.templateFileKeyMiddleware,
     fileController.insertFile
   )
+  app.delete(
+    '/template/:template_id/v/:version/:format',
+    keyBuilder.templateFileKeyMiddleware,
+    fileController.deleteFile
+  )
 }

 app.get(

@@ -5,7 +5,7 @@ const { callbackify } = require('node:util')
 const safeExec = require('./SafeExec').promises
 const { ConversionError } = require('./Errors')

-const APPROVED_FORMATS = ['png']
+const APPROVED_FORMATS = ['png', 'jpg']
 const FOURTY_SECONDS = 40 * 1000
 const KILL_SIGNAL = 'SIGTERM'

@@ -34,16 +34,14 @@ async function convert(sourcePath, requestedFormat) {
 }

 async function thumbnail(sourcePath) {
-  const width = '260x'
-  return await convert(sourcePath, 'png', [
+  const width = '548x'
+  return await _convert(sourcePath, 'jpg', [
     'convert',
     '-flatten',
     '-background',
     'white',
     '-density',
     '300',
-    '-define',
-    `pdf:fit-page=${width}`,
     `${sourcePath}[0]`,
     '-resize',
     width,

@@ -51,16 +49,14 @@ async function thumbnail(sourcePath) {
 }

 async function preview(sourcePath) {
-  const width = '548x'
-  return await convert(sourcePath, 'png', [
+  const width = '794x'
+  return await _convert(sourcePath, 'jpg', [
     'convert',
     '-flatten',
     '-background',
     'white',
     '-density',
     '300',
-    '-define',
-    `pdf:fit-page=${width}`,
     `${sourcePath}[0]`,
     '-resize',
     width,

@@ -150,7 +150,9 @@ async function _getConvertedFileAndCache(bucket, key, convertedKey, opts) {
   let convertedFsPath
   try {
     convertedFsPath = await _convertFile(bucket, key, opts)
-    await ImageOptimiser.promises.compressPng(convertedFsPath)
+    if (convertedFsPath.toLowerCase().endsWith(".png")) {
+      await ImageOptimiser.promises.compressPng(convertedFsPath)
+    }
     await PersistorManager.sendFile(bucket, convertedKey, convertedFsPath)
   } catch (err) {
     LocalFileWriter.deleteFile(convertedFsPath, () => {})

services/history-v1/api/app/rollout.js (new file, 76 lines)
@@ -0,0 +1,76 @@
+const crypto = require('node:crypto')
+
+class Rollout {
+  constructor(config) {
+    // The history buffer level is used to determine whether to queue changes
+    // in Redis or persist them directly to the chunk store.
+    // It defaults to 0 (no queuing) if not set.
+    this.historyBufferLevel = config.has('historyBufferLevel')
+      ? parseInt(config.get('historyBufferLevel'), 10)
+      : 0
+    // The forcePersistBuffer flag will ensure the buffer is fully persisted before
+    // any persist operation. Set this to true if you want to make the persisted-version
+    // in Redis match the endVersion of the latest chunk. This should be set to true
+    // when downgrading from a history buffer level that queues changes in Redis
+    // without persisting them immediately.
+    this.forcePersistBuffer = config.has('forcePersistBuffer')
+      ? config.get('forcePersistBuffer') === 'true'
+      : false
+
+    // Support gradual rollout of the next history buffer level
+    // with a percentage of projects using it.
+    this.nextHistoryBufferLevel = config.has('nextHistoryBufferLevel')
+      ? parseInt(config.get('nextHistoryBufferLevel'), 10)
+      : null
+    this.nextHistoryBufferLevelRolloutPercentage = config.has(
+      'nextHistoryBufferLevelRolloutPercentage'
+    )
+      ? parseInt(config.get('nextHistoryBufferLevelRolloutPercentage'), 10)
+      : 0
+  }
+
+  report(logger) {
+    logger.info(
+      {
+        historyBufferLevel: this.historyBufferLevel,
+        forcePersistBuffer: this.forcePersistBuffer,
+        nextHistoryBufferLevel: this.nextHistoryBufferLevel,
+        nextHistoryBufferLevelRolloutPercentage:
+          this.nextHistoryBufferLevelRolloutPercentage,
+      },
+      this.historyBufferLevel > 0 || this.forcePersistBuffer
+        ? 'using history buffer'
+        : 'history buffer disabled'
+    )
+  }
+
+  /**
+   * Get the history buffer level for a project.
+   * @param {string} projectId
+   * @returns {Object} - An object containing the history buffer level and force persist buffer flag.
+   * @property {number} historyBufferLevel - The history buffer level to use for processing changes.
+   * @property {boolean} forcePersistBuffer - If true, forces the buffer to be persisted before any operation.
+   */
+  getHistoryBufferLevelOptions(projectId) {
+    if (
+      this.nextHistoryBufferLevel > this.historyBufferLevel &&
+      this.nextHistoryBufferLevelRolloutPercentage > 0
+    ) {
+      const hash = crypto.createHash('sha1').update(projectId).digest('hex')
+      const percentage = parseInt(hash.slice(0, 8), 16) % 100
+      // If the project is in the rollout percentage, we use the next history buffer level.
+      if (percentage < this.nextHistoryBufferLevelRolloutPercentage) {
+        return {
+          historyBufferLevel: this.nextHistoryBufferLevel,
+          forcePersistBuffer: this.forcePersistBuffer,
+        }
+      }
+    }
+    return {
+      historyBufferLevel: this.historyBufferLevel,
+      forcePersistBuffer: this.forcePersistBuffer,
+    }
+  }
+}
+
+module.exports = Rollout

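A minimal usage sketch for the new `Rollout` class, assuming a node-config style object with `has()`/`get()` (the require paths are illustrative):

```js
const config = require('config')
const logger = require('@overleaf/logger')
const Rollout = require('./api/app/rollout') // illustrative path

const rollout = new Rollout(config)
rollout.report(logger) // log the configured levels once at startup

// The SHA-1 bucket keeps each project's assignment stable while
// nextHistoryBufferLevelRolloutPercentage ramps from 0 to 100.
const { historyBufferLevel, forcePersistBuffer } =
  rollout.getHistoryBufferLevelOptions('some-project-id')
```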