Compare commits


249 commits

Author SHA1 Message Date
yu-i-i
072f18107c Update README: variable to control JIT account creation in OIDC authentication 2025-06-24 03:20:51 +02:00
yu-i-i
370e822909 Introduce an environment variable to control user creation in OIDC authentication, closes #47 2025-06-24 02:45:48 +02:00
555cdf7d6e I need the --network=host settings for building the container 2025-06-24 02:44:19 +02:00
yu-i-i
ace526e55e Avoid DEP0174 by removing async from callback-based getGroupPolicyForUser 2025-06-24 02:22:39 +02:00
yu-i-i
860ee4b4c2 Track changes: use getTrackedChangesUserIds introduced in the mainstream 2025-06-24 02:22:39 +02:00
yu-i-i
e30d5b9db2 README.md: v5.5.0-v3 2025-06-24 02:22:38 +02:00
yu-i-i
3f33d17237 Fix missing Templates link on login page 2025-06-24 02:22:38 +02:00
yu-i-i
148fc1e64d Fix login page 2025-06-24 02:22:38 +02:00
yu-i-i
71f02142c8 Update README.md: Changes to Template Gallery and Sandboxed Compiles, v5.4.1-ext-v3 2025-06-24 02:22:38 +02:00
yu-i-i
5d0958759f Symbol Palette: make close button visible 2025-06-24 02:22:38 +02:00
yu-i-i
b6482407d0 Template Gallery: replace markdown-it with marked 2025-06-24 02:22:38 +02:00
yu-i-i
5110a55266 Refactor Template Gallery; resolves #38 and #39
- Replace free-text license input with a select box
- Improve visual presentation of modals and enhance keyboard interaction
2025-06-24 02:22:38 +02:00
yu-i-i
5504a3471e Refactor Sandboxed Compiles 2025-06-24 02:22:38 +02:00
yu-i-i
a0dd7169a0 README.md: v5.4.0-v2 2025-06-24 02:22:37 +02:00
yu-i-i
858c89d3b0 Symbol Palette: improve keyboard input experience 2025-06-24 02:22:37 +02:00
yu-i-i
f94eb73e05 Symbol Palette: get rid of @reach/tabs 2025-06-24 02:22:37 +02:00
yu-i-i
1a12fab4d1 Add 'poll' to clsi seccomp profile, fixes minted. Thanks, David. 2025-06-24 02:22:37 +02:00
yu-i-i
a5bf2f844e Update README.md: Template Gallery 2025-06-24 02:22:37 +02:00
yu-i-i
69b869c0aa Make Template Gallery optional; rename environment variables 2025-06-24 02:22:37 +02:00
yu-i-i
0035ab85b7 Add Template Gallery support 2025-06-24 02:22:37 +02:00
yu-i-i
dc6f0180d5 Enable From External URL feature 2025-06-24 02:22:36 +02:00
yu-i-i
44d64d6397 Minor changes in README.md 2025-06-24 02:22:36 +02:00
yu-i-i
16144d4db3 Track changes / comments: update backend to support frontend changes 2025-06-24 02:22:36 +02:00
yu-i-i
2d8593a7a3 Allow EXTERNAL_AUTH to be undefined, fixes #26 2025-06-24 02:22:36 +02:00
yu-i-i
5048b60b02 Clarify OIDC redirect URI in README.md (closes #28) 2025-06-24 02:22:36 +02:00
yu-i-i
38f045ade4 Symbol palette: switch to 'OL' UI components and apply minor cosmetic changes 2025-06-24 02:22:36 +02:00
yu-i-i
35f8549ffb Clarify OVERLEAF_OIDC_USER_ID_FIELD parameter usage (closes #24) 2025-06-24 02:22:36 +02:00
yu-i-i
462e13f661 Make OVERLEAF_OIDC_USER_ID_FIELD support 'email' as a value 2025-06-24 02:22:36 +02:00
yu-i-i
635964a830 See upstream commit 42ee56e 2025-06-24 02:22:36 +02:00
yu-i-i
4159568ebe Fix glitches in symbol palette after switching to Bootstrap 5 2025-06-24 02:22:36 +02:00
yu-i-i
3183fc14c1 Whitelist /oidc/login endpoint, fixes #21 2025-06-24 02:22:36 +02:00
yu-i-i
13e6d2c00f Update README.md (add ENV variables to control SAML signature validation) 2025-06-24 02:22:35 +02:00
yu-i-i
7fed874d53 Add ENV variables to control SAML signature validation 2025-06-24 02:22:35 +02:00
yu-i-i
534b9263d9 Update README.md 2025-06-24 02:22:35 +02:00
yu-i-i
4146c920cf Re-export doLogout (was removed from exports in commit b9fb636). 2025-06-24 02:22:35 +02:00
yu-i-i
b1d077cc8a Refactor authentication code; add OIDC support 2025-06-24 02:22:32 +02:00
yu-i-i
f3a11e3581 Allow adding extra flags to LaTeX compiler through environment variable 2025-06-24 02:21:40 +02:00
yu-i-i
3a018dd207 Update README.md 2025-06-24 02:21:40 +02:00
yu-i-i
9c8589bde6 Enable LDAP and SAML authentication support 2025-06-24 02:21:39 +02:00
yu-i-i
079f200939 Enable Symbol Palette 2025-06-24 02:21:39 +02:00
yu-i-i
a1dcdaf283 Allow selecting a TeX Live image for a project 2025-06-24 02:21:39 +02:00
Sam Van den Vonder
1a1851f658 Enable Sandboxed Compiles feature 2025-06-24 02:21:39 +02:00
yu-i-i
0faa9d5355 Enable autocomplete of reference keys feature 2025-06-24 02:21:39 +02:00
yu-i-i
44e59d5fe8 Enable track changes and comments feature 2025-06-24 02:21:39 +02:00
yu-i-i
0c90b4d100 Redirect non-existing links to Overleaf page 2025-06-24 02:21:38 +02:00
Tim Down
19dc71f414 Merge pull request #26456 from overleaf/td-limit-browser-translate-non-react-icons
Prevent browser translation of icons in Pug pages

GitOrigin-RevId: 97e4d3ba70a4c95bed2c9f52e66038911625613d
2025-06-23 08:05:06 +00:00
Miguel Serrano
069e42e763 Merge pull request #26556 from overleaf/add-chat-capability-sp
[web] Populate `chat` capability for CE/SP

GitOrigin-RevId: d9a3a9f6540552ad47b0c937115d0cf6318c49e9
2025-06-20 08:06:00 +00:00
Miguel Serrano
04fa5366ce Merge pull request #26070 from overleaf/msm-disable-dropbox
[web] Disable Dropbox Capability

GitOrigin-RevId: 5f91d2918bf3b88e52f4d27c828a4715f9b88629
2025-06-20 08:05:20 +00:00
David
edf4fdda50 Merge pull request #26491 from overleaf/dp-move-synctex
Move synctex controls lower to account for breadcrumbs in new editor

GitOrigin-RevId: 78ae0f6f1eb1384b8b3014ba4d1a0565ed3fd452
2025-06-20 08:05:16 +00:00
David
6e30a1a32d Merge pull request #26527 from overleaf/dp-errors-notification
Add promo for new error logs

GitOrigin-RevId: 68ce79653484dc018be302d753c572c39864c723
2025-06-20 08:05:11 +00:00
Eric Mc Sween
1042092144 Merge pull request #26547 from overleaf/em-revert-expiry-post
Revert "use POST requests for expiring redis buffer from cron"

GitOrigin-RevId: 95e9fd1be7b73699d6fac24035437d467c273d0a
2025-06-19 08:06:58 +00:00
Eric Mc Sween
150dfd6cba Merge pull request #26539 from overleaf/jpa-post
[history-v1] use POST requests for expiring redis buffer from cron

GitOrigin-RevId: 51c9a25b998e581ed20c0e113bd4989537a1e6ef
2025-06-19 08:06:50 +00:00
Eric Mc Sween
fd9fd9f0e7 Merge pull request #26545 from overleaf/jpa-fix-resync
[history-v1] use mongo projectId for initiating resync

GitOrigin-RevId: f93f2358695782fb222d23ba3720d98724b9a291
2025-06-19 08:06:45 +00:00
Jessica Lawshe
c9174cdecc Merge pull request #25965 from overleaf/jel-admin-ui-use-ukamf-settings
[web] Overleaf admin UI to toggle group SSO using UKAMF settings

GitOrigin-RevId: c4f976d8125c9a8e549c049841f5d7b58edf8192
2025-06-19 08:06:30 +00:00
Jessica Lawshe
a20a0923b7 Merge pull request #26207 from overleaf/jel-group-sso-ukamf-endpoints-tests
[web] Acceptance tests for option to use UKAMF path for group SSO

GitOrigin-RevId: 16d6669083c0e651adea755f1b24926838b0737a
2025-06-19 08:06:25 +00:00
Jessica Lawshe
91a308a62f Merge pull request #25868 from overleaf/jel-group-sso-ukamf-endpoints
[web] Option to use UKAMF path for group SSO

GitOrigin-RevId: 117fca5913682a02e9f8e8442eed13568f6551a1
2025-06-19 08:06:21 +00:00
Eric Mc Sween
7bdc4291fc Merge pull request #26533 from overleaf/jpa-queue
[history-v1] use POST requests for flushing history queues

GitOrigin-RevId: ae87a046a7844b25cc123d08ac6c8d1624608394
2025-06-19 08:06:16 +00:00
Rebeka Dekany
af99f736bd Create bug_report.md (#26525)
GitOrigin-RevId: 3795d1505d58bdbebc3d196e3a7709ba8ac05fd1
2025-06-19 08:06:08 +00:00
Domagoj Kriskovic
3a1ef872cd Check for existing reviewers when adding user to a project (#26463)
GitOrigin-RevId: 935335345461133fb8b08fa95f960f801b8775a7
2025-06-19 08:05:27 +00:00
Jakob Ackermann
4310d3ec88 [docstore] add runtime fix for mismatching comment vs thread ids (#26488)
GitOrigin-RevId: e7cefa88d125a73a26863e6fae8b49530efa2b4e
2025-06-19 08:05:22 +00:00
Jakob Ackermann
adf399fb95 [project-history] add support for resync of history-ot ranges (#26475)
* [project-history] add support for resync of history-ot ranges

* [project-history] avoid compressing sharejs and history-ot upgrades

* [document-updater] improve error message of some assertions

... by migrating the assertions like this:
```diff
-stub.calledWith().should.equal(true)
+stub.should.have.been.calledWith()
```
```diff
-stub.called.should.equal(false)
+stub.should.not.have.been.called
```

* [document-updater] move content field in resyncDocContent

* [document-updater] add support for resync of history-ot ranges

GitOrigin-RevId: e6104686a26934a5f25a8f095cbe00c163fbbaa7
2025-06-19 08:05:18 +00:00
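The assertion style adopted above is sinon-chai. A minimal standalone sketch of the setup those migrated assertions rely on (the chai/sinon wiring here is an assumption; only the assertion style is taken from the commit message):

```js
// Sketch: sinon-chai setup and the before/after assertion styles.
const chai = require('chai')
const sinon = require('sinon')
const sinonChai = require('sinon-chai')

chai.use(sinonChai)
chai.should()

const stub = sinon.stub()
stub('foo')

// Old style: boolean check, opaque failure message
stub.calledWith('foo').should.equal(true)

// Migrated style: sinon-chai reports expected vs. actual calls on failure
stub.should.have.been.calledWith('foo')
stub.should.have.been.calledOnce
```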
Jakob Ackermann
5b39c76aa8 Merge pull request #26506 from overleaf/em-lazy-string-file-data-store
Fix lazy file data truncation on store()

GitOrigin-RevId: 2316a096e6a365178afbded58351359893a36312
2025-06-19 08:05:13 +00:00
Rebeka Dekany
8423829714 Migrate subscription related pages to Bootstrap 5 (#26372)
* Enable Bootstrap 5 for the subscription error pages

* Override contact modal on the plans page

* Convert AI Assist related styles to CSS

* Extend single layout-website-redesign.pug template for both Bootstrap 3 and 5

* Formatting to tab indentation

* Add the switcher for AI Assist

* Fix translations in heading

GitOrigin-RevId: 54ddc35602831f1ec1fa83c9f67a091eefda7a77
2025-06-19 08:05:05 +00:00
Kristina
cc7c01132b Merge pull request #26193 from overleaf/ls-consider-group-plan-when-get-next-subscription-items
Consider group plan when getting next subscription items

GitOrigin-RevId: cbf05c1a7de9e957739273b865c335807a58d739
2025-06-19 08:04:59 +00:00
Rebeka Dekany
bf8abb3181
Update ISSUE_TEMPLATE.md 2025-06-18 14:20:53 +02:00
Antoine Clausse
494f0a4b1a [web] Rename rootDoc_id to rootDocId in the frontend code (#26337)
* Rename `rootDoc_id` to `rootDocId` in the frontend

* Update types

* Fix frontend test

GitOrigin-RevId: b755a4ebf7b8c0b8ed800d713bbae8cfcfdd5046
2025-06-18 08:07:05 +00:00
Kristina
0dab9369ee Merge pull request #26434 from overleaf/kh-limit-trial-extentions
[web] limit trial extensions

GitOrigin-RevId: 57973190cdb57a04ce6a2585394bcb38321838f7
2025-06-18 08:07:00 +00:00
Kristina
b15758da97 Merge pull request #26494 from overleaf/kh-fix-trial-transitions
[web] fix upgrades/downgrades while on trial

GitOrigin-RevId: 4076befc5dbbee32b0cf2a4ff99db96a0cf3ad8b
2025-06-18 08:06:55 +00:00
Antoine Clausse
3ba002460e [web] Remove instances of $scope and _ide (#26297)
* Remove `$scope` from `getMockIde`

* Replace `...getMockIde()` by `_id: projectId`

* Simplify stories decorator scope.tsx: less reliance on `window`

* Update helper editor-providers.jsx: pass data directly instead of using `window`

* Remove `cleanUpContext`

* Remove unused prop `clsiServerId`

* Update types to reflect unused properties

* Remove comment

* Add `ol-chatEnabled` in Storybook

* Revert moving `getMeta` outside of the component

This causes issues in Storybook

GitOrigin-RevId: dc2558ce814c2d738fb39450c57c104f4419efb8
2025-06-18 08:06:42 +00:00
David
a559cbb590 Merge pull request #26493 from overleaf/dp-raw-logs
Improvements to raw logs log entry ui in new editor

GitOrigin-RevId: 516094fadfa4db7e82431c91b766dbe7e378b4a7
2025-06-18 08:06:33 +00:00
Jessica Lawshe
4648661ce6 Merge pull request #26449 from overleaf/jel-password-reset-err-msg
[web] Fix message check when managed user linked to SSO tries to reset password

GitOrigin-RevId: f271cd38484c48418f60c28eab2c4863d2984313
2025-06-18 08:06:21 +00:00
Eric Mc Sween
f68bf5a69f Merge pull request #26477 from overleaf/em-redis-buffer-resync
Handle invalid content hash when persisting changes

GitOrigin-RevId: 5259190396c8c261cad1abcd5de66314c1e871fb
2025-06-18 08:06:17 +00:00
David
90309f59ae Merge pull request #26471 from overleaf/dp-update-editor-switcher-modal
Add line about new error logs to editor switcher modal

GitOrigin-RevId: 13ca361060ab1e9144c3a3d4ebc1fc487e6a8895
2025-06-18 08:06:08 +00:00
David
89937d9635 Merge pull request #26487 from overleaf/dp-error-log-clickbox
Expand clickable area of log header in new editor

GitOrigin-RevId: dbb24ea84d04b41ce779f0490b34d51f44164f9e
2025-06-18 08:06:03 +00:00
David
3eeee3b983 Merge pull request #26470 from overleaf/dp-labs-survey
Launch new editor redesign labs survey

GitOrigin-RevId: a39b319aecde7fa9a00fb0f7f77814f6a277ab6c
2025-06-18 08:05:58 +00:00
David
4e03e0fbe1 Merge pull request #26468 from overleaf/dp-error-colors
Small style updates to new error logs

GitOrigin-RevId: fdfe7489bc87733b065d08c77353dce9ab940fc2
2025-06-18 08:05:53 +00:00
David
dc252fe772 Merge pull request #26472 from overleaf/dp-full-project-search-shortcut
Add mod-shift-f shortcut to open full-project-search to new editor

GitOrigin-RevId: a843c408cc8bd30228cbb7bc17309f4f9ff355b3
2025-06-18 08:05:48 +00:00
Jakob Ackermann
30143ead97 [web] migration fixes (#26443)
* [web] fix typo in ESM migration of a db migration

* [web] migrate old migration to ESM

* [web] use batchedUpdate for bulk updates in old migrations

GitOrigin-RevId: a984f785c577c2ac4125c947b8a3efffa57e1eb7
2025-06-18 08:05:44 +00:00
Jakob Ackermann
982f647845 [web] copy duplicated comment id into id field (#26481)
GitOrigin-RevId: accd6d9dc0165ff2cc9f8edd28c42d38b76ba909
2025-06-18 08:05:39 +00:00
David
39b4aed85f Merge pull request #26479 from overleaf/dp-synctex
Add synctex buttons to new editor

GitOrigin-RevId: 7790c848f96d9b12d95f2f01c5048da2ea17d8b4
2025-06-18 08:05:33 +00:00
Eric Mc Sween
6f461564d5 Merge pull request #26367 from overleaf/em-history-ot-undo
Fixes to TextOperation invert algorithm

GitOrigin-RevId: dd655660f6ecad7b6e9b2d4435dc9a5364d0fde2
2025-06-17 08:06:11 +00:00
Jakob Ackermann
0c2f79b0b8 [misc] make log level configurable when running tests in ESM services (#26465)
GitOrigin-RevId: 153fa99731b2fbff6dfd5edc80ce3e8acf9fc5ce
2025-06-17 08:06:04 +00:00
Eric Mc Sween
0f330ef6a3 Merge pull request #26393 from overleaf/bg-history-redis-gradual-rollout
add gradual rollout mechanism for history-v1 rollout

GitOrigin-RevId: 5fa69f5c3874bd5df1f31fdd3115e4ba6a0dab51
2025-06-17 08:06:00 +00:00
Eric Mc Sween
9cb4ef4d7d Merge pull request #26353 from overleaf/bg-history-redis-extend-persist-worker
extend persist worker to make parallel requests

GitOrigin-RevId: 8def7d5a8b5c9fcbe5fe45ac8f3ace503d31877a
2025-06-17 08:05:55 +00:00
Kristina
6b38336c7b Merge pull request #26397 from overleaf/kh-use-new-price-lookups
[web] use new price lookup keys

GitOrigin-RevId: f4c077d946100862aaea0288d5035a34d6188e83
2025-06-17 08:05:51 +00:00
Rebeka Dekany
9aa261eaf6 Hide tooltips on the Esc key (#26305)
* Hide the tooltip when Esc key is pressed

* Simplify ActionsDropdown

* Rename to tooltipDescription

* Use OLTooltip instead of Tooltip

GitOrigin-RevId: ee27cde2735ae3a0de5e37bfb8ab1dd99069742c
2025-06-17 08:05:47 +00:00
David
aa4d8f4925 Merge pull request #26389 from overleaf/dp-last-suggested-fix
Update ui for previous suggested ai fix in new editor

GitOrigin-RevId: 2f4edbcb0614d560bad28f862408ed73d39e988c
2025-06-17 08:05:39 +00:00
David
b6fe6ae062 Merge pull request #26407 from overleaf/dp-review-panel-gap
Allow comments to be positioned at top of review panel in new editor

GitOrigin-RevId: 581bbf85cc54b68b54235123b14b1564ed019e6d
2025-06-17 08:05:35 +00:00
David
b14a131b43 Merge pull request #26398 from overleaf/dp-premium-button
Add upgrade button to new editor toolbar

GitOrigin-RevId: 6c4832ccb6c32e72a842671c35d2315e3b792c15
2025-06-17 08:05:31 +00:00
David
7ca01dc925 Merge pull request #26303 from overleaf/dp-ai-troubleshooting-tip
Update UI of "Start at first error" tip to match designs

GitOrigin-RevId: fd745d1cce7b21847041614af7313768c26b5d08
2025-06-17 08:05:27 +00:00
David
e1a3037ffa Merge pull request #26162 from overleaf/dp-error-logs-ai-2
Update error logs AI UI

GitOrigin-RevId: 6f9783090797e6d7d2fe01178f6945aa364ff4ac
2025-06-17 08:05:22 +00:00
Tim Down
6bde3acc62 Merge pull request #25973 from overleaf/td-restricted-home-link
Use a single link for home link in restricted page

GitOrigin-RevId: 0220116c89845ad7704fb446e41abf99cfff3b45
2025-06-17 08:05:18 +00:00
Tim Down
d9914bf80a Merge pull request #25877 from overleaf/td-limit-browser-translate
Prevent browser translation of stuff that shouldn't be translated in project dashboard

GitOrigin-RevId: aba5d28d368277730d3bdc9aced6b9257cbd7950
2025-06-17 08:05:14 +00:00
Jakob Ackermann
7e9a33841d [project-history] migrate retry_failures script to ESM (#26444)
GitOrigin-RevId: 425e0e6c0a61ef7f8c8c3e07f2d16089f594eb32
2025-06-16 08:07:08 +00:00
Alf Eaton
afe146a620 Sort Storybook stories alphabetically (#26225)
GitOrigin-RevId: d7cf167a67ea24d7095fb27e6090ef735e966536
2025-06-16 08:07:04 +00:00
Jakob Ackermann
c7dd7208fb [document-updater] add flag for checking sync state of a single project (#26433)
GitOrigin-RevId: 504f607c652e59fa1305067f273db849d7923da2
2025-06-16 08:06:59 +00:00
Jakob Ackermann
8b937c91f4 [project-history] fix unit tests after merging multiple PRs (#26442)
GitOrigin-RevId: 9ccbadaa807da2f20010930477724e9370bf103e
2025-06-16 08:06:54 +00:00
Jakob Ackermann
22016ffef9 [clsi] adopt Server Pro env vars for sandboxed compiles (#26430)
GitOrigin-RevId: b7c0d9f4ff37d6cef77694a6a9ea4d50dcb148b2
2025-06-16 08:06:50 +00:00
Andrew Rumble
569e72a1c0 Downgrade outdated-ack message to debugConsole.warn
GitOrigin-RevId: 3537d4e3ee311cafc2956b2a9d3267e8184af48e
2025-06-16 08:06:45 +00:00
Jakob Ackermann
740b1d3f50 [project-history] add script for retrying failures (#26427)
GitOrigin-RevId: 265cec9719825613de01d7f476b20203bbff8d7f
2025-06-16 08:06:41 +00:00
Jakob Ackermann
0aa56fbe2c [project-history] fix callback signature when processing no updates (#26420)
* [project-history] fix tests and cover callback for processing updates

The `before` setup was hiding that some tests were not doing what the
assertions were expecting.

* [project-history] fix callback signature when processing no updates

GitOrigin-RevId: 4fa14d47b9a1afd998316b0c9024d49760785a47
2025-06-16 08:06:36 +00:00
Jakob Ackermann
6f516b25af [project-history] add metrics for compression of updates (#26307)
* [project-history] add metrics for compression of updates

* [project-history] sample compression metrics

GitOrigin-RevId: 1cd67dc4ec7b44285afb436c62392b464f007f97
2025-06-16 08:06:28 +00:00
Jimmy Domagala-Tang
a1591e8b0c feat: log error for support to investigate, and return 200 to recurly to prevent retries (#26317)
GitOrigin-RevId: 6f71fd4451e950a7bdbe1140a86a78bf8a04b039
2025-06-16 08:06:23 +00:00
Domagoj Kriskovic
b0c5d6fc5a Update font size and spacing in AI assist part of plans page (#26437)
GitOrigin-RevId: 2b0e3b68f73e72ef1024db9c1088d20b973f6245
2025-06-16 08:06:18 +00:00
Rebeka Dekany
0ac2ddd686 Migrate onboarding/bonus/beta program templates to Bootstrap 5 (#26344)
* Migrate the Try Premium for free page to Bootstrap 5

* Migrate the Overleaf Beta Program page to Bootstrap 5

* Fix buttons alignment on smaller screen size

* Migrate the Overleaf Bonus Program page to Bootstrap 5

GitOrigin-RevId: 811db783af6a86ab472aae95e075bfa301786a31
2025-06-16 08:06:14 +00:00
Kristina
53fc78d83e Merge pull request #25771 from overleaf/kh-pass-locale-to-checkout
[web] pass locale and currency to Stripe checkout

GitOrigin-RevId: 410cde4849226a70c5a6b295554d96984f48f9fb
2025-06-16 08:05:51 +00:00
roo hutton
980a8458d4 Merge pull request #26140 from overleaf/rh-cio-sub-status
Use analyticsId as primary identifier in customer.io and support best-subscription property

GitOrigin-RevId: d412f2edbff2e430d7fe3192b8843ad9ac2c226a
2025-06-16 08:05:39 +00:00
Jakob Ackermann
f025f1d0cb [web] let docstore determine a project's comment thread ids (#26364)
* [docstore] add endpoint for getting a project's comment thread ids

* [web] let docstore determine a project's comment thread ids

Also fetch the comment thread ids once when reverting project.

GitOrigin-RevId: c3ebab976821509c9627962e58918f9c6ebb0e1d
2025-06-13 08:08:00 +00:00
MoxAmber
e95b159edd Merge pull request #26395 from overleaf/as-compile-timeout-event-fix
[web] Rename time segmentation value to compileTime to prevent MixPanel errors

GitOrigin-RevId: 70ddfd5091e3f8d98849f09dc9d09adc8c0bf2c7
2025-06-13 08:07:48 +00:00
Davinder Singh
227f035c2e Merge pull request #26358 from overleaf/ds-bs5-customer-story-cern
[B2C] Migrating CERN customer story page to Bootstrap 5

GitOrigin-RevId: dcffa5117c66438caeef3793e7f6a87055371f91
2025-06-13 08:07:40 +00:00
Davinder Singh
08ea0f270b Merge pull request #26268 from overleaf/ds-cms-bs5-customer-story-layout-2
[B2C] Bootstrap 5 migration of Customer story layout

GitOrigin-RevId: 6156d69f24be4818e68d044e44539ec3fc8b2595
2025-06-13 08:07:35 +00:00
Domagoj Kriskovic
fd1926a1c8 Preserve resolve state in history on file restore (#26302)
* Preserve resolve state in history when doing a restore

* added comment explaining why the resolved property is deleted

GitOrigin-RevId: fb1011cf98a658b302c6eef1da83fb1006bb2052
2025-06-13 08:07:26 +00:00
Tim Down
9c287ba36c Merge pull request #26390 from overleaf/td-logged-out-invite-layout
Use correct layout for logged-out user invite page

GitOrigin-RevId: f8b95e90068b3b18d937f56a4e5edba59e39d111
2025-06-13 08:07:21 +00:00
Tim Down
ef7cc20694 Merge pull request #26274 from overleaf/td-account-enrollment-error-fix
Prevent front-end errors in account enrollment page

GitOrigin-RevId: d05e295f70a8e9cb6d5e0da6800d7eaf4468cb39
2025-06-13 08:07:16 +00:00
Tim Down
cc21f42a14 Merge pull request #26023 from overleaf/td-remove-components-from-includes
Remove imports of react-bootstrap components from marketing pages

GitOrigin-RevId: 12a3446cc42f1438a52f49a893e53a02a1b5be4f
2025-06-13 08:07:12 +00:00
Antoine Clausse
5c7bef31ca [web] Fix donut chart in non-commons uni portal pages (#26379)
* Revert me: show fake data for donut chart

* Re-add `nvd3` styles in BS3

* Revert "Revert me: show fake data for donut chart"

This reverts commit b93e2aa5b0838571a5c4d96e85483b3d029038c7.

* Prettierignore nvd3.less

GitOrigin-RevId: 90702447244e7a2ddac495e9203c16c6bfc17bb0
2025-06-13 08:07:07 +00:00
Domagoj Kriskovic
fc050983c9 AI assist section for plans page (#26187)
* AI assist plans section

* fix merge issues, add tests

* translate img alt

* startInterval only if there are images found

* update casing for TeXGPT

* update mobile price font

* small design tweaks

GitOrigin-RevId: 87d993bb5da1929f99ab3b4721316961d78a46f5
2025-06-13 08:07:02 +00:00
Domagoj Kriskovic
92626393ec Check if AI assist standalone plan is used in shouldPlanChangeAtTermEnd (#26272)
GitOrigin-RevId: d6737ea28071d565109dba695876b6fbf3f5daa2
2025-06-13 08:06:57 +00:00
Domagoj Kriskovic
ce00213c4a Add permissions checks for AI assist addon purchase (#26355)
* Add permissions checks for AI assist when trying to buy the addon

* more explicit check for DuplicateAddOnError

* remove useCapabilities()

GitOrigin-RevId: 1979e27410981e2ef020cecc731e228483d3315a
2025-06-13 08:06:52 +00:00
Brian Gough
92731848ac Merge pull request #26352 from overleaf/bg-history-redis-add-flush-endpoint
add flush endpoint to history-v1

GitOrigin-RevId: b2ca60f7d040459f9c542e4e87147b9eecc9f596
2025-06-13 08:06:37 +00:00
Brian Gough
2f44a4eb5a Merge pull request #26351 from overleaf/bg-history-redis-convert-persist-worker-to-esm
history redis convert persist worker to esm

GitOrigin-RevId: edcbac6e3f1d3dde3fa8239378995f3ff3afcfdd
2025-06-13 08:06:32 +00:00
Brian Gough
d189c91c59 Merge pull request #26378 from overleaf/bg-history-redis-fix-misc
add type check to scan.js

GitOrigin-RevId: a306ec841b8801a2a5eab8c35f246180206f7231
2025-06-13 08:06:27 +00:00
Brian Gough
eed6a982f7 Merge pull request #26377 from overleaf/bg-history-redis-remove-unwanted-parameters
remove unwanted parameters in queueChangesFake and queueChangesFakeOnlyIfExists functions

GitOrigin-RevId: 5946cd1f81db7076eb545b5a0aca28b81fa19be4
2025-06-13 08:06:22 +00:00
Jakob Ackermann
ab0199f238 [misc] migrate remaining references to our GCR repositories to AR (#26370)
* [misc] migrate remaining references to our GCR repositories to AR

* [server-ce] fix retagging of texlive images

GitOrigin-RevId: 81f955ad4c4486ad42b29cbd6bcc9d5ef4b1a432
2025-06-13 08:06:18 +00:00
MoxAmber
0a79ac75ff Merge pull request #26312 from overleaf/as-10s-compile-timeout
[web] 10s Compile Timeout: Warning Phase

GitOrigin-RevId: 749baad646fa7ef0d3a8e14fbbb5edec7b227ed3
2025-06-13 08:06:13 +00:00
ilkin-overleaf
d49a9e9e80 Merge pull request #25843 from overleaf/ii-managed-users-make-unmanaged-terminate-subscription
[web] Terminate subscription when joining a managed group

GitOrigin-RevId: 2a4f2fd57e1319970780043a633fb8027593e5d4
2025-06-13 08:06:08 +00:00
ilkin-overleaf
0fc229dfc0 Merge pull request #25729 from overleaf/ii-managed-users-make-unmanaged-explanations-copy
[web] Managed users copy changes

GitOrigin-RevId: 2cbcdc80eafb2257302d11c179f7efd8742a3945
2025-06-13 08:06:03 +00:00
Kristina
02e7ac52e2 Merge pull request #26208 from overleaf/kh-extend-trial
[web] support extending trials for Stripe

GitOrigin-RevId: ae7d863f61ce5ec463509c590199c6e583300e8f
2025-06-13 08:05:55 +00:00
ilkin-overleaf
cfc6ff0759 Merge pull request #25983 from overleaf/ii-managed-users-make-unmanaged-roles-access
[web] Prevent managers from removing/deleting themselves

GitOrigin-RevId: 9287dc06bab8024bf03fecff678a4118a9456919
2025-06-13 08:05:46 +00:00
ilkin-overleaf
277e59fbd5 Merge pull request #25713 from overleaf/ii-managed-users-make-unmanaged-events
[web] Tracking events for offboarding managed users

GitOrigin-RevId: 12d0f178bab9f2f0923135ce3661738acdc85b82
2025-06-13 08:05:42 +00:00
ilkin-overleaf
272303cb58 Merge pull request #24907 from overleaf/ii-managed-users-make-unmanaged
[web] Release users from managed group

GitOrigin-RevId: 15921286af332d2294fb900ab3055991ca8b1673
2025-06-13 08:05:37 +00:00
Mathias Jakobsen
819cd85a0e Merge pull request #26310 from overleaf/mj-history-view-cut-off
[web] Editor redesign: Avoid cutting off history view

GitOrigin-RevId: ecffc404ef6c210c852edd13c99b742545b73b32
2025-06-13 08:05:25 +00:00
Rebeka Dekany
d2e784e11c Remove .text-centered and use .text-center instead (#26217)
GitOrigin-RevId: 75774d877e3d513574818afc517be815cb7201ae
2025-06-13 08:05:19 +00:00
Jakob Ackermann
8b91b3b749 [misc] wait for mongo before running acceptance tests (#26374)
GitOrigin-RevId: 1fde30c2b630a51f5eda9d318ac721a81cc23607
2025-06-13 08:05:08 +00:00
Jakob Ackermann
365af778b6 [web] fetch user details for chat messages/ranges in a single batch (#26342)
GitOrigin-RevId: 5a4238e30388bb7f58b7a93dda9e43338ab94f6e
2025-06-12 08:06:16 +00:00
Jakob Ackermann
3862826589 [web] let docstore determine user ids of tracked changes (#26333)
* [docstore] add endpoint for getting user ids of tracked changes

* [web] let docstore determine user ids of tracked changes

GitOrigin-RevId: 8d0a131555aa827f7ff80690fedc1aca26cf0817
2025-06-12 08:06:11 +00:00
Brian Gough
2e4b57bf81 Merge pull request #26357 from overleaf/bg-history-redis-fix-history-buffer-level
remove incorrect parameter from queueChanges call in queueChangesFake function

GitOrigin-RevId: 4567be5eb5634c0c419f7f8758f1a6f27586a9af
2025-06-12 08:05:58 +00:00
Jimmy Domagala-Tang
55295ece9c feat: allow for users on both free plans and premium plans to use the freemium ai assist (#26350)
GitOrigin-RevId: cef1051800abac3d4f7039ade2d79c75d5c15c8f
2025-06-12 08:05:49 +00:00
Brian Gough
8a90173aa7 Merge pull request #26271 from overleaf/bg-history-redis-deployment-refactor
introduce history-v1 buffering levels

GitOrigin-RevId: 7709935a5ceb19ef6c5723ded647217b7399759a
2025-06-12 08:05:39 +00:00
David
72ff927a52 Merge pull request #26311 from overleaf/dp-auto-compile-failure
Add a code check banner to the new editor

GitOrigin-RevId: 6ee01d8379247824f4ec0684809ad432c4617c96
2025-06-12 08:05:35 +00:00
Miguel Serrano
9601eeb7c9 [CE/SP] Hotfix 5.5.1 (#26091)
* [CE/SP] Hotfix 5.5.1

* [web] Fix License tab in CE/SP

* Added patch to improve logging

* Added patch to fix create-user.mjs

* Added check for `featureCompatibilityVersion` on CE/SP startup

* Patch with `multer` and `tar-fs` updates

* Manually install missing @paralleldrive/cuid2 on CE 5.1.1

GitOrigin-RevId: 0138dffdcb171382014a383bee13676fc873b1dd
2025-06-12 08:05:30 +00:00
Eric Mc Sween
91c1c6858a Merge pull request #26281 from overleaf/em-history-ot-undo
Fix undo in history OT

GitOrigin-RevId: 22e8da99abb3de70095539fd8acb5c7eb947fbd1
2025-06-12 08:05:25 +00:00
Eric Mc Sween
101c994fec Merge pull request #26279 from overleaf/em-compose-tracking-props
Merge tracked inserts and deletes during composition

GitOrigin-RevId: f8cfcf79aef7cb3e7acaecf7c3baa69d71a4efa9
2025-06-12 08:05:21 +00:00
Antoine Clausse
ab19b01d43 [web] Migrate metrics module Pug files to Bootstrap 5 (2) (#26199)
* Reapply "[web] Migrate metrics module Pug files to Bootstrap 5 (#25745)"

This reverts commit 0962383998f29313cc1fa33b98255a38896738a0.

* Remove foot-scripts from metricsApp.pug

* Fix loading-overlay position

* Hide carets on print display

* Fix Dropdown

* Fix Tooltips

GitOrigin-RevId: 754d9a004f7b476578ee20565203aef98b08bbf4
2025-06-12 08:05:16 +00:00
Antoine Clausse
b3dc0097fd Merge pull request #26188 from overleaf/ac-bs5-fix-redundant-carets
[web] Fix redundant carets in BS5 marketing pages

GitOrigin-RevId: 479687d982db23e4f5f2efcc3f5f39bb78f0eb24
2025-06-12 08:05:11 +00:00
Antoine Clausse
6a951e2ff0 [web] Migrate general Pug pages to BS5 (2) (#26121)
* Reapply "[web] Migrate general Pug pages to BS5 (#25937)"

This reverts commit c0afd7db2dde6a051043ab3e85a969c1eeb7d6a3.

* Fixup layouts in `404` and `closed` pages

Oversight from https://github.com/overleaf/internal/pull/25937

* Use `.container-custom-sm` and `.container-custom-md` instead of inconsistent page widths

* Copy error-pages.less

* Convert error-pages.lss to SCSS

* Revert changes to pug files

* Nest CSS in `.error-container` so nothing leaks to other pages

* Remove `font-family-serif`

* Use CSS variables

* Remove `padding: 0` from `.full-height`: it breaks the layout in BS5

* Fix error-actions buttons

* Revert changes to .container-custom...

* Update services/web/app/views/external/planned_maintenance.pug

Co-authored-by: Rebeka Dekany <50901361+rebekadekany@users.noreply.github.com>

* Update services/web/app/views/general/closed.pug

Co-authored-by: Rebeka Dekany <50901361+rebekadekany@users.noreply.github.com>

---------

Co-authored-by: Rebeka Dekany <50901361+rebekadekany@users.noreply.github.com>
GitOrigin-RevId: 313f04782a72fae7cc66d36f9d6467bad135fd60
2025-06-12 08:05:06 +00:00
Brian Gough
b290e93441 Merge pull request #26270 from overleaf/bg-history-redis-commit-change-manager
replace redis logic in persistChanges with new commitChanges method

GitOrigin-RevId: e06f9477b9d5548fa92ef87fb6e1f4f672001a35
2025-06-11 08:07:45 +00:00
Andrew Rumble
5799d534a9 Ensure we wait after processing each subscription
GitOrigin-RevId: f6a184bc8a65934f24857cfc4f71f95574576b9d
2025-06-11 08:07:41 +00:00
Andrew Rumble
07b47606c1 Disable script in production
GitOrigin-RevId: 81fe077a5816a23fa20c78a6271fbdf62021e3b2
2025-06-11 08:07:36 +00:00
Brian Gough
b946c2abff Merge pull request #26304 from overleaf/bg-history-redis-clear-persist-time-on-persist
add persist time handling to setPersistedVersion method

GitOrigin-RevId: 5e115b49116ee4604e3e478c206c7e9cf147cbc8
2025-06-11 08:07:32 +00:00
Jakob Ackermann
25c3699862 [docstore] finish async/await migration (#26295)
* [docstore] DocManager.getDocLines returns flat content

* [docstore] peekDoc throws NotFoundError, skip check in HttpController

* [docstore] getFullDoc throws NotFoundError, skip check in HttpController

* [docstore] migrate HealthChecker to async/await

* [docstore] migrate HttpController to async/await

* [docstore] remove .promises/callbackify wrapper from all the modules

GitOrigin-RevId: a9938b03cdd2b5e80c2c999039e8f63b20d59dc5
2025-06-11 08:07:20 +00:00
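The wrapper removal in the last bullet follows the usual Node pattern; a hypothetical before/after sketch (module and function names are illustrative, not taken from docstore):

```js
const { callbackify } = require('node:util')

// Before: the implementation is already async/await, but the module still
// exports a callback-style facade plus a `.promises` namespace for
// already-migrated callers.
async function getDoc(projectId, docId) {
  return { lines: [] } // ... fetch from mongo ...
}
module.exports = { getDoc: callbackify(getDoc), promises: { getDoc } }

// After: every caller uses async/await, so the facade and the `.promises`
// wrapper are dropped and the async functions are exported directly.
module.exports = { getDoc }
```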
Mathias Jakobsen
0397b02214 Merge pull request #26221 from overleaf/mj-history-dark-mode-entries
[web] Editor redesign: Add dark mode to history entries

GitOrigin-RevId: 16c9743bdee85dc3825ce6e9901a0107956205ca
2025-06-11 08:07:12 +00:00
Mathias Jakobsen
7c23655c79 Merge pull request #26177 from overleaf/mj-ide-history-file-tree
[web] Editor redesign: Update history view file tree

GitOrigin-RevId: bb0fe871837ffac6e1af6c18c7c1ae651dee7f81
2025-06-11 08:07:07 +00:00
Brian Gough
fdd0d95554 Merge pull request #26293 from overleaf/bg-history-redis-fix-persist-worker
add missing load global blobs from persist worker

GitOrigin-RevId: ae9393f2353fb4d5afe349aa7d0a26bab80c7f53
2025-06-11 08:06:59 +00:00
Brian Gough
2a833aa23a Merge pull request #26250 from overleaf/bg-history-redis-add-return-value-to-persistBuffer
provide return value from persistBuffer

GitOrigin-RevId: ba52ff42b91ffe9adc23ab0461fa836540735563
2025-06-11 08:06:54 +00:00
Brian Gough
fec6dde00f Merge pull request #26203 from overleaf/bg-history-redis-fix-loadAtVersion
Extend loadAtVersion to handle nonpersisted versions

GitOrigin-RevId: 22060605ea7bb89a8d4d61bafab8f63b94d59067
2025-06-11 08:06:50 +00:00
Brian Gough
c81cc4055e Merge pull request #26220 from overleaf/bg-history-redis-fix-loadAtVersion-startVersion
correct startVersion calculation in loadAtVersion

GitOrigin-RevId: b81c30dcab90b137169a4bddef3c22f44a957f68
2025-06-11 08:06:45 +00:00
Brian Gough
2d0706591b Merge pull request #26219 from overleaf/bg-history-redis-fix-loadAtTimestamp
correct startVersion calculation in loadAtTimestamp

GitOrigin-RevId: ad46aae47c0769943e787199d68e895cf139bb56
2025-06-11 08:06:41 +00:00
Brian Gough
f904933d68 Merge pull request #26180 from overleaf/bg-history-redis-add-queueChanges
add queueChanges method to history-v1

GitOrigin-RevId: fb6da79bd5ca40e7cbdcb077ad3a036cc5509ced
2025-06-11 08:06:36 +00:00
Andrew Rumble
c227c1e2d9 Remove some unused variables
These missed the lint rule as they were merged between the last rebase
and deploy.

GitOrigin-RevId: 16b1117d56f2fc824509b9a0f340dba2ede9902f
2025-06-11 08:06:31 +00:00
Brian Gough
c23e84eb37 Merge pull request #26273 from overleaf/bg-history-redis-add-persist-worker-to-cron
modify existing run-chunk-lifecycle cron job to persist and expire redis queues

GitOrigin-RevId: afb94b3e2fba7368cfec11997dfd5b2bbd6321a9
2025-06-11 08:06:23 +00:00
David
637312e4f8 Merge pull request #26135 from overleaf/dp-error-logs-ai
Add AI paywall to new error logs

GitOrigin-RevId: 2d6dad11dfe3b27c8ff322a9778a53496cfe7277
2025-06-11 08:06:19 +00:00
Andrew Rumble
ce3054713f Remove unused variable
GitOrigin-RevId: 57b864aff3317513f981b101feafac28d3379403
2025-06-11 08:06:14 +00:00
Andrew Rumble
2c07fa1f77 Skip unused array members
GitOrigin-RevId: 5ea4dd880505e65fe7545e0c0d4301236ad103e7
2025-06-11 08:06:10 +00:00
andrew rumble
52280febf6 When filtering object members from rest, use full name
GitOrigin-RevId: 0c21c70b2512931744f18e79c8d9e4bb85e83dfa
2025-06-11 08:06:05 +00:00
andrew rumble
f871130773 Disable lint warnings for stubbed class
GitOrigin-RevId: bcee2d1ea4fcb5543fa31fd2174641e55d6c4d39
2025-06-11 08:06:00 +00:00
andrew rumble
25675ce2ba Remove unused params from destructuring
GitOrigin-RevId: e47a16e2d99e923c314fd0fa2220c19b7b2c9b51
2025-06-11 08:05:56 +00:00
Andrew Rumble
c1f5d7c40c Ignore params that are needed for type integrity
These params are either used in a descendent or ancestor of the relevant
file and form part of the interface of the method even if they are not
directly used.

GitOrigin-RevId: 8bf64cecc69a9ae9e6c50797de5ce8db86757440
2025-06-11 08:05:51 +00:00
andrew rumble
4960569648 Remove unused full arguments
As distinct from removing destructured props.

GitOrigin-RevId: d02ad8d36fb532559ed2899268d7b699f2f2fa37
2025-06-11 08:05:46 +00:00
andrew rumble
eb60d364f6 Fix instances of ...rest filtering
GitOrigin-RevId: 9f2889b08ffed20466d7022a5aba69d3e87c5ed9
2025-06-11 08:05:42 +00:00
andrew rumble
542008c61d Remove unused event arguments
GitOrigin-RevId: 25858d07865d6b9a7caa4997d031586a248d8e8b
2025-06-11 08:05:37 +00:00
andrew rumble
3da4dc71f1 Modify no-unused-vars behaviour
using @typescript-eslint/no-unused-vars reduces the number of false
positives in TS code. The changes:
1. Allow the arguments to a function to be checked (reporting only after
the last used variable)
2. Allow rest siblings to be checked
3. Allow these rules to be skipped with an _ prefix to a variable

GitOrigin-RevId: 1f6eac4109859415218248d5b2068a22b34cfd7e
2025-06-11 08:05:33 +00:00
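As a sketch, an ESLint rule entry matching the three behaviours described above (option values are assumptions; the repo's actual config may differ):

```js
// .eslintrc.js (sketch) — mirrors the three listed changes
module.exports = {
  rules: {
    'no-unused-vars': 'off', // defer to the TS-aware rule
    '@typescript-eslint/no-unused-vars': [
      'error',
      {
        args: 'after-used',        // 1. check args, reporting only after the last used one
        ignoreRestSiblings: false, // 2. rest siblings are checked
        argsIgnorePattern: '^_',   // 3. opt out with an _ prefix
        varsIgnorePattern: '^_',
        destructuredArrayIgnorePattern: '^_', // assumption: matches "Skip unused array members"
      },
    ],
  },
}
```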
Davinder Singh
312664bd2d Merge pull request #26265 from overleaf/ds-cms-bs5-customer-stories-2
[B2C] Bootstrap 5 migration of Customer stories page

GitOrigin-RevId: cca0d00412ab4ec5da15e26e4e7eb3c40de9e47c
2025-06-11 08:05:14 +00:00
ilkin-overleaf
69e2a57769 Merge pull request #26141 from overleaf/ii-managed-users-consent-screen
[web] Joining managed group from projects page

GitOrigin-RevId: 191203559fba94cad45f35de1af2427b2abb9326
2025-06-11 08:05:09 +00:00
Eric Mc Sween
6d202432ff Merge pull request #26209 from overleaf/em-multiple-edit-ops
Support multiple ops in the history OT ShareJS type

GitOrigin-RevId: fad1e9081ed1978de414c5130692d3b23fcd13d8
2025-06-10 08:06:13 +00:00
Miguel Serrano
5b08adc4ff Merge pull request #26218 from overleaf/msm-bump-tar-fs-multer
[clsi/web/history-v1] Bump `tar-fs` and `multer`

GitOrigin-RevId: c76b964224c8367d68dc1190ff29627cc6919ade
2025-06-10 08:06:05 +00:00
Davinder Singh
86626ca44e Merge pull request #25856 from overleaf/ds-cms-bs5-migration-universities-2
[B2C] Bootstrap 5 migration of Universities page

GitOrigin-RevId: b069c04131531e9f9774a9a53aaa53858ba568c7
2025-06-10 08:06:00 +00:00
Davinder Singh
45c6ce2219 Merge pull request #25842 from overleaf/ds-cms-bs5-migration-enterprises-2
[B2C] Bootstrap 5 migration of Enterprises page

GitOrigin-RevId: 63c4095ddb2ee688bc1780883b86f5a994b262c0
2025-06-10 08:05:55 +00:00
David
ff63215d73 Merge pull request #26155 from overleaf/dp-content-info
Add content-info and content-info-dark to standard colours and use in editor redesign logs

GitOrigin-RevId: 40c026a9ccfe511cab2bf4e28fbfbed7cf218642
2025-06-10 08:05:51 +00:00
Mathias Jakobsen
d3a9b4943a Merge pull request #26257 from overleaf/mj-ide-breadcrumbs-crash
[web] Avoid editor crash when breadcrumbs can't find open entity

GitOrigin-RevId: 7c7f198c82e102ee9f8e2a59ca1755c3550bdf37
2025-06-10 08:05:46 +00:00
Mathias Jakobsen
e0f6ee8b20 Merge pull request #26133 from overleaf/mj-ide-keyboard-shortcuts
[web] Editor redesign: Add keyboard shortcuts to menu bar

GitOrigin-RevId: 8fe844389de70a919ba836d03f0390f585532bb1
2025-06-10 08:05:42 +00:00
Andrew Rumble
edc7634007 Update bootstrap process to use vitest chai
GitOrigin-RevId: 5576223019c0e2b4554707f0025e82ab3a7ca514
2025-06-10 08:05:23 +00:00
Andrew Rumble
c0b7efea10 Change imports that use chai to use vitest
GitOrigin-RevId: 59d780f754adbb5160a2de8e5eca1def6968584b
2025-06-10 08:05:18 +00:00
Brian Gough
2eb695f4c3 Merge pull request #26122 from overleaf/bg-history-redis-make-persist-buffer-consistent
make persistBuffer export consistent with other methods

GitOrigin-RevId: 24536e521e1d20ef63cc74bd9ba40e095025d512
2025-06-09 08:05:39 +00:00
Brian Gough
d280f40885 Merge pull request #26116 from overleaf/bg-history-redis-show-buffer
add script to display redis buffer for a given history ID

GitOrigin-RevId: 71c2e79480c0873d30801ed3c13aa9a7fc7873f6
2025-06-09 08:05:35 +00:00
Kristina
a9923fed4e Merge pull request #26198 from overleaf/jpa-recurly-metrics
[web] add metrics for recurly API usage

GitOrigin-RevId: 89840829f86ce1ff750d57f3445f279f4b151d6f
2025-06-09 08:05:24 +00:00
Kristina
7a449f4686 Merge pull request #26014 from overleaf/kh-remaining-references-to-recurly-fields
[web] update remaining references to `recurlyStatus` and `recurlySubscription_id`

GitOrigin-RevId: f5e905eba598cfcd146803c6ccc36a2304021544
2025-06-09 08:05:17 +00:00
Kristina
a8df91e91b Merge pull request #26087 from overleaf/mf-change-to-stripe-uk
[web] Configure to use Stripe UK account

GitOrigin-RevId: 0856f6da2caae8caf9887ec2acea8e7f0972e598
2025-06-09 08:05:09 +00:00
CloudBuild
9e9ad3c005 auto update translation
GitOrigin-RevId: 52a28c6823536ef916c656128dbcdff1da80635b
2025-06-06 08:06:46 +00:00
Eric Mc Sween
e5d828673e Merge pull request #26128 from overleaf/em-no-tracked-deletes-in-cm
History OT: Remove tracked deletes from CodeMirror

GitOrigin-RevId: 4e7f30cf2ed90b0c261eaa4ba51a2f54fe6e3cef
2025-06-06 08:06:34 +00:00
Andrew Rumble
df233f3e5e Add commands for running just mocha tests
GitOrigin-RevId: 6cd5c6aedd4fb2f222a758d6aca130f178a4acf3
2025-06-06 08:06:18 +00:00
Rebeka Dekany
784559f1b8 Add video caption track if captionFile is available (#25997)
GitOrigin-RevId: fefcce66fe573385dfec34cc0f8697220fe418a3
2025-06-06 08:06:06 +00:00
Rebeka Dekany
ae51e57c75 Migrate user email confirmation page to Bootstrap 5 (#26026)
GitOrigin-RevId: 8e12b19fb941c0adfeaa16089bfe229e8816ad8d
2025-06-06 08:06:01 +00:00
Rebeka Dekany
24e12bfbd4 Migrate institutional account linking pages to Bootstrap 5 (#25900)
GitOrigin-RevId: 75734bdbde52e90305ae759789acaf4203ec49b4
2025-06-06 08:05:57 +00:00
Andrew Rumble
1386ca1669 Add migration for drop projectHistoryMetaData collection
GitOrigin-RevId: 1ebfc60ee9591837f37e507fb1dcb059c09a7f3b
2025-06-06 08:05:52 +00:00
Andrew Rumble
f7fcf4c23f Remove projectHistoryMetaData from mongo db interface
GitOrigin-RevId: dbbc2218c7b1ff8b7907248f86b03189e9e4006d
2025-06-06 08:05:47 +00:00
Jakob Ackermann
3b684e08ca [web] fetch token users in a single db query per access mode (#26078)
* [web] skip db query when getting empty list of users

* [web] fetch token users in a single db query per access mode

GitOrigin-RevId: fa5d9edcb761bd5d5e5ea07d137a5a86efdbdd5c
2025-06-06 08:05:42 +00:00
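Both bullets describe a standard batching pattern; a sketch under assumed names (the collection and projection are illustrative, not from the web service):

```js
// Sketch: one $in query instead of a query per user; empty input
// short-circuits without touching the database.
async function getTokenUsers(db, userIds) {
  if (userIds.length === 0) return [] // skip the db query entirely
  return db
    .collection('users') // collection/field names are assumptions
    .find(
      { _id: { $in: userIds } },
      { projection: { first_name: 1, last_name: 1, email: 1 } }
    )
    .toArray()
}
```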
Brian Gough
d7833afd35 Merge pull request #26173 from overleaf/bg-fix-typo-in-project-deletion
fix deleted project owner ID in expireDeletedProject function

GitOrigin-RevId: 7e427bf9877865752f259a75b99354597d2e0a7f
2025-06-06 08:05:38 +00:00
Brian Gough
af7bcfc96a Merge pull request #25486 from overleaf/bg-add-logging-when-projects-are-expired
add logging when projects are expired

GitOrigin-RevId: 5107f9f3d2f35aac1ee3f02a9a92c5f625d47f7a
2025-06-06 08:05:33 +00:00
Jakob Ackermann
842f6c289f [document-updater] make setDoc aware of tracked deletes in history-ot (#26126)
GitOrigin-RevId: efa1a94f2f435058b553f639e43832454c58591d
2025-06-06 08:05:23 +00:00
Brian Gough
1e6112d5b0 Merge pull request #25467 from overleaf/bg-fix-error-handling-when-accounts-are-deleted
improve logging when deleted user data is expired

GitOrigin-RevId: ac85b66c503184a815348a11a730fb68a504d80a
2025-06-05 08:06:27 +00:00
Antoine Clausse
11e410c9c0 Merge pull request #26163 from overleaf/revert-25745-ac-bs5-metrics-module
Revert "[web] Migrate metrics module Pug files to Bootstrap 5 (#25745)"

GitOrigin-RevId: b97eecc2232f56833391fb789902f9a85936c365
2025-06-05 08:06:16 +00:00
CloudBuild
0037b0b3fc auto update translation
GitOrigin-RevId: f0b783bc74dc2212d330305600c8f3d16d27eef3
2025-06-05 08:06:11 +00:00
Jakob Ackermann
cd10a31a16 [server-ce] fix direct invocation of create-user.mjs script in web (#26152)
GitOrigin-RevId: 9c7917e489dc8f3651f4ccf88a740ad60b6b4437
2025-06-05 08:06:06 +00:00
M Fahru
ca10904484 Merge pull request #26027 from overleaf/mf-admin-panel-stripe
[web] Update admin panel with Stripe subscription data

GitOrigin-RevId: fc4f773c5d6d2eae206a791c1ad40d8ccbf766e7
2025-06-05 08:05:58 +00:00
M Fahru
e3310e2358 Merge pull request #26117 from overleaf/sg-money-back-wording
Update en.json

GitOrigin-RevId: 5b02970e6344b65e37c49c196c9e3c89b1555c75
2025-06-05 08:05:54 +00:00
Domagoj Kriskovic
62714d995d Revert "Reinitialise Writefull toolbar after buying AI assist (#25741)" (#26144)
This reverts commit 7247ae45ca7de7f1f3778b1b22f49e2ff840a7ef.

GitOrigin-RevId: c6dc1a073ce3d0f9703e426df1c12fa1c7ffac5c
2025-06-05 08:05:28 +00:00
David
a8a21e05af Merge pull request #26100 from overleaf/dp-compile-timeout-paywall
Add compile timeout paywall to new editor

GitOrigin-RevId: 9742ae67b4103c72cc9d87852801ae8751f85d6d
2025-06-05 08:05:20 +00:00
David
08316442cf Merge pull request #26067 from overleaf/dp-separate-rail-sizes
Separate rail size storage for different rail tabs

GitOrigin-RevId: dd97215de9ea4e4a932d10dabb234e343f5e3fa5
2025-06-05 08:05:15 +00:00
David
db98f5132b Merge pull request #25939 from overleaf/dp-error-logs
Update error logs designs for new editor

GitOrigin-RevId: 0de3a54446a0ff114a1debb7b5f274d3a8f19c42
2025-06-05 08:05:11 +00:00
Kristina
a134a2b799 [web] support purchasing/removing add-ons for Stripe subscriptions (#26081)
GitOrigin-RevId: 01c2eaccc7c34bc37be43120de83270490e5e6da
2025-06-05 08:05:06 +00:00
Eric Mc Sween
7a556cf1fd Merge pull request #26041 from overleaf/em-history-ot-type-serialize
History OT type: operate on parsed EditOperations

GitOrigin-RevId: dbb35789736958d4ef398e566400d6e9a0e49e8b
2025-06-04 08:07:54 +00:00
Eric Mc Sween
f11ea06c1a Merge pull request #25910 from overleaf/em-track-changes-sharejs
Track changes in the history OT sharejs doc

GitOrigin-RevId: 17365219f24a25790eac611dbde9681eb73d0961
2025-06-04 08:07:50 +00:00
M Fahru
d173bdf8e2 Merge pull request #25355 from overleaf/mf-whitelist-staging-url-stripe-test
[web] Bypass country requirement for Stripe if user is on staging or dev environment to ease the testing process

GitOrigin-RevId: 0924a57d3a1b7b530a3822fb8f9056a1dd7119e9
2025-06-04 08:07:35 +00:00
M Fahru
832f9923b9 Merge pull request #25998 from overleaf/mf-update-stripe-email-from-subscription-dashboard
[web] Make user able to sync their email address in subscription dashboard for Stripe subscription

GitOrigin-RevId: 9abdc0e18ebea29b18c2041130946b9e50fa43db
2025-06-04 08:07:27 +00:00
M Fahru
ef810a9f36 Merge pull request #25967 from overleaf/mf-sync-email-update-to-stripe-account
[web] Sync Stripe customer email when user updates their primary email in account settings

GitOrigin-RevId: a5f4b4e960d2c9d4ba96a2b3036329f4868e1bb8
2025-06-04 08:07:23 +00:00
Brian Gough
54c0eb7fdc Merge pull request #25958 from overleaf/bg-history-redis-check-persisted-version-on-update
prevent setPersistedVersion from setting an out of bounds version

GitOrigin-RevId: 9561b7b96399bed901db5c2ac20a0cdbf4c67395
2025-06-04 08:07:19 +00:00
Antoine Clausse
edacb9ec0b Merge pull request #26111 from overleaf/revert-25937-ac-bs5-general-pug-pages
Revert "[web] Migrate general Pug pages to BS5"

GitOrigin-RevId: fcc42ee28004aa55c09ecbd5f5e96c6067e717e9
2025-06-04 08:07:07 +00:00
Jakob Ackermann
b84d23564b [web] remove spurious cleanup of project audit log entries (#26102)
GitOrigin-RevId: 32693f89b417b357588d059500ab51c3a9dd46dd
2025-06-04 08:07:01 +00:00
Mathias Jakobsen
d5ba2e3f1c Merge pull request #26094 from overleaf/mj-ide-fps-update
[web] Add full project search to redesign switcher modal

GitOrigin-RevId: 3f494ddc3bf94d9f7c2d6de62183b1805b110601
2025-06-04 08:06:51 +00:00
Alf Eaton
385f5706d8 Add doc and file counts to the admin info page for a project (#26076)
GitOrigin-RevId: afa7fa4e562962a4c7c88f6d3d5f13c0f1feb2e3
2025-06-04 08:06:45 +00:00
Antoine Clausse
2226594ade [web] Migrate 4 simple user pages to BS5 (#25947)
* Migrate email-preferences.pug to BS5

https://www.dev-overleaf.com/user/email-preferences

* Migrate sessions.pug to BS5

https://www.dev-overleaf.com/user/sessions

* Migrate one_time_login.pug to BS5

https://www.dev-overleaf.com/read-only/one-time-login

* Fix positions in back-to-btns mixin

* Migrate accountSuspended.pug to BS5

https://www.dev-overleaf.com/account-suspended

* Set max-width of 400px in account-suspended page

* Fix column widths in sessions.pug

GitOrigin-RevId: 8ec6100fb230cf532049fcc9aba7c00def20ea0e
2025-06-04 08:06:40 +00:00
Antoine Clausse
a210a7b14d [web] Migrate general Pug pages to BS5 (#25937)
* Revert me! Temporarily update code to test updates

* Update layout-no-js.pug to use BS5

* Migrate pages to BS5

* Revert "Revert me! Temporarily update code to test updates"

This reverts commit 03d980939dcbdc3f73ddf1e673acbc3fbfdfe2ec.

* Use `.error-container` class instead of BS5 utility

* Fix breakpoints

* Use `.error-container` instead of utility class

GitOrigin-RevId: fd39c4f7278f175bbdeee24826f7a2226b1d7c70
2025-06-04 08:06:36 +00:00
Antoine Clausse
25d3972810 [web] Migrate post-gateway.pug to BS5 (#25860)
* Remove `data-ol-auto-submit`, to test the page

* Migrate post-gateway.pug to BS5

* Revert "Remove `data-ol-auto-submit`, to test the page"

This reverts commit ee728b0bdda80d739bd09b2e4e9419303f7053db.

* Fix breakpoints

* Use `layout-marketing`

GitOrigin-RevId: 73aa4da1e4ddae03d9c8e6671c6a8ccb89ecf0b0
2025-06-04 08:06:32 +00:00
Antoine Clausse
397016744e [web] Migrate metrics module Pug files to Bootstrap 5 (#25745)
* Remove `bootstrap5PageStatus = 'disabled'`

* Update from 'col-xs-' to 'col-'

* Rename LESS files to SCSS

* Rename local vars

* Refactor color variables to use SCSS variables in stylesheets

* Remove unused `.superscript`

It was added in 6696ffdd50

* Remove -moz and -webkit properties

* Remove unused(?) `.hub-circle img`

* Fix selector specificity for calendar display in daterange-picker

* Fix space/tab indents

* Fixup btn-link classes: fixes some borders

* Add support for svg.nvd3-iddle alongside svg.nvd3-svg in styles

* Add dropdown-item classes (improves styles)

* Replace `data-toggle` by `data-bs-toggle`

* Fixup table: remove .card class, add scope="col", add tbody

* Update dropdown caret icon

* Update icons to material symbols

* Remove green color override for links

* Remove/rearrange CSS unrelated to metrics module

* Add space after "by" in lags-container (by Day/Week/Month)

* Fix SCSS linting

* Re-add CSS that belongs in portals module

* Use `layout-react`

* Put table in Card. It still overflows but looks slightly better

* Fix column breakpoints

* Revert "Use `layout-react`"

This reverts commit a9e0d8f5c19d1dfd7417bf67b90799ad199a5913.

* Use css variables, use breakpoint mixins

* Add `.py-0` on subscriptions table card, so overflows appear less bad

GitOrigin-RevId: 55295ad76c112609baf43de4aa606d0c3da7a91f
2025-06-04 08:06:27 +00:00
Antoine Clausse
4dbc70b745 [web] Replace action button to "Go to Account Settings" link in group-settings alert for email confirmation (#25672)
* Replace action button to "Go to Account Settings" link in group-settings alert for email confirmation

* `bin/run web npm run extract-translations` & `make cleanup_unused_locales`

* Fix test capitalization

* Update "Go to account settings" to lowercase and link-styling

* `bin/run web npm run extract-translations`

* Fix test

GitOrigin-RevId: d66ce34556bdfc2a37f12900055640cc995ac140
2025-06-04 08:06:23 +00:00
Brian Gough
393cee7af5 Merge pull request #25993 from overleaf/bg-history-refactor-persist-buffer-limits
refactor persist buffer to add limits

GitOrigin-RevId: 4a40a7a8812acf5bb7f98bfd7b94d81ebe19fc57
2025-06-04 08:06:19 +00:00
Brian Gough
50df3862e9 Merge pull request #25954 from overleaf/bg-history-expire-worker-fix
fix expire_redis_chunks to only clear job on error

GitOrigin-RevId: f7ec435edda95958b453fba501686dcfd84426f7
2025-06-04 08:06:14 +00:00
Brian Gough
a80203f748 Merge pull request #25909 from overleaf/bg-history-persist-worker
add history persist worker

GitOrigin-RevId: b9e31e7bdd84570efc0b87b9f5e90b4078551a8c
2025-06-04 08:06:10 +00:00
Brian Gough
cb350ecc65 Merge pull request #25907 from overleaf/bg-history-redis-persist-buffer
add a `persistBuffer` method to history-v1

GitOrigin-RevId: 71a34e48e9ebe378e2f765f3216023e505a58a5d
2025-06-04 08:06:06 +00:00
Brian Gough
b2b676249d Merge pull request #25928 from overleaf/bg-history-redis-move-test-script-helpers
move test script helpers in history-v1

GitOrigin-RevId: cc2e5d8b1baea7396f948883a12a91846f77836c
2025-06-04 08:06:02 +00:00
Miguel Serrano
ee23e8f49f Merge pull request #26093 from overleaf/msm-e2e-fix
[CE/SP] Force build of docker compose containers

GitOrigin-RevId: 0605fcdcaf670e3d8435f1e180d2bfc34a29ed81
2025-06-04 08:05:57 +00:00
Jakob Ackermann
4aaf411cd2 [misc] improve logging in history system (#26086)
* [project-history] tag all the errors

* [history-v1] log warnings for unexpected cases

GitOrigin-RevId: 3189fa487eee88985688ff990ec101daad0d13b1
2025-06-04 08:05:50 +00:00
roo hutton
a63e25953f Merge pull request #25896 from overleaf/rh-load-odc-data
Load ODC data when revisiting onboarding form

GitOrigin-RevId: 506df5d58a8b0305d83b9f43986a55fd309a2720
2025-06-04 08:05:42 +00:00
Mathias Jakobsen
48337b2e2c Merge pull request #25808 from overleaf/mj-ide-full-project-search
[web] Editor redesign: Add full project search

GitOrigin-RevId: b4327c4ba0ddd7387ec8d6640e31200ca0fe4a6e
2025-06-03 08:06:52 +00:00
Brian Gough
3a96df4623 Merge pull request #26050 from overleaf/em-saml-user-query
Improve index usage for SAML user query

GitOrigin-RevId: 189aba60a12c8369a0062e7df4c57bef8a16c98c
2025-06-03 08:06:47 +00:00
Brian Gough
4b9963757f Merge pull request #26047 from overleaf/bg-web-api-is-leaking-disk-space
clean up temporary files in GitBridgeHandler operations

GitOrigin-RevId: b4a202f4f4c563a020fed8a47da1a84417ccbd2d
2025-06-03 08:06:43 +00:00
M Fahru
35500cc72b Merge pull request #25607 from overleaf/mf-free-trial-limit-stripe-handler
[web] Limit user free trial on stripe subscription

GitOrigin-RevId: b3d978ed598d20451a99cf811fcae9ba2e3b23f0
2025-06-03 08:06:35 +00:00
Jakob Ackermann
3fbbb50ef7 [web] use correct term in setPublicAccessLevel API wrapper (#25848)
GitOrigin-RevId: 022c59d6d5c6f239438ed8e91f3ca47954198a0c
2025-06-03 08:06:26 +00:00
Jakob Ackermann
0aae5c48b4 [web] skip fetching members and invites for restricted users (#25673)
* [web] hide sensitive data from joinProject when building project view

* [web] skip fetching members and invites for restricted users

* [web] fix owner features in joinProject view

* [web] separate invited members from owner

* [web] skip fetching users with empty members  list

* [web] split await chain

Co-authored-by: Antoine Clausse <antoine.clausse@overleaf.com>

* [web] remove spurious parentheses

* [web] remove dead code

Co-authored-by: Antoine Clausse <antoine.clausse@overleaf.com>

---------

Co-authored-by: Antoine Clausse <antoine.clausse@overleaf.com>
GitOrigin-RevId: 5b4d874f974971e9c14d7412620805f8ebf63541
2025-06-03 08:06:22 +00:00
Jakob Ackermann
6cbacc8cb7 [web] fetch project once for joinProject (#25667)
* [web] fetch project once for joinProject

* [web] await all the nested helpers for getting privilege levels

Co-authored-by: Mathias Jakobsen <mathias.jakobsen@overleaf.com>

---------

Co-authored-by: Mathias Jakobsen <mathias.jakobsen@overleaf.com>
GitOrigin-RevId: f0280c36ef995b417ccdab15014f05954e18c5f0
2025-06-03 08:06:13 +00:00
Jakob Ackermann
2e50e0ffa1 [web] add ProjectAccess helper class (#25663)
* [web] add ProjectAccess helper class

* [web] remove ts-ignore for calling OError.tag with try/catch error

GitOrigin-RevId: e097a95b4d929a3927a3eeb70635590680c93007
2025-06-03 08:06:01 +00:00
Mathias Jakobsen
da449f9f5f Merge pull request #26015 from overleaf/mj-ide-breadcrumbs-setting
[web] Add setting to control editor breadcrumbs

GitOrigin-RevId: 6e0a4bb97eba63a1df43d85840f8962bf0238b7c
2025-06-03 08:05:49 +00:00
Mathias Jakobsen
1b15dc3854 Merge pull request #26003 from overleaf/mj-ide-duplicate-project
[web] Editor redesign: Add project duplication button

GitOrigin-RevId: 93e5aa66a7ccc13650e07fda041394811874dafa
2025-06-03 08:05:44 +00:00
Mathias Jakobsen
86e13b088a Merge pull request #25938 from overleaf/mj-core-pug-teardown
[web] Tear down core-pug-bs5 feature flag

GitOrigin-RevId: 875417ca02d8212940b4782bc3016778344116ba
2025-06-03 08:05:39 +00:00
Liangjun Song
26a77e739d Merge pull request #25852 from overleaf/ls-sync-stripe-subscription-logic
Replicate syncing logic for Stripe subscription

GitOrigin-RevId: 9422a3e193160409eddd4c5f2c80e8578bd88559
2025-06-02 08:05:35 +00:00
Eric Mc Sween
c6f4229147 Merge pull request #25952 from overleaf/em-split-editor-facade
Split EditorFacade functionality for history OT (2nd attempt)

GitOrigin-RevId: 2bc6d6c54a9f336fd4a69f0eb548dd06b9f06f5f
2025-06-02 08:05:30 +00:00
Christopher Hoskin
fe64856be7 Merge pull request #26021 from overleaf/csh-issue-25976-dev-env-ci
Upgrade to Redis 7.4 in dev and CI

GitOrigin-RevId: 068e54899bf50a247fedd0243d66f1545bc7cf01
2025-06-02 08:05:21 +00:00
Kristina
9ba772b18f [web] handle 3DS challenges for Stripe (#25918)
* handle 3DS challenges on the subscription dashboard
* add `/user/subscription/sync` endpoint
* upgrade `stripe-js` & rm `react-stripe-js`
* group related unit tests together
* add modules `SubscriptionController` unit tests and convert to async/await
* add `StripeClient` unit tests for 3DS failure

GitOrigin-RevId: 9da4758703f6ef4ec08248b328abddbbdd8e44ad
2025-06-02 08:05:16 +00:00
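A hedged sketch of the 3DS flow this commit describes, using stripe-js; the `/user/subscription/sync` endpoint name comes from the commit message, while the clientSecret plumbing is an assumption:

```js
import { loadStripe } from '@stripe/stripe-js'

// Sketch: complete a 3DS challenge, then ask the server to re-sync
// subscription state via the endpoint added in this commit.
async function completePaymentWith3DS(publishableKey, clientSecret) {
  const stripe = await loadStripe(publishableKey)
  // Opens the bank's challenge dialog if the PaymentIntent requires action
  const { error } = await stripe.confirmCardPayment(clientSecret)
  if (error) throw new Error(`3DS challenge failed: ${error.message}`)
  await fetch('/user/subscription/sync', { method: 'POST' })
}
```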
730 changed files with 31905 additions and 6346 deletions


@@ -1,10 +1,19 @@
+---
+name: Bug report
+about: Report a bug
+title: ''
+labels: type:bug
+assignees: ''
+---
 <!--
 Note: If you are using www.overleaf.com and have a problem,
 or if you would like to request a new feature please contact
 the support team at support@overleaf.com
 This form should only be used to report bugs in the
 Community Edition release of Overleaf.
 -->

README.md (1035 lines changed)

File diff suppressed because it is too large

View file

@@ -77,6 +77,7 @@ each service:
 | `filestore`       | 9235 |
 | `notifications`   | 9236 |
 | `real-time`       | 9237 |
+| `references`      | 9238 |
 | `history-v1`      | 9239 |
 | `project-history` | 9240 |

View file

@@ -13,6 +13,7 @@ NOTIFICATIONS_HOST=notifications
 PROJECT_HISTORY_HOST=project-history
 REALTIME_HOST=real-time
 REDIS_HOST=redis
+REFERENCES_HOST=references
 SESSION_SECRET=foo
 WEBPACK_HOST=webpack
 WEB_API_PASSWORD=overleaf

View file

@@ -112,6 +112,17 @@ services:
       - ../services/real-time/app.js:/overleaf/services/real-time/app.js
       - ../services/real-time/config:/overleaf/services/real-time/config
+  references:
+    command: ["node", "--watch", "app.js"]
+    environment:
+      - NODE_OPTIONS=--inspect=0.0.0.0:9229
+    ports:
+      - "127.0.0.1:9238:9229"
+    volumes:
+      - ../services/references/app:/overleaf/services/references/app
+      - ../services/references/config:/overleaf/services/references/config
+      - ../services/references/app.js:/overleaf/services/references/app.js
   web:
     command: ["node", "--watch", "app.js", "--watch-locales"]
     environment:

View file

@@ -25,10 +25,10 @@ services:
     env_file:
       - dev.env
     environment:
-      - DOCKER_RUNNER=true
       - TEXLIVE_IMAGE=texlive-full # docker build texlive -t texlive-full
-      - COMPILES_HOST_DIR=${PWD}/compiles
-      - OUTPUT_HOST_DIR=${PWD}/output
+      - SANDBOXED_COMPILES=true
+      - SANDBOXED_COMPILES_HOST_DIR_COMPILES=${PWD}/compiles
+      - SANDBOXED_COMPILES_HOST_DIR_OUTPUT=${PWD}/output
     user: root
     volumes:
       - ${PWD}/compiles:/overleaf/services/clsi/compiles
@@ -123,7 +123,7 @@ services:
       dockerfile: services/real-time/Dockerfile
     env_file:
       - dev.env
   redis:
     image: redis:5
     ports:
@@ -131,6 +131,13 @@ services:
     volumes:
       - redis-data:/data
+  references:
+    build:
+      context: ..
+      dockerfile: services/references/Dockerfile
+    env_file:
+      - dev.env
   web:
     build:
       context: ..
@@ -140,7 +147,7 @@ services:
       - dev.env
     environment:
       - APP_NAME=Overleaf Community Edition
-      - ENABLED_LINKED_FILE_TYPES=project_file,project_output_file
+      - ENABLED_LINKED_FILE_TYPES=project_file,project_output_file,url
       - EMAIL_CONFIRMATION_DISABLED=true
       - NODE_ENV=development
       - OVERLEAF_ALLOW_PUBLIC_ACCESS=true
@@ -161,6 +168,7 @@ services:
       - notifications
       - project-history
       - real-time
+      - references
   webpack:
     build:

Binary file not shown (image updated: 587 KiB before, 1 MiB after).

View file

@@ -32,7 +32,7 @@ services:
       OVERLEAF_REDIS_HOST: redis
       REDIS_HOST: redis
-      ENABLED_LINKED_FILE_TYPES: 'project_file,project_output_file'
+      ENABLED_LINKED_FILE_TYPES: 'project_file,project_output_file,url'
       # Enables Thumbnail generation using ImageMagick
       ENABLE_CONVERSIONS: 'true'

View file

@@ -1,6 +1,6 @@
 access-token-encryptor
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 fetch-utils
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 logger
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 metrics
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 mongo-utils
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 o-error
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 object-persistor
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 overleaf-editor-core
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,7 +1,7 @@
 // @ts-check
 /**
- * @import { ClearTrackingPropsRawData } from '../types'
+ * @import { ClearTrackingPropsRawData, TrackingDirective } from '../types'
  */

 class ClearTrackingProps {
@@ -11,12 +11,27 @@ class ClearTrackingProps {
   /**
    * @param {any} other
-   * @returns {boolean}
+   * @returns {other is ClearTrackingProps}
    */
   equals(other) {
     return other instanceof ClearTrackingProps
   }

+  /**
+   * @param {TrackingDirective} other
+   * @returns {other is ClearTrackingProps}
+   */
+  canMergeWith(other) {
+    return other instanceof ClearTrackingProps
+  }
+
+  /**
+   * @param {TrackingDirective} other
+   */
+  mergeWith(other) {
+    return this
+  }
+
   /**
    * @returns {ClearTrackingPropsRawData}
    */

View file

@@ -11,7 +11,7 @@ const EditOperation = require('../operation/edit_operation')
 const EditOperationBuilder = require('../operation/edit_operation_builder')

 /**
- * @import { BlobStore, ReadonlyBlobStore, RangesBlob, RawFileData, RawLazyStringFileData } from '../types'
+ * @import { BlobStore, ReadonlyBlobStore, RangesBlob, RawHashFileData, RawLazyStringFileData } from '../types'
  */

 class LazyStringFileData extends FileData {
@@ -159,11 +159,11 @@ class LazyStringFileData extends FileData {
   /** @inheritdoc
    * @param {BlobStore} blobStore
-   * @return {Promise<RawFileData>}
+   * @return {Promise<RawHashFileData>}
    */
   async store(blobStore) {
     if (this.operations.length === 0) {
-      /** @type RawFileData */
+      /** @type RawHashFileData */
       const raw = { hash: this.hash }
       if (this.rangesHash) {
         raw.rangesHash = this.rangesHash
@@ -171,9 +171,11 @@ class LazyStringFileData extends FileData {
       return raw
     }
     const eager = await this.toEager(blobStore)
+    const raw = await eager.store(blobStore)
+    this.hash = raw.hash
+    this.rangesHash = raw.rangesHash
     this.operations.length = 0
-    /** @type RawFileData */
-    return await eager.store(blobStore)
+    return raw
   }
 }

View file

@@ -8,7 +8,7 @@ const CommentList = require('./comment_list')
 const TrackedChangeList = require('./tracked_change_list')

 /**
- * @import { StringFileRawData, RawFileData, BlobStore, CommentRawData } from "../types"
+ * @import { StringFileRawData, RawHashFileData, BlobStore, CommentRawData } from "../types"
  * @import { TrackedChangeRawData, RangesBlob } from "../types"
  * @import EditOperation from "../operation/edit_operation"
  */
@@ -139,7 +139,7 @@ class StringFileData extends FileData {
   /**
    * @inheritdoc
    * @param {BlobStore} blobStore
-   * @return {Promise<RawFileData>}
+   * @return {Promise<RawHashFileData>}
    */
   async store(blobStore) {
     const blob = await blobStore.putString(this.content)

View file

@@ -84,6 +84,21 @@ class TrackedChange {
       )
     )
   }
+
+  /**
+   * Return an equivalent tracked change whose extent is limited to the given
+   * range
+   *
+   * @param {Range} range
+   * @returns {TrackedChange | null} - the result or null if the intersection is empty
+   */
+  intersectRange(range) {
+    const intersection = this.range.intersect(range)
+    if (intersection == null) {
+      return null
+    }
+    return new TrackedChange(intersection, this.tracking)
+  }
 }

 module.exports = TrackedChange

View file

@@ -2,9 +2,11 @@
 const Range = require('../range')
 const TrackedChange = require('./tracked_change')
 const TrackingProps = require('../file_data/tracking_props')
+const { InsertOp, RemoveOp, RetainOp } = require('../operation/scan_op')

 /**
  * @import { TrackingDirective, TrackedChangeRawData } from "../types"
+ * @import TextOperation from "../operation/text_operation"
  */

 class TrackedChangeList {
@@ -58,6 +60,22 @@ class TrackedChangeList {
     return this._trackedChanges.filter(change => range.contains(change.range))
   }

+  /**
+   * Returns tracked changes that overlap with the given range
+   * @param {Range} range
+   * @returns {TrackedChange[]}
+   */
+  intersectRange(range) {
+    const changes = []
+    for (const change of this._trackedChanges) {
+      const intersection = change.intersectRange(range)
+      if (intersection != null) {
+        changes.push(intersection)
+      }
+    }
+    return changes
+  }
+
   /**
    * Returns the tracking props for a given range.
    * @param {Range} range
@@ -89,6 +107,8 @@ class TrackedChangeList {
   /**
    * Collapses consecutive (and compatible) ranges
+   *
+   * @private
    * @returns {void}
    */
   _mergeRanges() {
@@ -117,12 +137,28 @@
   }

   /**
+   * Apply an insert operation
+   *
    * @param {number} cursor
    * @param {string} insertedText
    * @param {{tracking?: TrackingProps}} opts
    */
   applyInsert(cursor, insertedText, opts = {}) {
+    this._applyInsert(cursor, insertedText, opts)
+    this._mergeRanges()
+  }
+
+  /**
+   * Apply an insert operation
+   *
+   * This method will not merge ranges at the end
+   *
+   * @private
+   * @param {number} cursor
+   * @param {string} insertedText
+   * @param {{tracking?: TrackingProps}} [opts]
+   */
+  _applyInsert(cursor, insertedText, opts = {}) {
     const newTrackedChanges = []
     for (const trackedChange of this._trackedChanges) {
       if (
@@ -171,15 +207,29 @@
       newTrackedChanges.push(newTrackedChange)
     }
     this._trackedChanges = newTrackedChanges
-    this._mergeRanges()
   }

   /**
+   * Apply a delete operation to the list of tracked changes
+   *
    * @param {number} cursor
    * @param {number} length
    */
   applyDelete(cursor, length) {
+    this._applyDelete(cursor, length)
+    this._mergeRanges()
+  }
+
+  /**
+   * Apply a delete operation to the list of tracked changes
+   *
+   * This method will not merge ranges at the end
+   *
+   * @private
+   * @param {number} cursor
+   * @param {number} length
+   */
+  _applyDelete(cursor, length) {
     const newTrackedChanges = []
     for (const trackedChange of this._trackedChanges) {
       const deletedRange = new Range(cursor, length)
@@ -205,15 +255,31 @@
     }
     this._trackedChanges = newTrackedChanges
-    this._mergeRanges()
+  }
+
+  /**
+   * Apply a retain operation to the list of tracked changes
+   *
+   * @param {number} cursor
+   * @param {number} length
+   * @param {{tracking?: TrackingDirective}} [opts]
+   */
+  applyRetain(cursor, length, opts = {}) {
+    this._applyRetain(cursor, length, opts)
+    this._mergeRanges()
   }

   /**
+   * Apply a retain operation to the list of tracked changes
+   *
+   * This method will not merge ranges at the end
+   *
+   * @private
    * @param {number} cursor
    * @param {number} length
    * @param {{tracking?: TrackingDirective}} opts
    */
-  applyRetain(cursor, length, opts = {}) {
+  _applyRetain(cursor, length, opts = {}) {
     // If there's no tracking info, leave everything as-is
     if (!opts.tracking) {
       return
@@ -269,6 +335,31 @@
       newTrackedChanges.push(newTrackedChange)
     }
     this._trackedChanges = newTrackedChanges
+  }
+
+  /**
+   * Apply a text operation to the list of tracked changes
+   *
+   * Ranges are merged only once at the end, for performance and to avoid
+   * problematic edge cases where intermediate ranges get incorrectly merged.
+   *
+   * @param {TextOperation} operation
+   */
+  applyTextOperation(operation) {
+    // this cursor tracks the destination document that gets modified as
+    // operations are applied to it.
+    let cursor = 0
+    for (const op of operation.ops) {
+      if (op instanceof InsertOp) {
+        this._applyInsert(cursor, op.insertion, { tracking: op.tracking })
+        cursor += op.insertion.length
+      } else if (op instanceof RemoveOp) {
+        this._applyDelete(cursor, op.length)
+      } else if (op instanceof RetainOp) {
+        this._applyRetain(cursor, op.length, { tracking: op.tracking })
+        cursor += op.length
+      }
+    }
     this._mergeRanges()
   }
 }
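
For orientation, a hedged usage sketch of the new single-pass entry point (names taken from this diff and the tests further down; `file` stands in for a StringFileData instance):

// Build a text operation elsewhere, then hand it over in one call.
const op = new TextOperation()
  .retain(7)
  .retain(13, { tracking: new TrackingProps('delete', 'user1', new Date()) })
  .retain(23)
// applyTextOperation walks the ops with a destination-side cursor and
// merges tracked-change ranges exactly once, at the end.
file.trackedChanges.applyTextOperation(op)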

View file

@@ -62,6 +62,35 @@ class TrackingProps {
       this.ts.getTime() === other.ts.getTime()
     )
   }
+
+  /**
+   * Are these tracking props compatible with the other tracking props for merging
+   * ranges?
+   *
+   * @param {TrackingDirective} other
+   * @returns {other is TrackingProps}
+   */
+  canMergeWith(other) {
+    if (!(other instanceof TrackingProps)) {
+      return false
+    }
+    return this.type === other.type && this.userId === other.userId
+  }
+
+  /**
+   * Merge two tracking props
+   *
+   * Assumes that `canMerge(other)` returns true
+   *
+   * @param {TrackingDirective} other
+   */
+  mergeWith(other) {
+    if (!this.canMergeWith(other)) {
+      throw new Error('Cannot merge with incompatible tracking props')
+    }
+    const ts = this.ts <= other.ts ? this.ts : other.ts
+    return new TrackingProps(this.type, this.userId, ts)
+  }
 }

 module.exports = TrackingProps
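
A short sketch of the merge semantics introduced above (timestamps mirror the tests below; the merged props keep the earlier of the two):

const a = new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
const b = new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:01.000Z'))
a.canMergeWith(b) // true: same type and userId, timestamps may differ
a.mergeWith(b).ts // equals a.ts, the earlier timestamp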

View file

@@ -175,7 +175,7 @@ class InsertOp extends ScanOp {
       return false
     }
     if (this.tracking) {
-      if (!this.tracking.equals(other.tracking)) {
+      if (!other.tracking || !this.tracking.canMergeWith(other.tracking)) {
         return false
       }
     } else if (other.tracking) {
@@ -198,7 +198,10 @@ class InsertOp extends ScanOp {
       throw new Error('Cannot merge with incompatible operation')
     }
     this.insertion += other.insertion
-    // We already have the same tracking info and commentIds
+    if (this.tracking != null && other.tracking != null) {
+      this.tracking = this.tracking.mergeWith(other.tracking)
+    }
+    // We already have the same commentIds
   }

   /**
@@ -306,9 +309,13 @@ class RetainOp extends ScanOp {
       return false
     }
     if (this.tracking) {
-      return this.tracking.equals(other.tracking)
+      if (!other.tracking || !this.tracking.canMergeWith(other.tracking)) {
+        return false
+      }
+    } else if (other.tracking) {
+      return false
     }
-    return !other.tracking
+    return true
   }

   /**
@@ -319,6 +326,9 @@ class RetainOp extends ScanOp {
       throw new Error('Cannot merge with incompatible operation')
     }
     this.length += other.length
+    if (this.tracking != null && other.tracking != null) {
+      this.tracking = this.tracking.mergeWith(other.tracking)
+    }
   }

   /**

View file

@@ -314,25 +314,18 @@ class TextOperation extends EditOperation {
           str
         )
       }
-      file.trackedChanges.applyRetain(result.length, op.length, {
-        tracking: op.tracking,
-      })
       result += str.slice(inputCursor, inputCursor + op.length)
       inputCursor += op.length
     } else if (op instanceof InsertOp) {
       if (containsNonBmpChars(op.insertion)) {
         throw new InvalidInsertionError(str, op.toJSON())
       }
-      file.trackedChanges.applyInsert(result.length, op.insertion, {
-        tracking: op.tracking,
-      })
       file.comments.applyInsert(
         new Range(result.length, op.insertion.length),
         { commentIds: op.commentIds }
       )
       result += op.insertion
     } else if (op instanceof RemoveOp) {
-      file.trackedChanges.applyDelete(result.length, op.length)
       file.comments.applyDelete(new Range(result.length, op.length))
       inputCursor += op.length
     } else {
@@ -352,6 +345,8 @@ class TextOperation extends EditOperation {
       throw new TextOperation.TooLongError(operation, result.length)
     }

+    file.trackedChanges.applyTextOperation(this)
+
     file.content = result
   }
@@ -400,44 +395,36 @@ class TextOperation extends EditOperation {
     for (let i = 0, l = ops.length; i < l; i++) {
       const op = ops[i]
       if (op instanceof RetainOp) {
-        // Where we need to end up after the retains
-        const target = strIndex + op.length
-        // A previous retain could have overriden some tracking info. Now we
-        // need to restore it.
-        const previousRanges = previousState.trackedChanges.inRange(
-          new Range(strIndex, op.length)
-        )
-        let removeTrackingInfoIfNeeded
         if (op.tracking) {
-          removeTrackingInfoIfNeeded = new ClearTrackingProps()
-        }
+          // Where we need to end up after the retains
+          const target = strIndex + op.length
+          // A previous retain could have overriden some tracking info. Now we
+          // need to restore it.
+          const previousChanges = previousState.trackedChanges.intersectRange(
+            new Range(strIndex, op.length)
+          )
-        for (const trackedChange of previousRanges) {
-          if (strIndex < trackedChange.range.start) {
-            inverse.retain(trackedChange.range.start - strIndex, {
-              tracking: removeTrackingInfoIfNeeded,
-            })
-            strIndex = trackedChange.range.start
-          }
-          if (trackedChange.range.end < strIndex + op.length) {
-            inverse.retain(trackedChange.range.length, {
-              tracking: trackedChange.tracking,
-            })
-            strIndex = trackedChange.range.end
-          }
-          if (trackedChange.range.end !== strIndex) {
-            // No need to split the range at the end
-            const [left] = trackedChange.range.splitAt(strIndex)
-            inverse.retain(left.length, { tracking: trackedChange.tracking })
-            strIndex = left.end
-          }
-        }
-        if (strIndex < target) {
-          inverse.retain(target - strIndex, {
-            tracking: removeTrackingInfoIfNeeded,
-          })
-          strIndex = target
+          for (const change of previousChanges) {
+            if (strIndex < change.range.start) {
+              inverse.retain(change.range.start - strIndex, {
+                tracking: new ClearTrackingProps(),
+              })
+              strIndex = change.range.start
+            }
+            inverse.retain(change.range.length, {
+              tracking: change.tracking,
+            })
+            strIndex += change.range.length
+          }
+          if (strIndex < target) {
+            inverse.retain(target - strIndex, {
+              tracking: new ClearTrackingProps(),
+            })
+            strIndex = target
+          }
+        } else {
+          inverse.retain(op.length)
+          strIndex += op.length
         }
       } else if (op instanceof InsertOp) {
         inverse.remove(op.insertion.length)

View file

@@ -86,10 +86,32 @@ class Range {
   }

   /**
-   * @param {Range} range
+   * Does this range overlap another range?
+   *
+   * Overlapping means that the two ranges have at least one character in common
+   *
+   * @param {Range} other - the other range
    */
-  overlaps(range) {
-    return this.start < range.end && this.end > range.start
+  overlaps(other) {
+    return this.start < other.end && this.end > other.start
+  }
+
+  /**
+   * Does this range overlap the start of another range?
+   *
+   * @param {Range} other - the other range
+   */
+  overlapsStart(other) {
+    return this.start <= other.start && this.end > other.start
+  }
+
+  /**
+   * Does this range overlap the end of another range?
+   *
+   * @param {Range} other - the other range
+   */
+  overlapsEnd(other) {
+    return this.start < other.end && this.end >= other.end
   }

   /**
@@ -227,6 +249,26 @@ class Range {
     )
     return [rangeUpToCursor, rangeAfterCursor]
   }
+
+  /**
+   * Returns the intersection of this range with another range
+   *
+   * @param {Range} other - the other range
+   * @return {Range | null} the intersection or null if the intersection is empty
+   */
+  intersect(other) {
+    if (this.contains(other)) {
+      return other
+    } else if (other.contains(this)) {
+      return this
+    } else if (other.overlapsStart(this)) {
+      return new Range(this.pos, other.end - this.start)
+    } else if (other.overlapsEnd(this)) {
+      return new Range(other.pos, this.end - other.start)
+    } else {
+      return null
+    }
+  }
 }

 module.exports = Range
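
To make the new helper concrete, a minimal sketch of the intersect semantics (Range takes (pos, length), as the tests below confirm):

const r1 = new Range(5, 10)        // covers positions 5..14
const r2 = new Range(3, 6)         // covers positions 3..8
r1.intersect(r2)                   // Range with pos 5, length 4
r1.intersect(new Range(20, 3))     // null: the ranges are disjoint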

View file

@@ -193,4 +193,13 @@ describe('LazyStringFileData', function () {
     expect(fileData.getStringLength()).to.equal(longString.length)
     expect(fileData.getOperations()).to.have.length(1)
   })
+
+  it('truncates its operations after being stored', async function () {
+    const testHash = File.EMPTY_FILE_HASH
+    const fileData = new LazyStringFileData(testHash, undefined, 0)
+    fileData.edit(new TextOperation().insert('abc'))
+    const stored = await fileData.store(this.blobStore)
+    expect(fileData.hash).to.equal(stored.hash)
+    expect(fileData.operations).to.deep.equal([])
+  })
 })

View file

@@ -1,4 +1,3 @@
-// @ts-check
 'use strict'

 const { expect } = require('chai')
@@ -449,4 +448,44 @@ describe('Range', function () {
       expect(() => range.insertAt(16, 3)).to.throw()
     })
   })
+
+  describe('intersect', function () {
+    it('should handle partially overlapping ranges', function () {
+      const range1 = new Range(5, 10)
+      const range2 = new Range(3, 6)
+      const intersection1 = range1.intersect(range2)
+      expect(intersection1.pos).to.equal(5)
+      expect(intersection1.length).to.equal(4)
+      const intersection2 = range2.intersect(range1)
+      expect(intersection2.pos).to.equal(5)
+      expect(intersection2.length).to.equal(4)
+    })
+
+    it('should intersect with itself', function () {
+      const range = new Range(5, 10)
+      const intersection = range.intersect(range)
+      expect(intersection.pos).to.equal(5)
+      expect(intersection.length).to.equal(10)
+    })
+
+    it('should handle nested ranges', function () {
+      const range1 = new Range(5, 10)
+      const range2 = new Range(7, 2)
+      const intersection1 = range1.intersect(range2)
+      expect(intersection1.pos).to.equal(7)
+      expect(intersection1.length).to.equal(2)
+      const intersection2 = range2.intersect(range1)
+      expect(intersection2.pos).to.equal(7)
+      expect(intersection2.length).to.equal(2)
+    })
+
+    it('should handle disconnected ranges', function () {
+      const range1 = new Range(5, 10)
+      const range2 = new Range(20, 30)
+      const intersection1 = range1.intersect(range2)
+      expect(intersection1).to.be.null
+      const intersection2 = range2.intersect(range1)
+      expect(intersection2).to.be.null
+    })
+  })
 })

View file

@@ -107,7 +107,7 @@ describe('RetainOp', function () {
     expect(op1.equals(new RetainOp(3))).to.be.true
   })

-  it('cannot merge with another RetainOp if tracking info is different', function () {
+  it('cannot merge with another RetainOp if the tracking user is different', function () {
     const op1 = new RetainOp(
       4,
       new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
@@ -120,14 +120,14 @@ describe('RetainOp', function () {
     expect(() => op1.mergeWith(op2)).to.throw(Error)
   })

-  it('can merge with another RetainOp if tracking info is the same', function () {
+  it('can merge with another RetainOp if the tracking user is the same', function () {
     const op1 = new RetainOp(
       4,
       new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
     )
     const op2 = new RetainOp(
       4,
-      new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
+      new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:01.000Z'))
     )
     op1.mergeWith(op2)
     expect(
@@ -310,7 +310,7 @@ describe('InsertOp', function () {
     expect(() => op1.mergeWith(op2)).to.throw(Error)
   })

-  it('cannot merge with another InsertOp if tracking info is different', function () {
+  it('cannot merge with another InsertOp if tracking user is different', function () {
     const op1 = new InsertOp(
       'a',
       new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
@@ -323,7 +323,7 @@ describe('InsertOp', function () {
     expect(() => op1.mergeWith(op2)).to.throw(Error)
   })

-  it('can merge with another InsertOp if tracking and comment info is the same', function () {
+  it('can merge with another InsertOp if tracking user and comment info is the same', function () {
     const op1 = new InsertOp(
       'a',
       new TrackingProps(
@@ -338,7 +338,7 @@ describe('InsertOp', function () {
       new TrackingProps(
         'insert',
         'user1',
-        new Date('2024-01-01T00:00:00.000Z')
+        new Date('2024-01-01T00:00:01.000Z')
       ),
       ['1', '2']
     )

View file

@@ -322,6 +322,47 @@ describe('TextOperation', function () {
       new TextOperation().retain(4).remove(4).retain(3)
     )
   })
+
+  it('undoing a tracked delete restores the tracked changes', function () {
+    expectInverseToLeadToInitialState(
+      new StringFileData(
+        'the quick brown fox jumps over the lazy dog',
+        undefined,
+        [
+          {
+            range: { pos: 5, length: 5 },
+            tracking: {
+              ts: '2023-01-01T00:00:00.000Z',
+              type: 'insert',
+              userId: 'user1',
+            },
+          },
+          {
+            range: { pos: 12, length: 3 },
+            tracking: {
+              ts: '2023-01-01T00:00:00.000Z',
+              type: 'delete',
+              userId: 'user1',
+            },
+          },
+          {
+            range: { pos: 18, length: 5 },
+            tracking: {
+              ts: '2023-01-01T00:00:00.000Z',
+              type: 'insert',
+              userId: 'user1',
+            },
+          },
+        ]
+      ),
+      new TextOperation()
+        .retain(7)
+        .retain(13, {
+          tracking: new TrackingProps('delete', 'user1', new Date()),
+        })
+        .retain(23)
+    )
+  })
 })

 describe('compose', function () {

View file

@@ -1,6 +1,6 @@
 promise-utils
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 ranges-tracker
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 redis-wrapper
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 settings
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False

View file

@@ -1,6 +1,6 @@
 stream-utils
 --dependencies=None
---docker-repos=gcr.io/overleaf-ops
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
 --env-add=
 --env-pass-through=
 --esmock-loader=False

package-lock.json (generated, 1717 lines changed)

File diff suppressed because it is too large

View file

@@ -37,7 +37,7 @@
     },
     "swagger-tools": {
       "body-parser": "1.20.3",
-      "multer": "2.0.0",
+      "multer": "2.0.1",
       "path-to-regexp": "3.3.0",
       "qs": "6.13.0"
     }

View file

@@ -0,0 +1,23 @@
diff --git a/node_modules/@node-saml/node-saml/lib/saml.js b/node_modules/@node-saml/node-saml/lib/saml.js
index fba15b9..a5778cb 100644
--- a/node_modules/@node-saml/node-saml/lib/saml.js
+++ b/node_modules/@node-saml/node-saml/lib/saml.js
@@ -336,7 +336,8 @@ class SAML {
const requestOrResponse = request || response;
(0, utility_1.assertRequired)(requestOrResponse, "either request or response is required");
let buffer;
- if (this.options.skipRequestCompression) {
+ // logout requestOrResponse must be compressed anyway
+ if (this.options.skipRequestCompression && operation !== "logout") {
buffer = Buffer.from(requestOrResponse, "utf8");
}
else {
@@ -495,7 +496,7 @@ class SAML {
try {
xml = Buffer.from(container.SAMLResponse, "base64").toString("utf8");
doc = await (0, xml_1.parseDomFromString)(xml);
- const inResponseToNodes = xml_1.xpath.selectAttributes(doc, "/*[local-name()='Response']/@InResponseTo");
+ const inResponseToNodes = xml_1.xpath.selectAttributes(doc, "/*[local-name()='Response' or local-name()='LogoutResponse']/@InResponseTo");
if (inResponseToNodes) {
inResponseTo = inResponseToNodes.length ? inResponseToNodes[0].nodeValue : null;
await this.validateInResponseTo(inResponseTo);

View file

@@ -0,0 +1,64 @@
diff --git a/node_modules/ldapauth-fork/lib/ldapauth.js b/node_modules/ldapauth-fork/lib/ldapauth.js
index 85ecf36a8b..a7d07e0f78 100644
--- a/node_modules/ldapauth-fork/lib/ldapauth.js
+++ b/node_modules/ldapauth-fork/lib/ldapauth.js
@@ -69,6 +69,7 @@ function LdapAuth(opts) {
this.opts.bindProperty || (this.opts.bindProperty = 'dn');
this.opts.groupSearchScope || (this.opts.groupSearchScope = 'sub');
this.opts.groupDnProperty || (this.opts.groupDnProperty = 'dn');
+ this.opts.tlsStarted = false;
EventEmitter.call(this);
@@ -108,21 +109,7 @@ function LdapAuth(opts) {
this._userClient.on('error', this._handleError.bind(this));
var self = this;
- if (this.opts.starttls) {
- // When starttls is enabled, this callback supplants the 'connect' callback
- this._adminClient.starttls(this.opts.tlsOptions, this._adminClient.controls, function(err) {
- if (err) {
- self._handleError(err);
- } else {
- self._onConnectAdmin();
- }
- });
- this._userClient.starttls(this.opts.tlsOptions, this._userClient.controls, function(err) {
- if (err) {
- self._handleError(err);
- }
- });
- } else if (opts.reconnect) {
+ if (opts.reconnect && !this.opts.starttls) {
this.once('_installReconnectListener', function() {
self.log && self.log.trace('install reconnect listener');
self._adminClient.on('connect', function() {
@@ -384,6 +371,28 @@ LdapAuth.prototype._findGroups = function(user, callback) {
*/
LdapAuth.prototype.authenticate = function(username, password, callback) {
var self = this;
+ if (this.opts.starttls && !this.opts.tlsStarted) {
+ // When starttls is enabled, this callback supplants the 'connect' callback
+ this._adminClient.starttls(this.opts.tlsOptions, this._adminClient.controls, function (err) {
+ if (err) {
+ self._handleError(err);
+ } else {
+ self._onConnectAdmin(function(){self._handleAuthenticate(username, password, callback);});
+ }
+ });
+ this._userClient.starttls(this.opts.tlsOptions, this._userClient.controls, function (err) {
+ if (err) {
+ self._handleError(err);
+ }
+ });
+ } else {
+ self._handleAuthenticate(username, password, callback);
+ }
+};
+
+LdapAuth.prototype._handleAuthenticate = function (username, password, callback) {
+ this.opts.tlsStarted = true;
+ var self = this;
if (typeof password === 'undefined' || password === null || password === '') {
return callback(new Error('no password given'));

View file

@@ -24,6 +24,7 @@ build-base:
 	--cache-from $(OVERLEAF_BASE_BRANCH) \
 	--tag $(OVERLEAF_BASE_TAG) \
 	--tag $(OVERLEAF_BASE_BRANCH) \
+	--network=host \
 	$(MONOREPO_ROOT)
@@ -39,6 +40,7 @@ build-community:
 	--file Dockerfile \
 	--tag $(OVERLEAF_TAG) \
 	--tag $(OVERLEAF_BRANCH) \
+	--network=host \
 	$(MONOREPO_ROOT)

 SHELLCHECK_OPTS = \

View file

@@ -9,5 +9,6 @@ export HISTORY_V1_HOST=127.0.0.1
 export NOTIFICATIONS_HOST=127.0.0.1
 export PROJECT_HISTORY_HOST=127.0.0.1
 export REALTIME_HOST=127.0.0.1
+export REFERENCES_HOST=127.0.0.1
 export WEB_HOST=127.0.0.1
 export WEB_API_HOST=127.0.0.1

View file

@@ -0,0 +1,28 @@
FROM sharelatex/sharelatex:5.5.0
# fix tls configuration in redis for history-v1
COPY pr_25168.patch .
RUN patch -p1 < pr_25168.patch && rm pr_25168.patch
# improve logging in history system
COPY pr_26086.patch .
RUN patch -p1 < pr_26086.patch && rm pr_26086.patch
# fix create-user.mjs script
COPY pr_26152.patch .
RUN patch -p1 < pr_26152.patch && rm pr_26152.patch
# check mongo featureCompatibilityVersion
COPY pr_26091.patch .
RUN patch -p1 < pr_26091.patch && rm pr_26091.patch
# update multer and tar-fs
RUN sed -i 's/"multer": "2.0.0"/"multer": "2.0.1"/g' package.json
RUN sed -i 's/"dockerode": "^4.0.5"/"dockerode": "^4.0.7"/g' services/clsi/package.json
RUN sed -i 's/"tar-fs": "^3.0.4"/"tar-fs": "^3.0.9"/g' services/clsi/package.json
RUN sed -i 's/199c5ff05bd375c508f4074498237baead7f5148/4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23/g' services/web/package.json
COPY package-lock.json.diff .
RUN patch package-lock.json < package-lock.json.diff
RUN npm install --omit=dev
RUN npm install @paralleldrive/cuid2@2.2.2 -w services/history-v1

File diff suppressed because it is too large

View file

@@ -0,0 +1,19 @@
--- a/services/history-v1/config/custom-environment-variables.json
+++ b/services/history-v1/config/custom-environment-variables.json
@@ -50,12 +50,14 @@
"history": {
"host": "OVERLEAF_REDIS_HOST",
"password": "OVERLEAF_REDIS_PASS",
- "port": "OVERLEAF_REDIS_PORT"
+ "port": "OVERLEAF_REDIS_PORT",
+ "tls": "OVERLEAF_REDIS_TLS"
},
"lock": {
"host": "OVERLEAF_REDIS_HOST",
"password": "OVERLEAF_REDIS_PASS",
- "port": "OVERLEAF_REDIS_PORT"
+ "port": "OVERLEAF_REDIS_PORT",
+ "tls": "OVERLEAF_REDIS_TLS"
}
}
}

View file

@@ -0,0 +1,200 @@
--- a/services/history-v1/api/controllers/project_import.js
+++ b/services/history-v1/api/controllers/project_import.js
@@ -35,6 +35,7 @@ async function importSnapshot(req, res) {
try {
snapshot = Snapshot.fromRaw(rawSnapshot)
} catch (err) {
+ logger.warn({ err, projectId }, 'failed to import snapshot')
return render.unprocessableEntity(res)
}
@@ -43,6 +44,7 @@ async function importSnapshot(req, res) {
historyId = await chunkStore.initializeProject(projectId, snapshot)
} catch (err) {
if (err instanceof chunkStore.AlreadyInitialized) {
+ logger.warn({ err, projectId }, 'already initialized')
return render.conflict(res)
} else {
throw err
--- a/services/history-v1/api/controllers/projects.js
+++ b/services/history-v1/api/controllers/projects.js
@@ -34,6 +34,7 @@ async function initializeProject(req, res, next) {
res.status(HTTPStatus.OK).json({ projectId })
} catch (err) {
if (err instanceof chunkStore.AlreadyInitialized) {
+ logger.warn({ err, projectId }, 'failed to initialize')
render.conflict(res)
} else {
throw err
@@ -242,11 +243,15 @@ async function createProjectBlob(req, res, next) {
const sizeLimit = new StreamSizeLimit(maxUploadSize)
await pipeline(req, sizeLimit, fs.createWriteStream(tmpPath))
if (sizeLimit.sizeLimitExceeded) {
+ logger.warn(
+ { projectId, expectedHash, maxUploadSize },
+ 'blob exceeds size threshold'
+ )
return render.requestEntityTooLarge(res)
}
const hash = await blobHash.fromFile(tmpPath)
if (hash !== expectedHash) {
- logger.debug({ hash, expectedHash }, 'Hash mismatch')
+ logger.warn({ projectId, hash, expectedHash }, 'Hash mismatch')
return render.conflict(res, 'File hash mismatch')
}
@@ -343,6 +348,10 @@ async function copyProjectBlob(req, res, next) {
targetBlobStore.getBlob(blobHash),
])
if (!sourceBlob) {
+ logger.warn(
+ { sourceProjectId, targetProjectId, blobHash },
+ 'missing source blob when copying across projects'
+ )
return render.notFound(res)
}
// Exit early if the blob exists in the target project.
--- a/services/history-v1/app.js
+++ b/services/history-v1/app.js
@@ -100,11 +100,13 @@ function setupErrorHandling() {
})
}
if (err.code === 'ENUM_MISMATCH') {
+ logger.warn({ err, projectId }, err.message)
return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({
message: 'invalid enum value: ' + err.paramName,
})
}
if (err.code === 'REQUIRED') {
+ logger.warn({ err, projectId }, err.message)
return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({
message: err.message,
})
--- a/services/project-history/app/js/HistoryStoreManager.js
+++ b/services/project-history/app/js/HistoryStoreManager.js
@@ -35,7 +35,10 @@ class StringStream extends stream.Readable {
_mocks.getMostRecentChunk = (projectId, historyId, callback) => {
const path = `projects/${historyId}/latest/history`
logger.debug({ projectId, historyId }, 'getting chunk from history service')
- _requestChunk({ path, json: true }, callback)
+ _requestChunk({ path, json: true }, (err, chunk) => {
+ if (err) return callback(OError.tag(err))
+ callback(null, chunk)
+ })
}
/**
@@ -54,7 +57,10 @@ export function getChunkAtVersion(projectId, historyId, version, callback) {
{ projectId, historyId, version },
'getting chunk from history service for version'
)
- _requestChunk({ path, json: true }, callback)
+ _requestChunk({ path, json: true }, (err, chunk) => {
+ if (err) return callback(OError.tag(err))
+ callback(null, chunk)
+ })
}
export function getMostRecentVersion(projectId, historyId, callback) {
@@ -68,8 +74,10 @@ export function getMostRecentVersion(projectId, historyId, callback) {
_.sortBy(chunk.chunk.history.changes || [], x => x.timestamp)
)
// find the latest project and doc versions in the chunk
- _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) =>
+ _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) => {
+ if (err1) err1 = OError.tag(err1)
_getLatestV2DocVersions(projectId, chunk, (err2, v2DocVersions) => {
+ if (err2) err2 = OError.tag(err2)
// return the project and doc versions
const projectStructureAndDocVersions = {
project: projectVersion,
@@ -83,7 +91,7 @@ export function getMostRecentVersion(projectId, historyId, callback) {
chunk
)
})
- )
+ })
})
}
@@ -211,7 +219,10 @@ export function getProjectBlob(historyId, blobHash, callback) {
logger.debug({ historyId, blobHash }, 'getting blob from history service')
_requestHistoryService(
{ path: `projects/${historyId}/blobs/${blobHash}` },
- callback
+ (err, blob) => {
+ if (err) return callback(OError.tag(err))
+ callback(null, blob)
+ }
)
}
@@ -277,7 +288,10 @@ function createBlobFromString(historyId, data, fileId, callback) {
(fsPath, cb) => {
_createBlob(historyId, fsPath, cb)
},
- callback
+ (err, hash) => {
+ if (err) return callback(OError.tag(err))
+ callback(null, hash)
+ }
)
}
@@ -330,7 +344,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
try {
ranges = HistoryBlobTranslator.createRangeBlobDataFromUpdate(update)
} catch (error) {
- return callback(error)
+ return callback(OError.tag(error))
}
createBlobFromString(
historyId,
@@ -338,7 +352,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
`project-${projectId}-doc-${update.doc}`,
(err, fileHash) => {
if (err) {
- return callback(err)
+ return callback(OError.tag(err))
}
if (ranges) {
createBlobFromString(
@@ -347,7 +361,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
`project-${projectId}-doc-${update.doc}-ranges`,
(err, rangesHash) => {
if (err) {
- return callback(err)
+ return callback(OError.tag(err))
}
logger.debug(
{ fileHash, rangesHash },
@@ -415,7 +429,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
},
(err, fileHash) => {
if (err) {
- return callback(err)
+ return callback(OError.tag(err))
}
if (update.hash && update.hash !== fileHash) {
logger.warn(
@@ -447,7 +461,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
},
(err, fileHash) => {
if (err) {
- return callback(err)
+ return callback(OError.tag(err))
}
logger.debug({ fileHash }, 'created empty blob for file')
callback(null, { file: fileHash })
@@ -520,7 +534,10 @@ export function initializeProject(historyId, callback) {
export function deleteProject(projectId, callback) {
_requestHistoryService(
{ method: 'DELETE', path: `projects/${projectId}` },
- callback
+ err => {
+ if (err) return callback(OError.tag(err))
+ callback(null)
+ }
)
}

View file

@@ -0,0 +1,60 @@
--- a/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
+++ b/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
@@ -7,6 +7,7 @@ import {
const { ObjectId } = mongodb
const MIN_MONGO_VERSION = [6, 0]
+const MIN_MONGO_FEATURE_COMPATIBILITY_VERSION = [6, 0]
async function main() {
let mongoClient
@@ -18,6 +19,7 @@ async function main() {
}
await checkMongoVersion(mongoClient)
+ await checkFeatureCompatibilityVersion(mongoClient)
try {
await testTransactions(mongoClient)
@@ -53,6 +55,41 @@ async function checkMongoVersion(mongoClient) {
}
}
+async function checkFeatureCompatibilityVersion(mongoClient) {
+ const {
+ featureCompatibilityVersion: { version },
+ } = await mongoClient
+ .db()
+ .admin()
+ .command({ getParameter: 1, featureCompatibilityVersion: 1 })
+ const [major, minor] = version.split('.').map(v => parseInt(v))
+ const [minMajor, minMinor] = MIN_MONGO_FEATURE_COMPATIBILITY_VERSION
+
+ if (major < minMajor || (major === minMajor && minor < minMinor)) {
+ const minVersion = MIN_MONGO_FEATURE_COMPATIBILITY_VERSION.join('.')
+ console.error(`
+The MongoDB server has featureCompatibilityVersion=${version}, but Overleaf requires at least version ${minVersion}.
+
+Open a mongo shell:
+- Overleaf Toolkit deployments: $ bin/mongo
+- Legacy docker-compose.yml deployments: $ docker exec -it mongo mongosh localhost/sharelatex
+
+In the mongo shell:
+> db.adminCommand( { setFeatureCompatibilityVersion: "${minMajor}.${minMinor}" } )
+
+Verify the new value:
+> db.adminCommand( { getParameter: 1, featureCompatibilityVersion: 1 } )
+ ...
+ {
+ featureCompatibilityVersion: { version: '${minMajor}.${minMinor}' },
+...
+
+Aborting.
+`)
+ process.exit(1)
+ }
+}
+
main()
.then(() => {
console.error('Mongodb is up.')

View file

@@ -0,0 +1,16 @@
--- a/services/web/modules/server-ce-scripts/scripts/create-user.mjs
+++ b/services/web/modules/server-ce-scripts/scripts/create-user.mjs
@@ -48,3 +48,13 @@ Please visit the following URL to set a password for ${email} and log in:
)
})
}
+
+if (filename === process.argv[1]) {
+ try {
+ await main()
+ process.exit(0)
+ } catch (error) {
+ console.error({ error })
+ process.exit(1)
+ }
+}

View file

@@ -0,0 +1,12 @@
#!/bin/bash
NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then
echo "running debug - references"
NODE_PARAMS="--inspect=0.0.0.0:30560"
fi
source /etc/overleaf/env.sh
export LISTEN_ADDRESS=127.0.0.1
exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/references/app.js >> /var/log/overleaf/references.log 2>&1

View file

@@ -29,6 +29,9 @@ module.exports = [
   {
     name: 'project-history',
   },
+  {
+    name: 'references',
+  },
   {
     name: 'history-v1',
   },

View file

@@ -6,8 +6,8 @@ all: test-e2e
 # Notable the container labels com.docker.compose.project.working_dir and com.docker.compose.project.config_files need to match when creating containers from the docker host (how you started things) and from host-admin (how tests reconfigure the instance).
 export PWD = $(shell pwd)

-export TEX_LIVE_DOCKER_IMAGE ?= gcr.io/overleaf-ops/texlive-full:2023.1
-export ALL_TEX_LIVE_DOCKER_IMAGES ?= gcr.io/overleaf-ops/texlive-full:2023.1,gcr.io/overleaf-ops/texlive-full:2022.1
+export TEX_LIVE_DOCKER_IMAGE ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2023.1
+export ALL_TEX_LIVE_DOCKER_IMAGES ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2023.1,us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2022.1
 export IMAGE_TAG_PRO ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/pro:latest
 export CYPRESS_SHARD ?=
 export COMPOSE_PROJECT_NAME ?= test
@@ -20,6 +20,7 @@ test-e2e-native:
 	npm run cypress:open

 test-e2e:
+	docker compose build host-admin
 	docker compose up --no-log-prefix --exit-code-from=e2e e2e

 test-e2e-open:
@@ -45,7 +46,7 @@ prefetch_custom_compose_pull:
 prefetch_custom: prefetch_custom_texlive
 prefetch_custom_texlive:
 	echo "$$ALL_TEX_LIVE_DOCKER_IMAGES" | tr ',' '\n' | xargs -I% \
-	sh -exc 'tag=%; re_tag=quay.io/sharelatex/$${tag#*/*/}; docker pull $$tag; docker tag $$tag $$re_tag'
+	sh -exc 'tag=%; re_tag=quay.io/sharelatex/$${tag#*/*/*/}; docker pull $$tag; docker tag $$tag $$re_tag'

 prefetch_custom: prefetch_old
 prefetch_old:

View file

@@ -179,6 +179,21 @@ describe('admin panel', function () {
       cy.get('nav').findByText('Manage Users').click()
     })

+    it('displays expected tabs', () => {
+      const tabs = ['Users', 'License Usage']
+      cy.get('[role="tab"]').each((el, index) => {
+        cy.wrap(el).findByText(tabs[index]).click()
+      })
+      cy.get('[role="tab"]').should('have.length', tabs.length)
+    })
+
+    it('license usage tab', () => {
+      cy.get('a').contains('License Usage').click()
+      cy.findByText(
+        'An active user is one who has opened a project in this Server Pro instance in the last 12 months.'
+      )
+    })
+
     describe('create users', () => {
       beforeEach(() => {
         cy.get('a').contains('New User').click()

View file

@@ -20,7 +20,7 @@ services:
       OVERLEAF_EMAIL_SMTP_HOST: 'mailtrap'
       OVERLEAF_EMAIL_SMTP_PORT: '25'
       OVERLEAF_EMAIL_SMTP_IGNORE_TLS: 'true'
-      ENABLED_LINKED_FILE_TYPES: 'project_file,project_output_file'
+      ENABLED_LINKED_FILE_TYPES: 'project_file,project_output_file,url'
       ENABLE_CONVERSIONS: 'true'
       EMAIL_CONFIRMATION_DISABLED: 'true'
     healthcheck:
@@ -131,7 +131,7 @@ services:
   saml:
     restart: always
-    image: gcr.io/overleaf-ops/saml-test
+    image: us-east1-docker.pkg.dev/overleaf-ops/ol-docker/saml-test
     environment:
       SAML_TEST_SP_ENTITY_ID: 'sharelatex-test-saml'
       SAML_BASE_URL_PATH: 'http://saml/simplesaml/'

View file

@@ -24,10 +24,13 @@ services:
       MOCHA_GREP: ${MOCHA_GREP}
       NODE_ENV: test
       NODE_OPTIONS: "--unhandled-rejections=strict"
+    volumes:
+      - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
     depends_on:
       mongo:
         condition: service_started
     user: node
+    entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
     command: npm run test:acceptance

View file

@@ -26,6 +26,7 @@ services:
       - .:/overleaf/services/chat
       - ../../node_modules:/overleaf/node_modules
       - ../../libraries:/overleaf/libraries
+      - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
     working_dir: /overleaf/services/chat
     environment:
       ELASTIC_SEARCH_DSN: es:9200
@@ -39,6 +40,7 @@ services:
     depends_on:
       mongo:
         condition: service_started
+    entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
     command: npm run --silent test:acceptance

 mongo:

View file

@@ -19,18 +19,18 @@ The CLSI can be configured through the following environment variables:
 * `ALLOWED_IMAGES` - Space separated list of allowed Docker TeX Live images
 * `CATCH_ERRORS` - Set to `true` to log uncaught exceptions
 * `COMPILE_GROUP_DOCKER_CONFIGS` - JSON string of Docker configs for compile groups
-* `COMPILES_HOST_DIR` - Working directory for LaTeX compiles
-* `OUTPUT_HOST_DIR` - Output directory for LaTeX compiles
+* `SANDBOXED_COMPILES` - Set to true to use sibling containers
+* `SANDBOXED_COMPILES_HOST_DIR_COMPILES` - Working directory for LaTeX compiles
+* `SANDBOXED_COMPILES_HOST_DIR_OUTPUT` - Output directory for LaTeX compiles
 * `COMPILE_SIZE_LIMIT` - Sets the body-parser [limit](https://github.com/expressjs/body-parser#limit)
-* `DOCKER_RUNNER` - Set to true to use sibling containers
 * `DOCKER_RUNTIME` -
 * `FILESTORE_DOMAIN_OVERRIDE` - The url for the filestore service e.g.`http://$FILESTORE_HOST:3009`
 * `FILESTORE_PARALLEL_FILE_DOWNLOADS` - Number of parallel file downloads
 * `LISTEN_ADDRESS` - The address for the RESTful service to listen on. Set to `0.0.0.0` to listen on all network interfaces
 * `PROCESS_LIFE_SPAN_LIMIT_MS` - Process life span limit in milliseconds
 * `SMOKE_TEST` - Whether to run smoke tests
-* `TEXLIVE_IMAGE` - The TeX Live Docker image to use for sibling containers, e.g. `gcr.io/overleaf-ops/texlive-full:2017.1`
-* `TEX_LIVE_IMAGE_NAME_OVERRIDE` - The name of the registry for the Docker image e.g. `gcr.io/overleaf-ops`
+* `TEXLIVE_IMAGE` - The TeX Live Docker image to use for sibling containers, e.g. `us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2017.1`
+* `TEX_LIVE_IMAGE_NAME_OVERRIDE` - The name of the registry for the Docker image e.g. `us-east1-docker.pkg.dev/overleaf-ops/ol-docker`
 * `TEXLIVE_IMAGE_USER` - When using sibling containers, the user to run as in the TeX Live image. Defaults to `tex`
 * `TEXLIVE_OPENOUT_ANY` - Sets the `openout_any` environment variable for TeX Live (see the `\openout` primitive [documentation](http://tug.org/texinfohtml/web2c.html#tex-invocation))
@@ -63,10 +63,10 @@ Then start the Docker container:
 docker run --rm \
   -p 127.0.0.1:3013:3013 \
   -e LISTEN_ADDRESS=0.0.0.0 \
-  -e DOCKER_RUNNER=true \
+  -e SANDBOXED_COMPILES=true \
   -e TEXLIVE_IMAGE=texlive/texlive \
   -e TEXLIVE_IMAGE_USER=root \
-  -e COMPILES_HOST_DIR="$PWD/compiles" \
+  -e SANDBOXED_COMPILES_HOST_DIR_COMPILES="$PWD/compiles" \
   -v "$PWD/compiles:/overleaf/services/clsi/compiles" \
   -v "$PWD/cache:/overleaf/services/clsi/cache" \
   -v /var/run/docker.sock:/var/run/docker.sock \

View file

@@ -232,8 +232,8 @@ const DockerRunner = {
       }
     }
     // set the path based on the image year
-    const match = image.match(/:([0-9]+)\.[0-9]+/)
-    const year = match ? match[1] : '2014'
+    const match = image.match(/:([0-9]+)\.[0-9]+|:TL([0-9]+)/)
+    const year = match ? match[1] || match[2] : '2014'
     env.PATH = `/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/${year}/bin/x86_64-linux/`
     const options = {
       Cmd: command,
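
A quick sketch of what the widened regex accepts (hypothetical image tags; the first capture group serves year-style tags, the second the new TL-style tags):

'texlive-full:2017.1'.match(/:([0-9]+)\.[0-9]+|:TL([0-9]+)/)[1]    // '2017'
'texlive/texlive:TL2023'.match(/:([0-9]+)\.[0-9]+|:TL([0-9]+)/)[2] // '2023'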

View file

@@ -2,7 +2,7 @@ clsi
 --data-dirs=cache,compiles,output
 --dependencies=
 --docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
---env-add=ENABLE_PDF_CACHING="true",PDF_CACHING_ENABLE_WORKER_POOL="true",ALLOWED_IMAGES=quay.io/sharelatex/texlive-full:2017.1,TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1,TEX_LIVE_IMAGE_NAME_OVERRIDE=us-east1-docker.pkg.dev/overleaf-ops/ol-docker,TEXLIVE_IMAGE_USER="tex",DOCKER_RUNNER="true",COMPILES_HOST_DIR=$PWD/compiles,OUTPUT_HOST_DIR=$PWD/output
+--env-add=ENABLE_PDF_CACHING="true",PDF_CACHING_ENABLE_WORKER_POOL="true",ALLOWED_IMAGES=quay.io/sharelatex/texlive-full:2017.1,TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1,TEX_LIVE_IMAGE_NAME_OVERRIDE=us-east1-docker.pkg.dev/overleaf-ops/ol-docker,TEXLIVE_IMAGE_USER="tex",SANDBOXED_COMPILES="true",SANDBOXED_COMPILES_HOST_DIR_COMPILES=$PWD/compiles,SANDBOXED_COMPILES_HOST_DIR_OUTPUT=$PWD/output
 --env-pass-through=
 --esmock-loader=False
 --node-version=22.15.1


@@ -107,7 +107,7 @@ if ((process.env.DOCKER_RUNNER || process.env.SANDBOXED_COMPILES) === 'true') {
       CLSI: 1,
     },
     socketPath: '/var/run/docker.sock',
-    user: process.env.TEXLIVE_IMAGE_USER || 'tex',
+    user: process.env.TEXLIVE_IMAGE_USER || 'www-data',
   },
   optimiseInDocker: true,
   expireProjectAfterIdleMs: 24 * 60 * 60 * 1000,


@@ -29,9 +29,9 @@ services:
       TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
       TEX_LIVE_IMAGE_NAME_OVERRIDE: us-east1-docker.pkg.dev/overleaf-ops/ol-docker
       TEXLIVE_IMAGE_USER: "tex"
-      DOCKER_RUNNER: "true"
-      COMPILES_HOST_DIR: $PWD/compiles
-      OUTPUT_HOST_DIR: $PWD/output
+      SANDBOXED_COMPILES: "true"
+      SANDBOXED_COMPILES_HOST_DIR_COMPILES: $PWD/compiles
+      SANDBOXED_COMPILES_HOST_DIR_OUTPUT: $PWD/output
     volumes:
       - ./compiles:/overleaf/services/clsi/compiles
       - /var/run/docker.sock:/var/run/docker.sock


@@ -47,8 +47,8 @@ services:
       TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
       TEX_LIVE_IMAGE_NAME_OVERRIDE: us-east1-docker.pkg.dev/overleaf-ops/ol-docker
       TEXLIVE_IMAGE_USER: "tex"
-      DOCKER_RUNNER: "true"
-      COMPILES_HOST_DIR: $PWD/compiles
-      OUTPUT_HOST_DIR: $PWD/output
+      SANDBOXED_COMPILES: "true"
+      SANDBOXED_COMPILES_HOST_DIR_COMPILES: $PWD/compiles
+      SANDBOXED_COMPILES_HOST_DIR_OUTPUT: $PWD/output
     command: npm run --silent test:acceptance


@@ -27,13 +27,13 @@
     "async": "^3.2.5",
     "body-parser": "^1.20.3",
     "bunyan": "^1.8.15",
-    "dockerode": "^4.0.5",
+    "dockerode": "^4.0.7",
     "express": "^4.21.2",
     "lodash": "^4.17.21",
     "p-limit": "^3.1.0",
     "request": "^2.88.2",
     "send": "^0.19.0",
-    "tar-fs": "^3.0.4",
+    "tar-fs": "^3.0.9",
     "workerpool": "^6.1.5"
   },
   "devDependencies": {


@@ -829,13 +829,19 @@
     "args": []
   },
   {
     "name": "gettimeofday",
     "action": "SCMP_ACT_ALLOW",
     "args": []
-  }, {
-    "name": "epoll_pwait",
-    "action": "SCMP_ACT_ALLOW",
-    "args": []
+  },
+  {
+    "name": "epoll_pwait",
+    "action": "SCMP_ACT_ALLOW",
+    "args": []
+  },
+  {
+    "name": "poll",
+    "action": "SCMP_ACT_ALLOW",
+    "args": []
   }
 ]
 }
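Whitelisting `poll` lets compile-time tools that rely on it run inside the sandboxed container. A hypothetical sanity check (the profile path is an assumption for illustration):

```js
// Hypothetical check: confirm the seccomp profile whitelists 'poll'.
// The file path is assumed; adjust to wherever the profile lives.
const fs = require('node:fs')

const profile = JSON.parse(fs.readFileSync('seccomp/clsi-profile.json', 'utf8'))
const allowed = new Set(
  profile.syscalls.filter(s => s.action === 'SCMP_ACT_ALLOW').map(s => s.name)
)
console.log(allowed.has('poll')) // expect: true after this change
```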


@@ -24,10 +24,13 @@ services:
       MOCHA_GREP: ${MOCHA_GREP}
       NODE_ENV: test
       NODE_OPTIONS: "--unhandled-rejections=strict"
+    volumes:
+      - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
     depends_on:
       mongo:
         condition: service_started
     user: node
+    entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
     command: npm run test:acceptance


@@ -26,6 +26,7 @@ services:
       - .:/overleaf/services/contacts
       - ../../node_modules:/overleaf/node_modules
       - ../../libraries:/overleaf/libraries
+      - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
     working_dir: /overleaf/services/contacts
     environment:
       ELASTIC_SEARCH_DSN: es:9200
@@ -39,6 +40,7 @@ services:
     depends_on:
       mongo:
         condition: service_started
+    entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
     command: npm run --silent test:acceptance
   mongo:


@@ -6,9 +6,9 @@
   "main": "app.js",
   "scripts": {
     "start": "node app.js",
-    "test:acceptance:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js",
+    "test:acceptance:_run": "mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js",
     "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
-    "test:unit:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec $@ test/unit/js",
+    "test:unit:_run": "mocha --loader=esmock --recursive --reporter spec $@ test/unit/js",
     "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
     "nodemon": "node --watch app.js",
     "lint": "eslint --max-warnings 0 --format unix .",


@@ -50,6 +50,14 @@ app.param('doc_id', function (req, res, next, docId) {
 app.get('/project/:project_id/doc-deleted', HttpController.getAllDeletedDocs)
 app.get('/project/:project_id/doc', HttpController.getAllDocs)
 app.get('/project/:project_id/ranges', HttpController.getAllRanges)
+app.get(
+  '/project/:project_id/comment-thread-ids',
+  HttpController.getCommentThreadIds
+)
+app.get(
+  '/project/:project_id/tracked-changes-user-ids',
+  HttpController.getTrackedChangesUserIds
+)
 app.get('/project/:project_id/has-ranges', HttpController.projectHasRanges)
 app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc)
 app.get('/project/:project_id/doc/:doc_id/deleted', HttpController.isDocDeleted)
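Both additions are plain GET routes returning JSON. A hedged example of calling them from another service (the docstore base URL and port are assumptions here):

```js
// Illustrative client calls for the two new endpoints; base URL/port assumed.
const base = 'http://127.0.0.1:3016'

async function main(projectId) {
  const threadIds = await fetch(
    `${base}/project/${projectId}/comment-thread-ids`
  ).then(res => res.json())
  const userIds = await fetch(
    `${base}/project/${projectId}/tracked-changes-user-ids`
  ).then(res => res.json())
  console.log({ threadIds, userIds })
}

main('507f1f77bcf86cd799439011').catch(console.error)
```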


@@ -1,5 +1,4 @@
-const { callbackify } = require('node:util')
-const MongoManager = require('./MongoManager').promises
+const MongoManager = require('./MongoManager')
 const Errors = require('./Errors')
 const logger = require('@overleaf/logger')
 const Settings = require('@overleaf/settings')
@@ -8,29 +7,12 @@ const { ReadableString } = require('@overleaf/stream-utils')
 const RangeManager = require('./RangeManager')
 const PersistorManager = require('./PersistorManager')
 const pMap = require('p-map')
-const { streamToBuffer } = require('./StreamToBuffer').promises
+const { streamToBuffer } = require('./StreamToBuffer')
 const { BSON } = require('mongodb-legacy')

 const PARALLEL_JOBS = Settings.parallelArchiveJobs
 const UN_ARCHIVE_BATCH_SIZE = Settings.unArchiveBatchSize

-module.exports = {
-  archiveAllDocs: callbackify(archiveAllDocs),
-  archiveDoc: callbackify(archiveDoc),
-  unArchiveAllDocs: callbackify(unArchiveAllDocs),
-  unarchiveDoc: callbackify(unarchiveDoc),
-  destroyProject: callbackify(destroyProject),
-  getDoc: callbackify(getDoc),
-  promises: {
-    archiveAllDocs,
-    archiveDoc,
-    unArchiveAllDocs,
-    unarchiveDoc,
-    destroyProject,
-    getDoc,
-  },
-}
-
 async function archiveAllDocs(projectId) {
   if (!_isArchivingEnabled()) {
     return
@@ -62,6 +44,8 @@ async function archiveDoc(projectId, docId) {
     throw new Error('doc has no lines')
   }

+  RangeManager.fixCommentIds(doc)
+
   // warn about any oversized docs already in mongo
   const linesSize = BSON.calculateObjectSize(doc.lines || {})
   const rangesSize = BSON.calculateObjectSize(doc.ranges || {})
@@ -225,3 +209,12 @@ function _isArchivingEnabled() {

   return true
 }
+
+module.exports = {
+  archiveAllDocs,
+  archiveDoc,
+  unArchiveAllDocs,
+  unarchiveDoc,
+  destroyProject,
+  getDoc,
+}


@@ -5,7 +5,6 @@ const _ = require('lodash')
 const DocArchive = require('./DocArchiveManager')
 const RangeManager = require('./RangeManager')
 const Settings = require('@overleaf/settings')
-const { callbackifyAll } = require('@overleaf/promise-utils')
 const { setTimeout } = require('node:timers/promises')

 /**
@@ -29,7 +28,7 @@ const DocManager = {
       throw new Error('must include inS3 when getting doc')
     }

-    const doc = await MongoManager.promises.findDoc(projectId, docId, filter)
+    const doc = await MongoManager.findDoc(projectId, docId, filter)

     if (doc == null) {
       throw new Errors.NotFoundError(
@@ -38,15 +37,19 @@
     }

     if (doc.inS3) {
-      await DocArchive.promises.unarchiveDoc(projectId, docId)
+      await DocArchive.unarchiveDoc(projectId, docId)
       return await DocManager._getDoc(projectId, docId, filter)
     }

+    if (filter.ranges) {
+      RangeManager.fixCommentIds(doc)
+    }
+
     return doc
   },

   async isDocDeleted(projectId, docId) {
-    const doc = await MongoManager.promises.findDoc(projectId, docId, {
+    const doc = await MongoManager.findDoc(projectId, docId, {
       deleted: true,
     })
@@ -74,7 +77,7 @@
   // returns the doc without any version information
   async _peekRawDoc(projectId, docId) {
-    const doc = await MongoManager.promises.findDoc(projectId, docId, {
+    const doc = await MongoManager.findDoc(projectId, docId, {
       lines: true,
       rev: true,
       deleted: true,
@@ -91,7 +94,7 @@
     if (doc.inS3) {
       // skip the unarchiving to mongo when getting a doc
-      const archivedDoc = await DocArchive.promises.getDoc(projectId, docId)
+      const archivedDoc = await DocArchive.getDoc(projectId, docId)
       Object.assign(doc, archivedDoc)
     }
@@ -102,7 +105,7 @@
   // without unarchiving it (avoids unnecessary writes to mongo)
   async peekDoc(projectId, docId) {
     const doc = await DocManager._peekRawDoc(projectId, docId)
-    await MongoManager.promises.checkRevUnchanged(doc)
+    await MongoManager.checkRevUnchanged(doc)
     return doc
   },
@@ -111,16 +114,18 @@
       lines: true,
       inS3: true,
     })
-    return doc
+    if (!doc) throw new Errors.NotFoundError()
+    if (!Array.isArray(doc.lines)) throw new Errors.DocWithoutLinesError()
+    return doc.lines.join('\n')
   },

   async getAllDeletedDocs(projectId, filter) {
-    return await MongoManager.promises.getProjectsDeletedDocs(projectId, filter)
+    return await MongoManager.getProjectsDeletedDocs(projectId, filter)
   },

   async getAllNonDeletedDocs(projectId, filter) {
-    await DocArchive.promises.unArchiveAllDocs(projectId)
-    const docs = await MongoManager.promises.getProjectsDocs(
+    await DocArchive.unArchiveAllDocs(projectId)
+    const docs = await MongoManager.getProjectsDocs(
       projectId,
       { include_deleted: false },
       filter
@@ -128,15 +133,46 @@
     if (docs == null) {
       throw new Errors.NotFoundError(`No docs for project ${projectId}`)
     }
+    if (filter.ranges) {
+      for (const doc of docs) {
+        RangeManager.fixCommentIds(doc)
+      }
+    }
     return docs
   },

+  async getCommentThreadIds(projectId) {
+    const docs = await DocManager.getAllNonDeletedDocs(projectId, {
+      _id: true,
+      ranges: true,
+    })
+    const byDoc = new Map()
+    for (const doc of docs) {
+      const ids = new Set()
+      for (const comment of doc.ranges?.comments || []) {
+        ids.add(comment.op.t)
+      }
+      if (ids.size > 0) byDoc.set(doc._id.toString(), Array.from(ids))
+    }
+    return Object.fromEntries(byDoc.entries())
+  },
+
+  async getTrackedChangesUserIds(projectId) {
+    const docs = await DocManager.getAllNonDeletedDocs(projectId, {
+      ranges: true,
+    })
+    const userIds = new Set()
+    for (const doc of docs) {
+      for (const change of doc.ranges?.changes || []) {
+        if (change.metadata.user_id === 'anonymous-user') continue
+        userIds.add(change.metadata.user_id)
+      }
+    }
+    return Array.from(userIds)
+  },
+
   async projectHasRanges(projectId) {
-    const docs = await MongoManager.promises.getProjectsDocs(
-      projectId,
-      {},
-      { _id: 1 }
-    )
+    const docs = await MongoManager.getProjectsDocs(projectId, {}, { _id: 1 })
     const docIds = docs.map(doc => doc._id)
     for (const docId of docIds) {
       const doc = await DocManager.peekDoc(projectId, docId)
@@ -247,7 +283,7 @@
     }

     modified = true
-    await MongoManager.promises.upsertIntoDocCollection(
+    await MongoManager.upsertIntoDocCollection(
       projectId,
       docId,
       doc?.rev,
@@ -262,11 +298,7 @@
   async patchDoc(projectId, docId, meta) {
     const projection = { _id: 1, deleted: true }
-    const doc = await MongoManager.promises.findDoc(
-      projectId,
-      docId,
-      projection
-    )
+    const doc = await MongoManager.findDoc(projectId, docId, projection)
     if (!doc) {
       throw new Errors.NotFoundError(
         `No such project/doc to delete: ${projectId}/${docId}`
@@ -275,7 +307,7 @@
     if (meta.deleted && Settings.docstore.archiveOnSoftDelete) {
       // The user will not read this doc anytime soon. Flush it out of mongo.
-      DocArchive.promises.archiveDoc(projectId, docId).catch(err => {
+      DocArchive.archiveDoc(projectId, docId).catch(err => {
         logger.warn(
           { projectId, docId, err },
           'archiving a single doc in the background failed'
@@ -283,15 +315,8 @@
       })
     }

-    await MongoManager.promises.patchDoc(projectId, docId, meta)
+    await MongoManager.patchDoc(projectId, docId, meta)
   },
 }

-module.exports = {
-  ...callbackifyAll(DocManager, {
-    multiResult: {
-      updateDoc: ['modified', 'rev'],
-    },
-  }),
-  promises: DocManager,
-}
+module.exports = DocManager
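For reference, `getCommentThreadIds` maps doc ids to the thread ids found in their comments, while `getTrackedChangesUserIds` returns a de-duplicated array that skips the anonymous user. A minimal sketch of the same aggregation over plain objects:

```js
// Minimal re-implementation sketch of getCommentThreadIds' aggregation,
// using plain objects instead of Mongo documents.
function commentThreadIds(docs) {
  const byDoc = {}
  for (const doc of docs) {
    const ids = [...new Set((doc.ranges?.comments || []).map(c => c.op.t))]
    if (ids.length > 0) byDoc[doc._id] = ids
  }
  return byDoc
}

const docs = [
  { _id: 'doc-1', ranges: { comments: [{ op: { t: 'thread-1' } }] } },
  { _id: 'doc-2', ranges: {} },
]
console.log(commentThreadIds(docs)) // { 'doc-1': ['thread-1'] }
```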


@@ -10,10 +10,13 @@ class DocRevValueError extends OError {}

 class DocVersionDecrementedError extends OError {}

+class DocWithoutLinesError extends OError {}
+
 module.exports = {
   Md5MismatchError,
   DocModifiedError,
   DocRevValueError,
   DocVersionDecrementedError,
+  DocWithoutLinesError,
   ...Errors,
 }


@@ -1,67 +1,35 @@
-// TODO: This file was created by bulk-decaffeinate.
-// Fix any style issues and re-enable lint.
-/*
- * decaffeinate suggestions:
- * DS102: Remove unnecessary code created because of implicit returns
- * DS207: Consider shorter variations of null checks
- * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
- */
 const { db, ObjectId } = require('./mongodb')
-const request = require('request')
-const async = require('async')
 const _ = require('lodash')
 const crypto = require('node:crypto')
 const settings = require('@overleaf/settings')
 const { port } = settings.internal.docstore
 const logger = require('@overleaf/logger')
+const { fetchNothing, fetchJson } = require('@overleaf/fetch-utils')

-module.exports = {
-  check(callback) {
-    const docId = new ObjectId()
-    const projectId = new ObjectId(settings.docstore.healthCheck.project_id)
-    const url = `http://127.0.0.1:${port}/project/${projectId}/doc/${docId}`
-    const lines = [
-      'smoke test - delete me',
-      `${crypto.randomBytes(32).toString('hex')}`,
-    ]
-    const getOpts = () => ({
-      url,
-      timeout: 3000,
-    })
-    logger.debug({ lines, url, docId, projectId }, 'running health check')
-    const jobs = [
-      function (cb) {
-        const opts = getOpts()
-        opts.json = { lines, version: 42, ranges: {} }
-        return request.post(opts, cb)
-      },
-      function (cb) {
-        const opts = getOpts()
-        opts.json = true
-        return request.get(opts, function (err, res, body) {
-          if (err != null) {
-            logger.err({ err }, 'docstore returned a error in health check get')
-            return cb(err)
-          } else if (res == null) {
-            return cb(new Error('no response from docstore with get check'))
-          } else if ((res != null ? res.statusCode : undefined) !== 200) {
-            return cb(new Error(`status code not 200, its ${res.statusCode}`))
-          } else if (
-            _.isEqual(body != null ? body.lines : undefined, lines) &&
-            (body != null ? body._id : undefined) === docId.toString()
-          ) {
-            return cb()
-          } else {
-            return cb(
-              new Error(
-                `health check lines not equal ${body.lines} != ${lines}`
-              )
-            )
-          }
-        })
-      },
-      cb => db.docs.deleteOne({ _id: docId, project_id: projectId }, cb),
-    ]
-    return async.series(jobs, callback)
-  },
+async function check() {
+  const docId = new ObjectId()
+  const projectId = new ObjectId(settings.docstore.healthCheck.project_id)
+  const url = `http://127.0.0.1:${port}/project/${projectId}/doc/${docId}`
+  const lines = [
+    'smoke test - delete me',
+    `${crypto.randomBytes(32).toString('hex')}`,
+  ]
+  logger.debug({ lines, url, docId, projectId }, 'running health check')
+  let body
+  try {
+    await fetchNothing(url, {
+      method: 'POST',
+      json: { lines, version: 42, ranges: {} },
+      signal: AbortSignal.timeout(3_000),
+    })
+    body = await fetchJson(url, { signal: AbortSignal.timeout(3_000) })
+  } finally {
+    await db.docs.deleteOne({ _id: docId, project_id: projectId })
+  }
+  if (!_.isEqual(body?.lines, lines)) {
+    throw new Error(`health check lines not equal ${body.lines} != ${lines}`)
+  }
+}
+
+module.exports = {
+  check,
 }
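The rewritten check bounds each request with `AbortSignal.timeout` and always cleans up the smoke-test doc in a `finally` block. A small hedged sketch of the same timeout pattern with plain `fetch` (the helper name is illustrative):

```js
// Sketch of the timeout pattern used above, with global fetch (Node 18+):
// AbortSignal.timeout(ms) aborts the request after ms milliseconds.
async function getJsonWithTimeout(url, ms = 3_000) {
  const res = await fetch(url, { signal: AbortSignal.timeout(ms) })
  if (!res.ok) throw new Error(`unexpected status ${res.status}`)
  return await res.json()
}
```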


@@ -4,143 +4,104 @@ const DocArchive = require('./DocArchiveManager')
 const HealthChecker = require('./HealthChecker')
 const Errors = require('./Errors')
 const Settings = require('@overleaf/settings')
+const { expressify } = require('@overleaf/promise-utils')

-function getDoc(req, res, next) {
+async function getDoc(req, res) {
   const { doc_id: docId, project_id: projectId } = req.params
   const includeDeleted = req.query.include_deleted === 'true'
   logger.debug({ projectId, docId }, 'getting doc')
-  DocManager.getFullDoc(projectId, docId, function (error, doc) {
-    if (error) {
-      return next(error)
-    }
-    logger.debug({ docId, projectId }, 'got doc')
-    if (doc == null) {
-      res.sendStatus(404)
-    } else if (doc.deleted && !includeDeleted) {
-      res.sendStatus(404)
-    } else {
-      res.json(_buildDocView(doc))
-    }
-  })
+  const doc = await DocManager.getFullDoc(projectId, docId)
+  logger.debug({ docId, projectId }, 'got doc')
+  if (doc.deleted && !includeDeleted) {
+    res.sendStatus(404)
+  } else {
+    res.json(_buildDocView(doc))
+  }
 }

-function peekDoc(req, res, next) {
+async function peekDoc(req, res) {
   const { doc_id: docId, project_id: projectId } = req.params
   logger.debug({ projectId, docId }, 'peeking doc')
-  DocManager.peekDoc(projectId, docId, function (error, doc) {
-    if (error) {
-      return next(error)
-    }
-    if (doc == null) {
-      res.sendStatus(404)
-    } else {
-      res.setHeader('x-doc-status', doc.inS3 ? 'archived' : 'active')
-      res.json(_buildDocView(doc))
-    }
-  })
+  const doc = await DocManager.peekDoc(projectId, docId)
+  res.setHeader('x-doc-status', doc.inS3 ? 'archived' : 'active')
+  res.json(_buildDocView(doc))
 }

-function isDocDeleted(req, res, next) {
+async function isDocDeleted(req, res) {
   const { doc_id: docId, project_id: projectId } = req.params
-  DocManager.isDocDeleted(projectId, docId, function (error, deleted) {
-    if (error) {
-      return next(error)
-    }
-    res.json({ deleted })
-  })
+  const deleted = await DocManager.isDocDeleted(projectId, docId)
+  res.json({ deleted })
 }

-function getRawDoc(req, res, next) {
+async function getRawDoc(req, res) {
   const { doc_id: docId, project_id: projectId } = req.params
   logger.debug({ projectId, docId }, 'getting raw doc')
-  DocManager.getDocLines(projectId, docId, function (error, doc) {
-    if (error) {
-      return next(error)
-    }
-    if (doc == null) {
-      res.sendStatus(404)
-    } else {
-      res.setHeader('content-type', 'text/plain')
-      res.send(_buildRawDocView(doc))
-    }
-  })
+  const content = await DocManager.getDocLines(projectId, docId)
+  res.setHeader('content-type', 'text/plain')
+  res.send(content)
 }

-function getAllDocs(req, res, next) {
+async function getAllDocs(req, res) {
   const { project_id: projectId } = req.params
   logger.debug({ projectId }, 'getting all docs')
-  DocManager.getAllNonDeletedDocs(
-    projectId,
-    { lines: true, rev: true },
-    function (error, docs) {
-      if (docs == null) {
-        docs = []
-      }
-      if (error) {
-        return next(error)
-      }
-      const docViews = _buildDocsArrayView(projectId, docs)
-      for (const docView of docViews) {
-        if (!docView.lines) {
-          logger.warn({ projectId, docId: docView._id }, 'missing doc lines')
-          docView.lines = []
-        }
-      }
-      res.json(docViews)
-    }
-  )
+  const docs = await DocManager.getAllNonDeletedDocs(projectId, {
+    lines: true,
+    rev: true,
+  })
+  const docViews = _buildDocsArrayView(projectId, docs)
+  for (const docView of docViews) {
+    if (!docView.lines) {
+      logger.warn({ projectId, docId: docView._id }, 'missing doc lines')
+      docView.lines = []
+    }
+  }
+  res.json(docViews)
 }

-function getAllDeletedDocs(req, res, next) {
+async function getAllDeletedDocs(req, res) {
   const { project_id: projectId } = req.params
   logger.debug({ projectId }, 'getting all deleted docs')
-  DocManager.getAllDeletedDocs(
-    projectId,
-    { name: true, deletedAt: true },
-    function (error, docs) {
-      if (error) {
-        return next(error)
-      }
-      res.json(
-        docs.map(doc => ({
-          _id: doc._id.toString(),
-          name: doc.name,
-          deletedAt: doc.deletedAt,
-        }))
-      )
-    }
-  )
+  const docs = await DocManager.getAllDeletedDocs(projectId, {
+    name: true,
+    deletedAt: true,
+  })
+  res.json(
+    docs.map(doc => ({
+      _id: doc._id.toString(),
+      name: doc.name,
+      deletedAt: doc.deletedAt,
+    }))
+  )
 }

-function getAllRanges(req, res, next) {
+async function getAllRanges(req, res) {
   const { project_id: projectId } = req.params
   logger.debug({ projectId }, 'getting all ranges')
-  DocManager.getAllNonDeletedDocs(
-    projectId,
-    { ranges: true },
-    function (error, docs) {
-      if (docs == null) {
-        docs = []
-      }
-      if (error) {
-        return next(error)
-      }
-      res.json(_buildDocsArrayView(projectId, docs))
-    }
-  )
-}
-
-function projectHasRanges(req, res, next) {
-  const { project_id: projectId } = req.params
-  DocManager.projectHasRanges(projectId, (err, projectHasRanges) => {
-    if (err) {
-      return next(err)
-    }
-    res.json({ projectHasRanges })
-  })
+  const docs = await DocManager.getAllNonDeletedDocs(projectId, {
+    ranges: true,
+  })
+  res.json(_buildDocsArrayView(projectId, docs))
 }

-function updateDoc(req, res, next) {
+async function getCommentThreadIds(req, res) {
+  const { project_id: projectId } = req.params
+  const threadIds = await DocManager.getCommentThreadIds(projectId)
+  res.json(threadIds)
+}
+
+async function getTrackedChangesUserIds(req, res) {
+  const { project_id: projectId } = req.params
+  const userIds = await DocManager.getTrackedChangesUserIds(projectId)
+  res.json(userIds)
+}
+
+async function projectHasRanges(req, res) {
+  const { project_id: projectId } = req.params
+  const projectHasRanges = await DocManager.projectHasRanges(projectId)
+  res.json({ projectHasRanges })
+}
+
+async function updateDoc(req, res) {
   const { doc_id: docId, project_id: projectId } = req.params
   const lines = req.body?.lines
   const version = req.body?.version
@@ -172,25 +133,20 @@ function updateDoc(req, res, next) {
   }

   logger.debug({ projectId, docId }, 'got http request to update doc')
-  DocManager.updateDoc(
+  const { modified, rev } = await DocManager.updateDoc(
     projectId,
     docId,
     lines,
     version,
-    ranges,
-    function (error, modified, rev) {
-      if (error) {
-        return next(error)
-      }
-      res.json({
-        modified,
-        rev,
-      })
-    }
+    ranges
   )
+  res.json({
+    modified,
+    rev,
+  })
 }

-function patchDoc(req, res, next) {
+async function patchDoc(req, res) {
   const { doc_id: docId, project_id: projectId } = req.params
   logger.debug({ projectId, docId }, 'patching doc')
@@ -203,12 +159,8 @@
       logger.fatal({ field }, 'joi validation for pathDoc is broken')
     }
   })
-  DocManager.patchDoc(projectId, docId, meta, function (error) {
-    if (error) {
-      return next(error)
-    }
-    res.sendStatus(204)
-  })
+  await DocManager.patchDoc(projectId, docId, meta)
+  res.sendStatus(204)
 }

 function _buildDocView(doc) {
@@ -221,10 +173,6 @@ function _buildDocView(doc) {
   return docView
 }

-function _buildRawDocView(doc) {
-  return (doc?.lines ?? []).join('\n')
-}
-
 function _buildDocsArrayView(projectId, docs) {
   const docViews = []
   for (const doc of docs) {
@@ -241,79 +189,69 @@
   return docViews
 }

-function archiveAllDocs(req, res, next) {
+async function archiveAllDocs(req, res) {
   const { project_id: projectId } = req.params
   logger.debug({ projectId }, 'archiving all docs')
-  DocArchive.archiveAllDocs(projectId, function (error) {
-    if (error) {
-      return next(error)
-    }
-    res.sendStatus(204)
-  })
+  await DocArchive.archiveAllDocs(projectId)
+  res.sendStatus(204)
 }

-function archiveDoc(req, res, next) {
+async function archiveDoc(req, res) {
   const { doc_id: docId, project_id: projectId } = req.params
   logger.debug({ projectId, docId }, 'archiving a doc')
-  DocArchive.archiveDoc(projectId, docId, function (error) {
-    if (error) {
-      return next(error)
-    }
-    res.sendStatus(204)
-  })
+  await DocArchive.archiveDoc(projectId, docId)
+  res.sendStatus(204)
 }

-function unArchiveAllDocs(req, res, next) {
+async function unArchiveAllDocs(req, res) {
   const { project_id: projectId } = req.params
   logger.debug({ projectId }, 'unarchiving all docs')
-  DocArchive.unArchiveAllDocs(projectId, function (err) {
-    if (err) {
-      if (err instanceof Errors.DocRevValueError) {
-        logger.warn({ err }, 'Failed to unarchive doc')
-        return res.sendStatus(409)
-      }
-      return next(err)
+  try {
+    await DocArchive.unArchiveAllDocs(projectId)
+  } catch (err) {
+    if (err instanceof Errors.DocRevValueError) {
+      logger.warn({ err }, 'Failed to unarchive doc')
+      return res.sendStatus(409)
     }
-    res.sendStatus(200)
-  })
+    throw err
+  }
+  res.sendStatus(200)
 }

-function destroyProject(req, res, next) {
+async function destroyProject(req, res) {
   const { project_id: projectId } = req.params
   logger.debug({ projectId }, 'destroying all docs')
-  DocArchive.destroyProject(projectId, function (error) {
-    if (error) {
-      return next(error)
-    }
-    res.sendStatus(204)
-  })
+  await DocArchive.destroyProject(projectId)
+  res.sendStatus(204)
 }

-function healthCheck(req, res) {
-  HealthChecker.check(function (err) {
-    if (err) {
-      logger.err({ err }, 'error performing health check')
-      res.sendStatus(500)
-    } else {
-      res.sendStatus(200)
-    }
-  })
+async function healthCheck(req, res) {
+  try {
+    await HealthChecker.check()
+  } catch (err) {
+    logger.err({ err }, 'error performing health check')
+    res.sendStatus(500)
    return
+  }
+  res.sendStatus(200)
 }

 module.exports = {
-  getDoc,
-  peekDoc,
-  isDocDeleted,
-  getRawDoc,
-  getAllDocs,
-  getAllDeletedDocs,
-  getAllRanges,
-  projectHasRanges,
-  updateDoc,
-  patchDoc,
-  archiveAllDocs,
-  archiveDoc,
-  unArchiveAllDocs,
-  destroyProject,
-  healthCheck,
+  getDoc: expressify(getDoc),
+  peekDoc: expressify(peekDoc),
+  isDocDeleted: expressify(isDocDeleted),
+  getRawDoc: expressify(getRawDoc),
+  getAllDocs: expressify(getAllDocs),
+  getAllDeletedDocs: expressify(getAllDeletedDocs),
+  getAllRanges: expressify(getAllRanges),
+  getTrackedChangesUserIds: expressify(getTrackedChangesUserIds),
+  getCommentThreadIds: expressify(getCommentThreadIds),
+  projectHasRanges: expressify(projectHasRanges),
+  updateDoc: expressify(updateDoc),
+  patchDoc: expressify(patchDoc),
+  archiveAllDocs: expressify(archiveAllDocs),
+  archiveDoc: expressify(archiveDoc),
+  unArchiveAllDocs: expressify(unArchiveAllDocs),
+  destroyProject: expressify(destroyProject),
  healthCheck: expressify(healthCheck),
 }
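Every handler is now async and wrapped in `expressify` at the export site. A hedged sketch of what such a wrapper does (the real helper lives in `@overleaf/promise-utils`; this is not its exact implementation):

```js
// Sketch: route async handler rejections into Express error handling,
// so a thrown error reaches the error middleware instead of hanging.
function expressify(handler) {
  return (req, res, next) => {
    handler(req, res, next).catch(next)
  }
}
```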


@@ -1,7 +1,6 @@
 const { db, ObjectId } = require('./mongodb')
 const Settings = require('@overleaf/settings')
 const Errors = require('./Errors')
-const { callbackify } = require('node:util')

 const ARCHIVING_LOCK_DURATION_MS = Settings.archivingLockDurationMs
@@ -241,34 +240,17 @@ async function destroyProject(projectId) {
 }

 module.exports = {
-  findDoc: callbackify(findDoc),
-  getProjectsDeletedDocs: callbackify(getProjectsDeletedDocs),
-  getProjectsDocs: callbackify(getProjectsDocs),
-  getArchivedProjectDocs: callbackify(getArchivedProjectDocs),
-  getNonArchivedProjectDocIds: callbackify(getNonArchivedProjectDocIds),
-  getNonDeletedArchivedProjectDocs: callbackify(
-    getNonDeletedArchivedProjectDocs
-  ),
-  upsertIntoDocCollection: callbackify(upsertIntoDocCollection),
-  restoreArchivedDoc: callbackify(restoreArchivedDoc),
-  patchDoc: callbackify(patchDoc),
-  getDocForArchiving: callbackify(getDocForArchiving),
-  markDocAsArchived: callbackify(markDocAsArchived),
-  checkRevUnchanged: callbackify(checkRevUnchanged),
-  destroyProject: callbackify(destroyProject),
-  promises: {
-    findDoc,
-    getProjectsDeletedDocs,
-    getProjectsDocs,
-    getArchivedProjectDocs,
-    getNonArchivedProjectDocIds,
-    getNonDeletedArchivedProjectDocs,
-    upsertIntoDocCollection,
-    restoreArchivedDoc,
-    patchDoc,
-    getDocForArchiving,
-    markDocAsArchived,
-    checkRevUnchanged,
-    destroyProject,
-  },
+  findDoc,
+  getProjectsDeletedDocs,
+  getProjectsDocs,
+  getArchivedProjectDocs,
+  getNonArchivedProjectDocIds,
+  getNonDeletedArchivedProjectDocs,
+  upsertIntoDocCollection,
+  restoreArchivedDoc,
+  patchDoc,
+  getDocForArchiving,
+  markDocAsArchived,
+  checkRevUnchanged,
+  destroyProject,
 }
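The same callbackify-removal pattern recurs across these modules: the dual callback/`promises` export collapses into a single async export. A runnable sketch of the before/after shape (the `findDoc` body here is a dummy stand-in):

```js
// Sketch of the export shape being removed; findDoc is a dummy stand-in.
const { callbackify } = require('node:util')

async function findDoc(projectId, docId) {
  return { _id: docId, project_id: projectId }
}

// old shape: callback-style exports plus a 'promises' namespace
const oldShape = { findDoc: callbackify(findDoc), promises: { findDoc } }
// new shape: async functions exported directly
const newShape = { findDoc }

oldShape.findDoc('p1', 'd1', (err, doc) => console.log('callback:', doc))
newShape.findDoc('p1', 'd1').then(doc => console.log('promise:', doc))
```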


@@ -49,15 +49,25 @@ module.exports = RangeManager = {
       updateMetadata(change.metadata)
     }
     for (const comment of Array.from(ranges.comments || [])) {
-      comment.id = RangeManager._safeObjectId(comment.id)
-      if ((comment.op != null ? comment.op.t : undefined) != null) {
-        comment.op.t = RangeManager._safeObjectId(comment.op.t)
-      }
+      // Two bugs resulted in mismatched ids, prefer the thread id from the op: https://github.com/overleaf/internal/issues/23272
+      comment.id = RangeManager._safeObjectId(comment.op?.t || comment.id)
+      if (comment.op) comment.op.t = comment.id
+
+      // resolved property is added to comments when they are obtained from history, but this state doesn't belong in mongo docs collection
+      // more info: https://github.com/overleaf/internal/issues/24371#issuecomment-2913095174
+      delete comment.op?.resolved
       updateMetadata(comment.metadata)
     }
     return ranges
   },

+  fixCommentIds(doc) {
+    for (const comment of doc?.ranges?.comments || []) {
+      // Two bugs resulted in mismatched ids, prefer the thread id from the op: https://github.com/overleaf/internal/issues/23272
+      if (comment.op?.t) comment.id = comment.op.t
+    }
+  },
+
   _safeObjectId(data) {
     try {
       return new ObjectId(data)
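`fixCommentIds` normalises docs already stored in Mongo at read time. A quick sketch of its effect on a sample doc:

```js
// Sketch of fixCommentIds' effect: the comment id is forced to the thread id
// carried in the op, when one is present.
function fixCommentIds(doc) {
  for (const comment of doc?.ranges?.comments || []) {
    if (comment.op?.t) comment.id = comment.op.t
  }
}

const doc = { ranges: { comments: [{ id: 'stale-id', op: { t: 'thread-id' } }] } }
fixCommentIds(doc)
console.log(doc.ranges.comments[0].id) // 'thread-id'
```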


@@ -2,13 +2,9 @@ const { LoggerStream, WritableBuffer } = require('@overleaf/stream-utils')
 const Settings = require('@overleaf/settings')
 const logger = require('@overleaf/logger/logging-manager')
 const { pipeline } = require('node:stream/promises')
-const { callbackify } = require('node:util')

 module.exports = {
-  streamToBuffer: callbackify(streamToBuffer),
-  promises: {
-    streamToBuffer,
-  },
+  streamToBuffer,
 }

 async function streamToBuffer(projectId, docId, stream) {


@@ -27,12 +27,15 @@ services:
       MOCHA_GREP: ${MOCHA_GREP}
       NODE_ENV: test
       NODE_OPTIONS: "--unhandled-rejections=strict"
+    volumes:
+      - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
     depends_on:
       mongo:
         condition: service_started
       gcs:
         condition: service_healthy
     user: node
+    entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
     command: npm run test:acceptance


@@ -26,6 +26,7 @@ services:
       - .:/overleaf/services/docstore
       - ../../node_modules:/overleaf/node_modules
       - ../../libraries:/overleaf/libraries
+      - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
     working_dir: /overleaf/services/docstore
     environment:
       ELASTIC_SEARCH_DSN: es:9200
@@ -44,6 +45,7 @@ services:
         condition: service_started
       gcs:
         condition: service_healthy
+    entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
     command: npm run --silent test:acceptance
   mongo:


@@ -17,6 +17,7 @@
     "types:check": "tsc --noEmit"
   },
   "dependencies": {
+    "@overleaf/fetch-utils": "*",
     "@overleaf/logger": "*",
     "@overleaf/metrics": "*",
     "@overleaf/o-error": "*",


@@ -1001,6 +1001,15 @@ describe('Archiving', function () {
         },
         version: 2,
       }
+      this.fixedRanges = {
+        ...this.doc.ranges,
+        comments: [
+          {
+            ...this.doc.ranges.comments[0],
+            id: this.doc.ranges.comments[0].op.t,
+          },
+        ],
+      }
       return DocstoreClient.createDoc(
         this.project_id,
         this.doc._id,
@@ -1048,7 +1057,7 @@
             throw error
           }
           s3Doc.lines.should.deep.equal(this.doc.lines)
-          const ranges = JSON.parse(JSON.stringify(this.doc.ranges)) // ObjectId -> String
+          const ranges = JSON.parse(JSON.stringify(this.fixedRanges)) // ObjectId -> String
           s3Doc.ranges.should.deep.equal(ranges)
           return done()
         }
@@ -1075,7 +1084,7 @@
             throw error
           }
           doc.lines.should.deep.equal(this.doc.lines)
-          doc.ranges.should.deep.equal(this.doc.ranges)
+          doc.ranges.should.deep.equal(this.fixedRanges)
           expect(doc.inS3).not.to.exist
           return done()
         })


@@ -20,30 +20,73 @@ const DocstoreClient = require('./helpers/DocstoreClient')
 describe('Getting all docs', function () {
   beforeEach(function (done) {
     this.project_id = new ObjectId()
+    this.threadId1 = new ObjectId().toString()
+    this.threadId2 = new ObjectId().toString()
     this.docs = [
       {
         _id: new ObjectId(),
         lines: ['one', 'two', 'three'],
-        ranges: { mock: 'one' },
+        ranges: {
+          comments: [
+            { id: new ObjectId().toString(), op: { t: this.threadId1 } },
+          ],
+          changes: [
+            {
+              id: new ObjectId().toString(),
+              metadata: { user_id: 'user-id-1' },
+            },
+          ],
+        },
         rev: 2,
       },
       {
         _id: new ObjectId(),
         lines: ['aaa', 'bbb', 'ccc'],
-        ranges: { mock: 'two' },
+        ranges: {
+          changes: [
+            {
+              id: new ObjectId().toString(),
+              metadata: { user_id: 'user-id-2' },
+            },
+          ],
+        },
         rev: 4,
       },
       {
         _id: new ObjectId(),
         lines: ['111', '222', '333'],
-        ranges: { mock: 'three' },
+        ranges: {
+          comments: [
+            { id: new ObjectId().toString(), op: { t: this.threadId2 } },
+          ],
+          changes: [
+            {
+              id: new ObjectId().toString(),
+              metadata: { user_id: 'anonymous-user' },
+            },
+          ],
+        },
         rev: 6,
       },
     ]
+    this.fixedRanges = this.docs.map(doc => {
+      if (!doc.ranges?.comments?.length) return doc.ranges
+      return {
+        ...doc.ranges,
+        comments: [
+          { ...doc.ranges.comments[0], id: doc.ranges.comments[0].op.t },
+        ],
+      }
+    })
     this.deleted_doc = {
       _id: new ObjectId(),
       lines: ['deleted'],
-      ranges: { mock: 'four' },
+      ranges: {
+        comments: [{ id: new ObjectId().toString(), op: { t: 'thread-id-3' } }],
+        changes: [
+          { id: new ObjectId().toString(), metadata: { user_id: 'user-id-3' } },
+        ],
+      },
       rev: 8,
     }
     const version = 42
@@ -96,7 +139,7 @@
     })
   })

-  return it('getAllRanges should return all the (non-deleted) doc ranges', function (done) {
+  it('getAllRanges should return all the (non-deleted) doc ranges', function (done) {
     return DocstoreClient.getAllRanges(this.project_id, (error, res, docs) => {
       if (error != null) {
         throw error
@@ -104,9 +147,38 @@
       docs.length.should.equal(this.docs.length)
       for (let i = 0; i < docs.length; i++) {
         const doc = docs[i]
-        doc.ranges.should.deep.equal(this.docs[i].ranges)
+        doc.ranges.should.deep.equal(this.fixedRanges[i])
       }
       return done()
     })
   })
+
+  it('getTrackedChangesUserIds should return all the user ids from (non-deleted) ranges', function (done) {
+    DocstoreClient.getTrackedChangesUserIds(
+      this.project_id,
+      (error, res, userIds) => {
+        if (error != null) {
+          throw error
+        }
+        userIds.should.deep.equal(['user-id-1', 'user-id-2'])
+        done()
+      }
+    )
+  })
+
+  it('getCommentThreadIds should return all the thread ids from (non-deleted) ranges', function (done) {
+    DocstoreClient.getCommentThreadIds(
+      this.project_id,
+      (error, res, threadIds) => {
+        if (error != null) {
+          throw error
+        }
+        threadIds.should.deep.equal({
+          [this.docs[0]._id.toString()]: [this.threadId1],
+          [this.docs[2]._id.toString()]: [this.threadId2],
+        })
+        done()
+      }
+    )
+  })
 })


@@ -28,10 +28,26 @@ describe('Getting a doc', function () {
           op: { i: 'foo', p: 3 },
           meta: {
             user_id: new ObjectId().toString(),
-            ts: new Date().toString(),
+            ts: new Date().toJSON(),
           },
         },
       ],
+      comments: [
+        {
+          id: new ObjectId().toString(),
+          op: { c: 'comment', p: 1, t: new ObjectId().toString() },
+          metadata: {
+            user_id: new ObjectId().toString(),
+            ts: new Date().toJSON(),
+          },
+        },
+      ],
+    }
+    this.fixedRanges = {
+      ...this.ranges,
+      comments: [
+        { ...this.ranges.comments[0], id: this.ranges.comments[0].op.t },
+      ],
     }
     return DocstoreApp.ensureRunning(() => {
       return DocstoreClient.createDoc(
@@ -60,7 +76,7 @@
           if (error) return done(error)
           doc.lines.should.deep.equal(this.lines)
           doc.version.should.equal(this.version)
-          doc.ranges.should.deep.equal(this.ranges)
+          doc.ranges.should.deep.equal(this.fixedRanges)
           return done()
         }
       )
@@ -114,7 +130,7 @@
           if (error) return done(error)
           doc.lines.should.deep.equal(this.lines)
           doc.version.should.equal(this.version)
-          doc.ranges.should.deep.equal(this.ranges)
+          doc.ranges.should.deep.equal(this.fixedRanges)
           doc.deleted.should.equal(true)
           return done()
         }


@@ -0,0 +1,28 @@
+const { db } = require('../../../app/js/mongodb')
+const DocstoreApp = require('./helpers/DocstoreApp')
+const DocstoreClient = require('./helpers/DocstoreClient')
+const { expect } = require('chai')
+
+describe('HealthChecker', function () {
+  beforeEach('start', function (done) {
+    DocstoreApp.ensureRunning(done)
+  })
+  beforeEach('clear docs collection', async function () {
+    await db.docs.deleteMany({})
+  })
+  let res
+  beforeEach('run health check', function (done) {
+    DocstoreClient.healthCheck((err, _res) => {
+      res = _res
+      done(err)
+    })
+  })
+
+  it('should return 200', function () {
+    res.statusCode.should.equal(200)
+  })
+
+  it('should not leave any cruft behind', async function () {
+    expect(await db.docs.find({}).toArray()).to.deep.equal([])
+  })
+})


@@ -100,6 +100,26 @@ module.exports = DocstoreClient = {
     )
   },

+  getCommentThreadIds(projectId, callback) {
+    request.get(
+      {
+        url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/comment-thread-ids`,
+        json: true,
+      },
+      callback
+    )
+  },
+
+  getTrackedChangesUserIds(projectId, callback) {
+    request.get(
+      {
+        url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/tracked-changes-user-ids`,
+        json: true,
+      },
+      callback
+    )
+  },
+
   updateDoc(projectId, docId, lines, version, ranges, callback) {
     return request.post(
       {
@@ -181,6 +201,13 @@
     )
   },

+  healthCheck(callback) {
+    request.get(
+      `http://127.0.0.1:${settings.internal.docstore.port}/health_check`,
+      callback
+    )
+  },
+
   getS3Doc(projectId, docId, callback) {
     getStringFromPersistor(
       Persistor,


@@ -4,7 +4,7 @@ const modulePath = '../../../app/js/DocArchiveManager.js'
 const SandboxedModule = require('sandboxed-module')
 const { ObjectId } = require('mongodb-legacy')
 const Errors = require('../../../app/js/Errors')
-const StreamToBuffer = require('../../../app/js/StreamToBuffer').promises
+const StreamToBuffer = require('../../../app/js/StreamToBuffer')

 describe('DocArchiveManager', function () {
   let DocArchiveManager,
@@ -31,6 +31,7 @@ describe('DocArchiveManager', function () {
     RangeManager = {
       jsonRangesToMongo: sinon.stub().returns({ mongo: 'ranges' }),
+      fixCommentIds: sinon.stub(),
     }
     Settings = {
       docstore: {
@@ -142,37 +143,33 @@ describe('DocArchiveManager', function () {
     }

     MongoManager = {
-      promises: {
-        markDocAsArchived: sinon.stub().resolves(),
-        restoreArchivedDoc: sinon.stub().resolves(),
-        upsertIntoDocCollection: sinon.stub().resolves(),
-        getProjectsDocs: sinon.stub().resolves(mongoDocs),
-        getNonDeletedArchivedProjectDocs: getArchivedProjectDocs,
-        getNonArchivedProjectDocIds,
-        getArchivedProjectDocs,
-        findDoc: sinon.stub().callsFake(fakeGetDoc),
-        getDocForArchiving: sinon.stub().callsFake(fakeGetDoc),
-        destroyProject: sinon.stub().resolves(),
-      },
+      markDocAsArchived: sinon.stub().resolves(),
+      restoreArchivedDoc: sinon.stub().resolves(),
+      upsertIntoDocCollection: sinon.stub().resolves(),
+      getProjectsDocs: sinon.stub().resolves(mongoDocs),
+      getNonDeletedArchivedProjectDocs: getArchivedProjectDocs,
+      getNonArchivedProjectDocIds,
+      getArchivedProjectDocs,
+      findDoc: sinon.stub().callsFake(fakeGetDoc),
+      getDocForArchiving: sinon.stub().callsFake(fakeGetDoc),
+      destroyProject: sinon.stub().resolves(),
     }

     // Wrap streamToBuffer so that we can pass in something that it expects (in
     // this case, a Promise) rather than a stubbed stream object
     streamToBuffer = {
-      promises: {
-        streamToBuffer: async () => {
-          const inputStream = new Promise(resolve => {
-            stream.on('data', data => resolve(data))
-          })
-          const value = await StreamToBuffer.streamToBuffer(
-            'testProjectId',
-            'testDocId',
-            inputStream
-          )
-          return value
-        },
+      streamToBuffer: async () => {
+        const inputStream = new Promise(resolve => {
+          stream.on('data', data => resolve(data))
+        })
+        const value = await StreamToBuffer.streamToBuffer(
+          'testProjectId',
+          'testDocId',
+          inputStream
+        )
+        return value
       },
     }
@@ -192,9 +189,13 @@ describe('DocArchiveManager', function () {
   describe('archiveDoc', function () {
     it('should resolve when passed a valid document', async function () {
-      await expect(
-        DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
-      ).to.eventually.be.fulfilled
+      await expect(DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)).to
+        .eventually.be.fulfilled
+    })
+
+    it('should fix comment ids', async function () {
+      await DocArchiveManager.archiveDoc(projectId, mongoDocs[1]._id)
+      expect(RangeManager.fixCommentIds).to.have.been.called
     })

     it('should throw an error if the doc has no lines', async function () {
@@ -202,26 +203,26 @@
       doc.lines = null

       await expect(
-        DocArchiveManager.promises.archiveDoc(projectId, doc._id)
+        DocArchiveManager.archiveDoc(projectId, doc._id)
       ).to.eventually.be.rejectedWith('doc has no lines')
     })

     it('should add the schema version', async function () {
-      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[1]._id)
+      await DocArchiveManager.archiveDoc(projectId, mongoDocs[1]._id)
       expect(StreamUtils.ReadableString).to.have.been.calledWith(
         sinon.match(/"schema_v":1/)
       )
     })

     it('should calculate the hex md5 sum of the content', async function () {
-      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
+      await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
       expect(Crypto.createHash).to.have.been.calledWith('md5')
       expect(HashUpdate).to.have.been.calledWith(archivedDocJson)
       expect(HashDigest).to.have.been.calledWith('hex')
     })

     it('should pass the md5 hash to the object persistor for verification', async function () {
-      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
+      await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
       expect(PersistorManager.sendStream).to.have.been.calledWith(
         sinon.match.any,
@@ -232,7 +233,7 @@
     })

     it('should pass the correct bucket and key to the persistor', async function () {
-      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
+      await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
       expect(PersistorManager.sendStream).to.have.been.calledWith(
         Settings.docstore.bucket,
@@ -241,7 +242,7 @@
     })

     it('should create a stream from the encoded json and send it', async function () {
-      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
+      await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
       expect(StreamUtils.ReadableString).to.have.been.calledWith(
         archivedDocJson
       )
@@ -253,8 +254,8 @@
     })

     it('should mark the doc as archived', async function () {
-      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
-      expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
+      await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
+      expect(MongoManager.markDocAsArchived).to.have.been.calledWith(
         projectId,
         mongoDocs[0]._id,
         mongoDocs[0].rev
@@ -267,8 +268,8 @@
       })

       it('should bail out early', async function () {
-        await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
-        expect(MongoManager.promises.getDocForArchiving).to.not.have.been.called
+        await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
+        expect(MongoManager.getDocForArchiving).to.not.have.been.called
       })
     })
@@ -285,7 +286,7 @@
       it('should return an error', async function () {
         await expect(
-          DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
+          DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
         ).to.eventually.be.rejectedWith('null bytes detected')
       })
     })
@@ -296,21 +297,19 @@
     describe('when the doc is in S3', function () {
       beforeEach(function () {
-        MongoManager.promises.findDoc = sinon
-          .stub()
-          .resolves({ inS3: true, rev })
+        MongoManager.findDoc = sinon.stub().resolves({ inS3: true, rev })
         docId = mongoDocs[0]._id
         lines = ['doc', 'lines']
         rev = 123
       })

       it('should resolve when passed a valid document', async function () {
-        await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId))
-          .to.eventually.be.fulfilled
+        await expect(DocArchiveManager.unarchiveDoc(projectId, docId)).to
+          .eventually.be.fulfilled
       })

       it('should test md5 validity with the raw buffer', async function () {
-        await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
+        await DocArchiveManager.unarchiveDoc(projectId, docId)
         expect(HashUpdate).to.have.been.calledWith(
           sinon.match.instanceOf(Buffer)
         )
@@ -319,15 +318,17 @@
       it('should throw an error if the md5 does not match', async function () {
         PersistorManager.getObjectMd5Hash.resolves('badf00d')
         await expect(
-          DocArchiveManager.promises.unarchiveDoc(projectId, docId)
+          DocArchiveManager.unarchiveDoc(projectId, docId)
         ).to.eventually.be.rejected.and.be.instanceof(Errors.Md5MismatchError)
       })

       it('should restore the doc in Mongo', async function () {
-        await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
-        expect(
-          MongoManager.promises.restoreArchivedDoc
-        ).to.have.been.calledWith(projectId, docId, archivedDoc)
+        await DocArchiveManager.unarchiveDoc(projectId, docId)
+        expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith(
+          projectId,
+          docId,
+          archivedDoc
+        )
       })

       describe('when archiving is not configured', function () {
@@ -337,15 +338,15 @@
         it('should error out on archived doc', async function () {
           await expect(
-            DocArchiveManager.promises.unarchiveDoc(projectId, docId)
+            DocArchiveManager.unarchiveDoc(projectId, docId)
           ).to.eventually.be.rejected.and.match(
             /found archived doc, but archiving backend is not configured/
           )
         })

         it('should return early on non-archived doc', async function () {
-          MongoManager.promises.findDoc = sinon.stub().resolves({ rev })
-          await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
+          MongoManager.findDoc = sinon.stub().resolves({ rev })
+          await DocArchiveManager.unarchiveDoc(projectId, docId)
           expect(PersistorManager.getObjectMd5Hash).to.not.have.been.called
         })
       })
@@ -363,10 +364,12 @@
         })

         it('should return the docs lines', async function () {
-          await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
-          expect(
-            MongoManager.promises.restoreArchivedDoc
-          ).to.have.been.calledWith(projectId, docId, { lines, rev })
+          await DocArchiveManager.unarchiveDoc(projectId, docId)
+          expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith(
+            projectId,
+            docId,
+            { lines, rev }
+          )
         })
       })
@@ -385,14 +388,16 @@
         })

         it('should return the doc lines and ranges', async function () {
-          await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
-          expect(
-            MongoManager.promises.restoreArchivedDoc
-          ).to.have.been.calledWith(projectId, docId, {
-            lines,
-            ranges: { mongo: 'ranges' },
-            rev: 456,
-          })
+          await DocArchiveManager.unarchiveDoc(projectId, docId)
+          expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith(
+            projectId,
+            docId,
+            {
+              lines,
+              ranges: { mongo: 'ranges' },
+              rev: 456,
+            }
+          )
         })
       })
@@ -406,10 +411,12 @@
         })

         it('should return only the doc lines', async function () {
-          await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
-          expect(
-            MongoManager.promises.restoreArchivedDoc
-          ).to.have.been.calledWith(projectId, docId, { lines, rev: 456 })
+          await DocArchiveManager.unarchiveDoc(projectId, docId)
+          expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith(
+            projectId,
+            docId,
+            { lines, rev: 456 }
+          )
         })
       })
@@ -423,10 +430,12 @@
         })

         it('should use the rev obtained from Mongo', async function () {
-          await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
-          expect(
-            MongoManager.promises.restoreArchivedDoc
-          ).to.have.been.calledWith(projectId, docId, { lines, rev })
+          await DocArchiveManager.unarchiveDoc(projectId, docId)
+          expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith(
+            projectId,
+            docId,
+            { lines, rev }
+          )
         })
       })
@@ -441,7 +450,7 @@
         it('should throw an error', async function () {
           await expect(
-            DocArchiveManager.promises.unarchiveDoc(projectId, docId)
+            DocArchiveManager.unarchiveDoc(projectId, docId)
           ).to.eventually.be.rejectedWith(
             "I don't understand the doc format in s3"
           )
@@ -451,8 +460,8 @@
     })

     it('should not do anything if the file is already unarchived', async function () {
-      MongoManager.promises.findDoc.resolves({ inS3: false })
-      await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
+      MongoManager.findDoc.resolves({ inS3: false })
+      await DocArchiveManager.unarchiveDoc(projectId, docId)
       expect(PersistorManager.getObjectStream).not.to.have.been.called
     })
@@ -461,7 +470,7 @@
         .stub()
         .rejects(new Errors.NotFoundError())
       await expect(
-        DocArchiveManager.promises.unarchiveDoc(projectId, docId)
+        DocArchiveManager.unarchiveDoc(projectId, docId)
       ).to.eventually.be.rejected.and.be.instanceof(Errors.NotFoundError)
     })
   })
@@ -469,13 +478,11 @@
   describe('destroyProject', function () {
     describe('when archiving is enabled', function () {
       beforeEach(async function () {
-        await DocArchiveManager.promises.destroyProject(projectId)
+        await DocArchiveManager.destroyProject(projectId)
       })

       it('should delete the project in Mongo', function () {
-        expect(MongoManager.promises.destroyProject).to.have.been.calledWith(
-          projectId
-        )
+        expect(MongoManager.destroyProject).to.have.been.calledWith(projectId)
       })

       it('should delete the project in the persistor', function () {
@ -489,13 +496,11 @@ describe('DocArchiveManager', function () {
describe('when archiving is disabled', function () { describe('when archiving is disabled', function () {
beforeEach(async function () { beforeEach(async function () {
Settings.docstore.backend = '' Settings.docstore.backend = ''
await DocArchiveManager.promises.destroyProject(projectId) await DocArchiveManager.destroyProject(projectId)
}) })
it('should delete the project in Mongo', function () { it('should delete the project in Mongo', function () {
expect(MongoManager.promises.destroyProject).to.have.been.calledWith( expect(MongoManager.destroyProject).to.have.been.calledWith(projectId)
projectId
)
}) })
it('should not delete the project in the persistor', function () { it('should not delete the project in the persistor', function () {
@ -506,33 +511,35 @@ describe('DocArchiveManager', function () {
describe('archiveAllDocs', function () { describe('archiveAllDocs', function () {
it('should resolve with valid arguments', async function () { it('should resolve with valid arguments', async function () {
await expect(DocArchiveManager.promises.archiveAllDocs(projectId)).to await expect(DocArchiveManager.archiveAllDocs(projectId)).to.eventually.be
.eventually.be.fulfilled .fulfilled
}) })
it('should archive all project docs which are not in s3', async function () { it('should archive all project docs which are not in s3', async function () {
await DocArchiveManager.promises.archiveAllDocs(projectId) await DocArchiveManager.archiveAllDocs(projectId)
// not inS3 // not inS3
expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith( expect(MongoManager.markDocAsArchived).to.have.been.calledWith(
projectId, projectId,
mongoDocs[0]._id mongoDocs[0]._id
) )
expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith( expect(MongoManager.markDocAsArchived).to.have.been.calledWith(
projectId, projectId,
mongoDocs[1]._id mongoDocs[1]._id
) )
expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith( expect(MongoManager.markDocAsArchived).to.have.been.calledWith(
projectId, projectId,
mongoDocs[4]._id mongoDocs[4]._id
) )
// inS3 // inS3
expect( expect(MongoManager.markDocAsArchived).not.to.have.been.calledWith(
MongoManager.promises.markDocAsArchived projectId,
).not.to.have.been.calledWith(projectId, mongoDocs[2]._id) mongoDocs[2]._id
expect( )
MongoManager.promises.markDocAsArchived expect(MongoManager.markDocAsArchived).not.to.have.been.calledWith(
).not.to.have.been.calledWith(projectId, mongoDocs[3]._id) projectId,
mongoDocs[3]._id
)
}) })
describe('when archiving is not configured', function () { describe('when archiving is not configured', function () {
@ -541,21 +548,20 @@ describe('DocArchiveManager', function () {
}) })
it('should bail out early', async function () { it('should bail out early', async function () {
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
expect(MongoManager.promises.getNonArchivedProjectDocIds).to.not.have expect(MongoManager.getNonArchivedProjectDocIds).to.not.have.been.called
.been.called
}) })
}) })
}) })
describe('unArchiveAllDocs', function () { describe('unArchiveAllDocs', function () {
it('should resolve with valid arguments', async function () { it('should resolve with valid arguments', async function () {
await expect(DocArchiveManager.promises.unArchiveAllDocs(projectId)).to await expect(DocArchiveManager.unArchiveAllDocs(projectId)).to.eventually
.eventually.be.fulfilled .be.fulfilled
}) })
it('should unarchive all inS3 docs', async function () { it('should unarchive all inS3 docs', async function () {
await DocArchiveManager.promises.unArchiveAllDocs(projectId) await DocArchiveManager.unArchiveAllDocs(projectId)
for (const doc of archivedDocs) { for (const doc of archivedDocs) {
expect(PersistorManager.getObjectStream).to.have.been.calledWith( expect(PersistorManager.getObjectStream).to.have.been.calledWith(
@ -571,9 +577,9 @@ describe('DocArchiveManager', function () {
}) })
it('should bail out early', async function () { it('should bail out early', async function () {
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
expect(MongoManager.promises.getNonDeletedArchivedProjectDocs).to.not expect(MongoManager.getNonDeletedArchivedProjectDocs).to.not.have.been
.have.been.called .called
}) })
}) })
}) })
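Every DocArchiveManager test change above follows one pattern: the `.promises` namespace is gone and the manager is consumed as a flat set of async functions. A minimal sketch of the export shape these tests imply — the manager module itself is not part of this diff, so the function body and the old callbackify wrapper are assumptions:

// Sketch only: export shape implied by the updated tests above.
const { callbackify } = require('node:util')

async function unarchiveDoc(projectId, docId) {
  // ... fetch from the persistor, verify the md5, restore the doc in Mongo ...
}

// Before (assumed): callback API at the top level, promise API under `promises`:
// module.exports = {
//   unarchiveDoc: callbackify(unarchiveDoc),
//   promises: { unarchiveDoc },
// }

// After: the async functions are the module's only API, so the tests call
// DocArchiveManager.unarchiveDoc(...) directly.
module.exports = { unarchiveDoc /* , archiveDoc, archiveAllDocs, unArchiveAllDocs, destroyProject */ }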


@@ -17,25 +17,22 @@ describe('DocManager', function () {
     this.version = 42
     this.MongoManager = {
-      promises: {
-        findDoc: sinon.stub(),
-        getProjectsDocs: sinon.stub(),
-        patchDoc: sinon.stub().resolves(),
-        upsertIntoDocCollection: sinon.stub().resolves(),
-      },
+      findDoc: sinon.stub(),
+      getProjectsDocs: sinon.stub(),
+      patchDoc: sinon.stub().resolves(),
+      upsertIntoDocCollection: sinon.stub().resolves(),
     }
     this.DocArchiveManager = {
-      promises: {
-        unarchiveDoc: sinon.stub(),
-        unArchiveAllDocs: sinon.stub(),
-        archiveDoc: sinon.stub().resolves(),
-      },
+      unarchiveDoc: sinon.stub(),
+      unArchiveAllDocs: sinon.stub(),
+      archiveDoc: sinon.stub().resolves(),
     }
     this.RangeManager = {
       jsonRangesToMongo(r) {
         return r
       },
       shouldUpdateRanges: sinon.stub().returns(false),
+      fixCommentIds: sinon.stub(),
     }
     this.settings = { docstore: {} }
@@ -52,7 +49,7 @@ describe('DocManager', function () {
   describe('getFullDoc', function () {
     beforeEach(function () {
-      this.DocManager.promises._getDoc = sinon.stub()
+      this.DocManager._getDoc = sinon.stub()
       this.doc = {
         _id: this.doc_id,
         lines: ['2134'],
@@ -60,13 +57,10 @@ describe('DocManager', function () {
     })

     it('should call get doc with a quick filter', async function () {
-      this.DocManager.promises._getDoc.resolves(this.doc)
-      const doc = await this.DocManager.promises.getFullDoc(
-        this.project_id,
-        this.doc_id
-      )
+      this.DocManager._getDoc.resolves(this.doc)
+      const doc = await this.DocManager.getFullDoc(this.project_id, this.doc_id)
       doc.should.equal(this.doc)
-      this.DocManager.promises._getDoc
+      this.DocManager._getDoc
         .calledWith(this.project_id, this.doc_id, {
           lines: true,
           rev: true,
@@ -79,27 +73,27 @@ describe('DocManager', function () {
     })

     it('should return error when get doc errors', async function () {
-      this.DocManager.promises._getDoc.rejects(this.stubbedError)
+      this.DocManager._getDoc.rejects(this.stubbedError)
       await expect(
-        this.DocManager.promises.getFullDoc(this.project_id, this.doc_id)
+        this.DocManager.getFullDoc(this.project_id, this.doc_id)
       ).to.be.rejectedWith(this.stubbedError)
     })
   })

   describe('getRawDoc', function () {
     beforeEach(function () {
-      this.DocManager.promises._getDoc = sinon.stub()
+      this.DocManager._getDoc = sinon.stub()
       this.doc = { lines: ['2134'] }
     })

     it('should call get doc with a quick filter', async function () {
-      this.DocManager.promises._getDoc.resolves(this.doc)
-      const doc = await this.DocManager.promises.getDocLines(
+      this.DocManager._getDoc.resolves(this.doc)
+      const content = await this.DocManager.getDocLines(
         this.project_id,
         this.doc_id
       )
-      doc.should.equal(this.doc)
-      this.DocManager.promises._getDoc
+      content.should.equal(this.doc.lines.join('\n'))
+      this.DocManager._getDoc
         .calledWith(this.project_id, this.doc_id, {
           lines: true,
           inS3: true,
@@ -108,11 +102,46 @@ describe('DocManager', function () {
     })

     it('should return error when get doc errors', async function () {
-      this.DocManager.promises._getDoc.rejects(this.stubbedError)
+      this.DocManager._getDoc.rejects(this.stubbedError)
       await expect(
-        this.DocManager.promises.getDocLines(this.project_id, this.doc_id)
+        this.DocManager.getDocLines(this.project_id, this.doc_id)
       ).to.be.rejectedWith(this.stubbedError)
     })
+
+    it('should return error when get doc does not exist', async function () {
+      this.DocManager._getDoc.resolves(null)
+      await expect(
+        this.DocManager.getDocLines(this.project_id, this.doc_id)
+      ).to.be.rejectedWith(Errors.NotFoundError)
+    })
+
+    it('should return error when get doc has no lines', async function () {
+      this.DocManager._getDoc.resolves({})
+      await expect(
+        this.DocManager.getDocLines(this.project_id, this.doc_id)
+      ).to.be.rejectedWith(Errors.DocWithoutLinesError)
+    })
+  })
+
+  describe('_getDoc', function () {
+    it('should return error when get doc does not exist', async function () {
+      this.MongoManager.findDoc.resolves(null)
+      await expect(
+        this.DocManager._getDoc(this.project_id, this.doc_id, { inS3: true })
+      ).to.be.rejectedWith(Errors.NotFoundError)
+    })
+
+    it('should fix comment ids', async function () {
+      this.MongoManager.findDoc.resolves({
+        _id: this.doc_id,
+        ranges: {},
+      })
+      await this.DocManager._getDoc(this.project_id, this.doc_id, {
+        inS3: true,
+        ranges: true,
+      })
+      expect(this.RangeManager.fixCommentIds).to.have.been.called
+    })
   })

   describe('getDoc', function () {
@@ -128,26 +157,25 @@ describe('DocManager', function () {
     describe('when using a filter', function () {
       beforeEach(function () {
-        this.MongoManager.promises.findDoc.resolves(this.doc)
+        this.MongoManager.findDoc.resolves(this.doc)
       })

       it('should error if inS3 is not set to true', async function () {
         await expect(
-          this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
+          this.DocManager._getDoc(this.project_id, this.doc_id, {
             inS3: false,
           })
         ).to.be.rejected
       })

       it('should always get inS3 even when no filter is passed', async function () {
-        await expect(
-          this.DocManager.promises._getDoc(this.project_id, this.doc_id)
-        ).to.be.rejected
-        this.MongoManager.promises.findDoc.called.should.equal(false)
+        await expect(this.DocManager._getDoc(this.project_id, this.doc_id)).to
+          .be.rejected
+        this.MongoManager.findDoc.called.should.equal(false)
       })

       it('should not error if inS3 is set to true', async function () {
-        await this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
+        await this.DocManager._getDoc(this.project_id, this.doc_id, {
           inS3: true,
         })
       })
@@ -155,8 +183,8 @@ describe('DocManager', function () {
     describe('when the doc is in the doc collection', function () {
       beforeEach(async function () {
-        this.MongoManager.promises.findDoc.resolves(this.doc)
-        this.result = await this.DocManager.promises._getDoc(
+        this.MongoManager.findDoc.resolves(this.doc)
+        this.result = await this.DocManager._getDoc(
           this.project_id,
           this.doc_id,
           { version: true, inS3: true }
@@ -164,7 +192,7 @@ describe('DocManager', function () {
       })

       it('should get the doc from the doc collection', function () {
-        this.MongoManager.promises.findDoc
+        this.MongoManager.findDoc
           .calledWith(this.project_id, this.doc_id)
           .should.equal(true)
       })
@@ -177,9 +205,9 @@ describe('DocManager', function () {
     describe('when MongoManager.findDoc errors', function () {
       it('should return the error', async function () {
-        this.MongoManager.promises.findDoc.rejects(this.stubbedError)
+        this.MongoManager.findDoc.rejects(this.stubbedError)
         await expect(
-          this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
+          this.DocManager._getDoc(this.project_id, this.doc_id, {
             version: true,
             inS3: true,
           })
@@ -202,15 +230,15 @@ describe('DocManager', function () {
           version: 2,
           inS3: false,
         }
-        this.MongoManager.promises.findDoc.resolves(this.doc)
-        this.DocArchiveManager.promises.unarchiveDoc.callsFake(
+        this.MongoManager.findDoc.resolves(this.doc)
+        this.DocArchiveManager.unarchiveDoc.callsFake(
           async (projectId, docId) => {
-            this.MongoManager.promises.findDoc.resolves({
+            this.MongoManager.findDoc.resolves({
               ...this.unarchivedDoc,
             })
           }
         )
-        this.result = await this.DocManager.promises._getDoc(
+        this.result = await this.DocManager._getDoc(
           this.project_id,
           this.doc_id,
           {
@@ -221,13 +249,13 @@ describe('DocManager', function () {
       })

       it('should call the DocArchive to unarchive the doc', function () {
-        this.DocArchiveManager.promises.unarchiveDoc
+        this.DocArchiveManager.unarchiveDoc
           .calledWith(this.project_id, this.doc_id)
           .should.equal(true)
       })

       it('should look up the doc twice', function () {
-        this.MongoManager.promises.findDoc.calledTwice.should.equal(true)
+        this.MongoManager.findDoc.calledTwice.should.equal(true)
       })

       it('should return the doc', function () {
@@ -239,9 +267,9 @@ describe('DocManager', function () {
     describe('when the doc does not exist in the docs collection', function () {
       it('should return a NotFoundError', async function () {
-        this.MongoManager.promises.findDoc.resolves(null)
+        this.MongoManager.findDoc.resolves(null)
         await expect(
-          this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
+          this.DocManager._getDoc(this.project_id, this.doc_id, {
             version: true,
             inS3: true,
           })
@@ -262,23 +290,27 @@ describe('DocManager', function () {
           lines: ['mock-lines'],
         },
       ]
-      this.MongoManager.promises.getProjectsDocs.resolves(this.docs)
-      this.DocArchiveManager.promises.unArchiveAllDocs.resolves(this.docs)
-      this.filter = { lines: true }
-      this.result = await this.DocManager.promises.getAllNonDeletedDocs(
+      this.MongoManager.getProjectsDocs.resolves(this.docs)
+      this.DocArchiveManager.unArchiveAllDocs.resolves(this.docs)
+      this.filter = { lines: true, ranges: true }
+      this.result = await this.DocManager.getAllNonDeletedDocs(
         this.project_id,
         this.filter
       )
     })

     it('should get the project from the database', function () {
-      this.MongoManager.promises.getProjectsDocs.should.have.been.calledWith(
+      this.MongoManager.getProjectsDocs.should.have.been.calledWith(
         this.project_id,
         { include_deleted: false },
         this.filter
       )
     })

+    it('should fix comment ids', async function () {
+      expect(this.RangeManager.fixCommentIds).to.have.been.called
+    })
+
     it('should return the docs', function () {
       expect(this.result).to.deep.equal(this.docs)
     })
@@ -286,13 +318,10 @@ describe('DocManager', function () {
     describe('when there are no docs for the project', function () {
       it('should return a NotFoundError', async function () {
-        this.MongoManager.promises.getProjectsDocs.resolves(null)
-        this.DocArchiveManager.promises.unArchiveAllDocs.resolves(null)
+        this.MongoManager.getProjectsDocs.resolves(null)
+        this.DocArchiveManager.unArchiveAllDocs.resolves(null)
         await expect(
-          this.DocManager.promises.getAllNonDeletedDocs(
-            this.project_id,
-            this.filter
-          )
+          this.DocManager.getAllNonDeletedDocs(this.project_id, this.filter)
         ).to.be.rejectedWith(`No docs for project ${this.project_id}`)
       })
     })
@@ -303,7 +332,7 @@ describe('DocManager', function () {
     beforeEach(function () {
       this.lines = ['mock', 'doc', 'lines']
       this.rev = 77
-      this.MongoManager.promises.findDoc.resolves({
+      this.MongoManager.findDoc.resolves({
         _id: new ObjectId(this.doc_id),
       })
       this.meta = {}
@@ -311,7 +340,7 @@ describe('DocManager', function () {
     describe('standard path', function () {
       beforeEach(async function () {
-        await this.DocManager.promises.patchDoc(
+        await this.DocManager.patchDoc(
           this.project_id,
           this.doc_id,
           this.meta
@@ -319,14 +348,14 @@ describe('DocManager', function () {
       })

       it('should get the doc', function () {
-        expect(this.MongoManager.promises.findDoc).to.have.been.calledWith(
+        expect(this.MongoManager.findDoc).to.have.been.calledWith(
           this.project_id,
           this.doc_id
         )
       })

       it('should persist the meta', function () {
-        expect(this.MongoManager.promises.patchDoc).to.have.been.calledWith(
+        expect(this.MongoManager.patchDoc).to.have.been.calledWith(
           this.project_id,
           this.doc_id,
           this.meta
@@ -339,7 +368,7 @@ describe('DocManager', function () {
         this.settings.docstore.archiveOnSoftDelete = false
         this.meta.deleted = true
-        await this.DocManager.promises.patchDoc(
+        await this.DocManager.patchDoc(
           this.project_id,
           this.doc_id,
           this.meta
@@ -347,8 +376,7 @@ describe('DocManager', function () {
       })

       it('should not flush the doc out of mongo', function () {
-        expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been
-          .called
+        expect(this.DocArchiveManager.archiveDoc).to.not.have.been.called
       })
     })
@@ -356,7 +384,7 @@ describe('DocManager', function () {
       beforeEach(async function () {
         this.settings.docstore.archiveOnSoftDelete = false
         this.meta.deleted = false
-        await this.DocManager.promises.patchDoc(
+        await this.DocManager.patchDoc(
           this.project_id,
           this.doc_id,
           this.meta
@@ -364,8 +392,7 @@ describe('DocManager', function () {
       })

       it('should not flush the doc out of mongo', function () {
-        expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been
-          .called
+        expect(this.DocArchiveManager.archiveDoc).to.not.have.been.called
       })
     })
@@ -377,7 +404,7 @@ describe('DocManager', function () {
       describe('when the background flush succeeds', function () {
         beforeEach(async function () {
-          await this.DocManager.promises.patchDoc(
+          await this.DocManager.patchDoc(
             this.project_id,
             this.doc_id,
             this.meta
@@ -389,17 +416,18 @@ describe('DocManager', function () {
         })

         it('should flush the doc out of mongo', function () {
-          expect(
-            this.DocArchiveManager.promises.archiveDoc
-          ).to.have.been.calledWith(this.project_id, this.doc_id)
+          expect(this.DocArchiveManager.archiveDoc).to.have.been.calledWith(
+            this.project_id,
+            this.doc_id
+          )
         })
       })

       describe('when the background flush fails', function () {
         beforeEach(async function () {
           this.err = new Error('foo')
-          this.DocArchiveManager.promises.archiveDoc.rejects(this.err)
-          await this.DocManager.promises.patchDoc(
+          this.DocArchiveManager.archiveDoc.rejects(this.err)
+          await this.DocManager.patchDoc(
             this.project_id,
             this.doc_id,
             this.meta
@@ -422,9 +450,9 @@ describe('DocManager', function () {
       describe('when the doc does not exist', function () {
         it('should return a NotFoundError', async function () {
-          this.MongoManager.promises.findDoc.resolves(null)
+          this.MongoManager.findDoc.resolves(null)
           await expect(
-            this.DocManager.promises.patchDoc(this.project_id, this.doc_id, {})
+            this.DocManager.patchDoc(this.project_id, this.doc_id, {})
           ).to.be.rejectedWith(
             `No such project/doc to delete: ${this.project_id}/${this.doc_id}`
           )
@@ -470,13 +498,13 @@ describe('DocManager', function () {
         ranges: this.originalRanges,
       }
-      this.DocManager.promises._getDoc = sinon.stub()
+      this.DocManager._getDoc = sinon.stub()
     })

     describe('when only the doc lines have changed', function () {
       beforeEach(async function () {
-        this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
-        this.result = await this.DocManager.promises.updateDoc(
+        this.DocManager._getDoc = sinon.stub().resolves(this.doc)
+        this.result = await this.DocManager.updateDoc(
           this.project_id,
           this.doc_id,
           this.newDocLines,
@@ -486,7 +514,7 @@ describe('DocManager', function () {
       })

       it('should get the existing doc', function () {
-        this.DocManager.promises._getDoc
+        this.DocManager._getDoc
           .calledWith(this.project_id, this.doc_id, {
             version: true,
             rev: true,
@@ -498,7 +526,7 @@ describe('DocManager', function () {
       })

       it('should upsert the document to the doc collection', function () {
-        this.MongoManager.promises.upsertIntoDocCollection
+        this.MongoManager.upsertIntoDocCollection
           .calledWith(this.project_id, this.doc_id, this.rev, {
             lines: this.newDocLines,
           })
@@ -512,9 +540,9 @@ describe('DocManager', function () {
     describe('when the doc ranges have changed', function () {
       beforeEach(async function () {
-        this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
+        this.DocManager._getDoc = sinon.stub().resolves(this.doc)
         this.RangeManager.shouldUpdateRanges.returns(true)
-        this.result = await this.DocManager.promises.updateDoc(
+        this.result = await this.DocManager.updateDoc(
           this.project_id,
           this.doc_id,
           this.oldDocLines,
@@ -524,7 +552,7 @@ describe('DocManager', function () {
       })

       it('should upsert the ranges', function () {
-        this.MongoManager.promises.upsertIntoDocCollection
+        this.MongoManager.upsertIntoDocCollection
           .calledWith(this.project_id, this.doc_id, this.rev, {
             ranges: this.newRanges,
           })
@@ -538,8 +566,8 @@ describe('DocManager', function () {
     describe('when only the version has changed', function () {
       beforeEach(async function () {
-        this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
-        this.result = await this.DocManager.promises.updateDoc(
+        this.DocManager._getDoc = sinon.stub().resolves(this.doc)
+        this.result = await this.DocManager.updateDoc(
           this.project_id,
           this.doc_id,
           this.oldDocLines,
@@ -549,7 +577,7 @@ describe('DocManager', function () {
       })

       it('should update the version', function () {
-        this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
+        this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith(
           this.project_id,
           this.doc_id,
           this.rev,
@@ -564,8 +592,8 @@ describe('DocManager', function () {
     describe('when the doc has not changed at all', function () {
       beforeEach(async function () {
-        this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
-        this.result = await this.DocManager.promises.updateDoc(
+        this.DocManager._getDoc = sinon.stub().resolves(this.doc)
+        this.result = await this.DocManager.updateDoc(
           this.project_id,
           this.doc_id,
           this.oldDocLines,
@@ -575,9 +603,7 @@ describe('DocManager', function () {
       })

       it('should not update the ranges or lines or version', function () {
-        this.MongoManager.promises.upsertIntoDocCollection.called.should.equal(
-          false
-        )
+        this.MongoManager.upsertIntoDocCollection.called.should.equal(false)
       })

       it('should return the old rev and modified == false', function () {
@@ -588,7 +614,7 @@ describe('DocManager', function () {
     describe('when the version is null', function () {
       it('should return an error', async function () {
         await expect(
-          this.DocManager.promises.updateDoc(
+          this.DocManager.updateDoc(
             this.project_id,
             this.doc_id,
             this.newDocLines,
@@ -602,7 +628,7 @@ describe('DocManager', function () {
     describe('when the lines are null', function () {
       it('should return an error', async function () {
         await expect(
-          this.DocManager.promises.updateDoc(
+          this.DocManager.updateDoc(
             this.project_id,
             this.doc_id,
             null,
@@ -616,7 +642,7 @@ describe('DocManager', function () {
     describe('when the ranges are null', function () {
       it('should return an error', async function () {
         await expect(
-          this.DocManager.promises.updateDoc(
+          this.DocManager.updateDoc(
             this.project_id,
             this.doc_id,
             this.newDocLines,
@@ -630,9 +656,9 @@ describe('DocManager', function () {
     describe('when there is a generic error getting the doc', function () {
       beforeEach(async function () {
         this.error = new Error('doc could not be found')
-        this.DocManager.promises._getDoc = sinon.stub().rejects(this.error)
+        this.DocManager._getDoc = sinon.stub().rejects(this.error)
         await expect(
-          this.DocManager.promises.updateDoc(
+          this.DocManager.updateDoc(
             this.project_id,
             this.doc_id,
             this.newDocLines,
@@ -643,16 +669,15 @@ describe('DocManager', function () {
       })

       it('should not upsert the document to the doc collection', function () {
-        this.MongoManager.promises.upsertIntoDocCollection.should.not.have.been
-          .called
+        this.MongoManager.upsertIntoDocCollection.should.not.have.been.called
       })
     })

     describe('when the version was decremented', function () {
       it('should return an error', async function () {
-        this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
+        this.DocManager._getDoc = sinon.stub().resolves(this.doc)
         await expect(
-          this.DocManager.promises.updateDoc(
+          this.DocManager.updateDoc(
             this.project_id,
             this.doc_id,
             this.newDocLines,
@@ -665,8 +690,8 @@ describe('DocManager', function () {
     describe('when the doc lines have not changed', function () {
       beforeEach(async function () {
-        this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
-        this.result = await this.DocManager.promises.updateDoc(
+        this.DocManager._getDoc = sinon.stub().resolves(this.doc)
+        this.result = await this.DocManager.updateDoc(
           this.project_id,
           this.doc_id,
           this.oldDocLines.slice(),
@@ -676,9 +701,7 @@ describe('DocManager', function () {
       })

       it('should not update the doc', function () {
-        this.MongoManager.promises.upsertIntoDocCollection.called.should.equal(
-          false
-        )
+        this.MongoManager.upsertIntoDocCollection.called.should.equal(false)
       })

       it('should return the existing rev', function () {
@@ -688,8 +711,8 @@ describe('DocManager', function () {
     describe('when the doc does not exist', function () {
       beforeEach(async function () {
-        this.DocManager.promises._getDoc = sinon.stub().resolves(null)
-        this.result = await this.DocManager.promises.updateDoc(
+        this.DocManager._getDoc = sinon.stub().resolves(null)
+        this.result = await this.DocManager.updateDoc(
           this.project_id,
           this.doc_id,
           this.newDocLines,
@@ -699,7 +722,7 @@ describe('DocManager', function () {
       })

       it('should upsert the document to the doc collection', function () {
-        this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
+        this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith(
           this.project_id,
           this.doc_id,
           undefined,
@@ -718,12 +741,12 @@ describe('DocManager', function () {
     describe('when another update is racing', function () {
       beforeEach(async function () {
-        this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
-        this.MongoManager.promises.upsertIntoDocCollection
+        this.DocManager._getDoc = sinon.stub().resolves(this.doc)
+        this.MongoManager.upsertIntoDocCollection
           .onFirstCall()
           .rejects(new Errors.DocRevValueError())
         this.RangeManager.shouldUpdateRanges.returns(true)
-        this.result = await this.DocManager.promises.updateDoc(
+        this.result = await this.DocManager.updateDoc(
           this.project_id,
           this.doc_id,
           this.newDocLines,
@@ -733,7 +756,7 @@ describe('DocManager', function () {
       })

       it('should upsert the doc twice', function () {
-        this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
+        this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith(
           this.project_id,
           this.doc_id,
           this.rev,
@@ -743,8 +766,7 @@ describe('DocManager', function () {
             version: this.version + 1,
           }
         )
-        this.MongoManager.promises.upsertIntoDocCollection.should.have.been
-          .calledTwice
+        this.MongoManager.upsertIntoDocCollection.should.have.been.calledTwice
       })

       it('should return the new rev', function () {
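Besides dropping `.promises`, the getRawDoc hunks above pin down a behaviour change in DocManager.getDocLines: it now resolves with the joined document content rather than the doc object, and rejects with typed errors for the two failure modes. A sketch of an implementation consistent with those expectations — the real function body is not in this diff, so the error messages and filter are assumptions:

// Assumed implementation matching the updated test expectations above.
async function getDocLines(projectId, docId) {
  const doc = await DocManager._getDoc(projectId, docId, { lines: true, inS3: true })
  if (doc == null) {
    // tests: _getDoc resolves null -> rejects with Errors.NotFoundError
    throw new Errors.NotFoundError(`doc not found ${projectId}/${docId}`)
  }
  if (doc.lines == null) {
    // tests: _getDoc resolves {} -> rejects with Errors.DocWithoutLinesError
    throw new Errors.DocWithoutLinesError(`doc has no lines ${projectId}/${docId}`)
  }
  // tests: content.should.equal(this.doc.lines.join('\n'))
  return doc.lines.join('\n')
}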


@@ -14,7 +14,7 @@ describe('HttpController', function () {
     max_doc_length: 2 * 1024 * 1024,
   }
   this.DocArchiveManager = {
-    unArchiveAllDocs: sinon.stub().yields(),
+    unArchiveAllDocs: sinon.stub().returns(),
   }
   this.DocManager = {}
   this.HttpController = SandboxedModule.require(modulePath, {
@@ -54,15 +54,13 @@ describe('HttpController', function () {
   describe('getDoc', function () {
     describe('without deleted docs', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
         this.req.params = {
           project_id: this.projectId,
           doc_id: this.docId,
         }
-        this.DocManager.getFullDoc = sinon
-          .stub()
-          .callsArgWith(2, null, this.doc)
-        this.HttpController.getDoc(this.req, this.res, this.next)
+        this.DocManager.getFullDoc = sinon.stub().resolves(this.doc)
+        await this.HttpController.getDoc(this.req, this.res, this.next)
       })

       it('should get the document with the version (including deleted)', function () {
@@ -89,26 +87,24 @@ describe('HttpController', function () {
         project_id: this.projectId,
         doc_id: this.docId,
       }
-      this.DocManager.getFullDoc = sinon
-        .stub()
-        .callsArgWith(2, null, this.deletedDoc)
+      this.DocManager.getFullDoc = sinon.stub().resolves(this.deletedDoc)
     })

-    it('should get the doc from the doc manager', function () {
-      this.HttpController.getDoc(this.req, this.res, this.next)
+    it('should get the doc from the doc manager', async function () {
+      await this.HttpController.getDoc(this.req, this.res, this.next)
       this.DocManager.getFullDoc
         .calledWith(this.projectId, this.docId)
         .should.equal(true)
     })

-    it('should return 404 if the query string delete is not set ', function () {
-      this.HttpController.getDoc(this.req, this.res, this.next)
+    it('should return 404 if the query string delete is not set ', async function () {
+      await this.HttpController.getDoc(this.req, this.res, this.next)
       this.res.sendStatus.calledWith(404).should.equal(true)
     })

-    it('should return the doc as JSON if include_deleted is set to true', function () {
+    it('should return the doc as JSON if include_deleted is set to true', async function () {
       this.req.query.include_deleted = 'true'
-      this.HttpController.getDoc(this.req, this.res, this.next)
+      await this.HttpController.getDoc(this.req, this.res, this.next)
       this.res.json
         .calledWith({
           _id: this.docId,
@@ -123,13 +119,15 @@ describe('HttpController', function () {
   })

   describe('getRawDoc', function () {
-    beforeEach(function () {
+    beforeEach(async function () {
       this.req.params = {
         project_id: this.projectId,
         doc_id: this.docId,
       }
-      this.DocManager.getDocLines = sinon.stub().callsArgWith(2, null, this.doc)
-      this.HttpController.getRawDoc(this.req, this.res, this.next)
+      this.DocManager.getDocLines = sinon
+        .stub()
+        .resolves(this.doc.lines.join('\n'))
+      await this.HttpController.getRawDoc(this.req, this.res, this.next)
     })

     it('should get the document without the version', function () {
@@ -154,7 +152,7 @@ describe('HttpController', function () {
   describe('getAllDocs', function () {
     describe('normally', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
         this.req.params = { project_id: this.projectId }
         this.docs = [
           {
@@ -168,10 +166,8 @@ describe('HttpController', function () {
             rev: 4,
           },
         ]
-        this.DocManager.getAllNonDeletedDocs = sinon
-          .stub()
-          .callsArgWith(2, null, this.docs)
-        this.HttpController.getAllDocs(this.req, this.res, this.next)
+        this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs)
+        await this.HttpController.getAllDocs(this.req, this.res, this.next)
       })

       it('should get all the (non-deleted) docs', function () {
@@ -199,7 +195,7 @@ describe('HttpController', function () {
     })

     describe('with null lines', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
         this.req.params = { project_id: this.projectId }
         this.docs = [
           {
@@ -213,10 +209,8 @@ describe('HttpController', function () {
             rev: 4,
           },
         ]
-        this.DocManager.getAllNonDeletedDocs = sinon
-          .stub()
-          .callsArgWith(2, null, this.docs)
-        this.HttpController.getAllDocs(this.req, this.res, this.next)
+        this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs)
+        await this.HttpController.getAllDocs(this.req, this.res, this.next)
       })

       it('should return the doc with fallback lines', function () {
@@ -238,7 +232,7 @@ describe('HttpController', function () {
     })

     describe('with a null doc', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
         this.req.params = { project_id: this.projectId }
         this.docs = [
           {
@@ -253,10 +247,8 @@ describe('HttpController', function () {
             rev: 4,
           },
         ]
-        this.DocManager.getAllNonDeletedDocs = sinon
-          .stub()
-          .callsArgWith(2, null, this.docs)
-        this.HttpController.getAllDocs(this.req, this.res, this.next)
+        this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs)
+        await this.HttpController.getAllDocs(this.req, this.res, this.next)
       })

       it('should return the non null docs as JSON', function () {
@@ -292,7 +284,7 @@ describe('HttpController', function () {
   describe('getAllRanges', function () {
     describe('normally', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
         this.req.params = { project_id: this.projectId }
         this.docs = [
           {
@@ -304,10 +296,8 @@ describe('HttpController', function () {
             ranges: { mock_ranges: 'two' },
           },
         ]
-        this.DocManager.getAllNonDeletedDocs = sinon
-          .stub()
-          .callsArgWith(2, null, this.docs)
-        this.HttpController.getAllRanges(this.req, this.res, this.next)
+        this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs)
+        await this.HttpController.getAllRanges(this.req, this.res, this.next)
       })

       it('should get all the (non-deleted) doc ranges', function () {
@@ -342,16 +332,17 @@ describe('HttpController', function () {
   })

   describe('when the doc lines exist and were updated', function () {
-    beforeEach(function () {
+    beforeEach(async function () {
       this.req.body = {
         lines: (this.lines = ['hello', 'world']),
         version: (this.version = 42),
         ranges: (this.ranges = { changes: 'mock' }),
       }
+      this.rev = 5
       this.DocManager.updateDoc = sinon
         .stub()
-        .yields(null, true, (this.rev = 5))
-      this.HttpController.updateDoc(this.req, this.res, this.next)
+        .resolves({ modified: true, rev: this.rev })
+      await this.HttpController.updateDoc(this.req, this.res, this.next)
     })

     it('should update the document', function () {
@@ -374,16 +365,17 @@ describe('HttpController', function () {
   })

   describe('when the doc lines exist and were not updated', function () {
-    beforeEach(function () {
+    beforeEach(async function () {
      this.req.body = {
        lines: (this.lines = ['hello', 'world']),
        version: (this.version = 42),
        ranges: {},
      }
+      this.rev = 5
      this.DocManager.updateDoc = sinon
        .stub()
-        .yields(null, false, (this.rev = 5))
-      this.HttpController.updateDoc(this.req, this.res, this.next)
+        .resolves({ modified: false, rev: this.rev })
+      await this.HttpController.updateDoc(this.req, this.res, this.next)
     })

     it('should return a modified status', function () {
@@ -394,10 +386,12 @@ describe('HttpController', function () {
   })

   describe('when the doc lines are not provided', function () {
-    beforeEach(function () {
+    beforeEach(async function () {
       this.req.body = { version: 42, ranges: {} }
-      this.DocManager.updateDoc = sinon.stub().yields(null, false)
-      this.HttpController.updateDoc(this.req, this.res, this.next)
+      this.DocManager.updateDoc = sinon
+        .stub()
+        .resolves({ modified: false, rev: 0 })
+      await this.HttpController.updateDoc(this.req, this.res, this.next)
     })

     it('should not update the document', function () {
@@ -410,10 +404,12 @@ describe('HttpController', function () {
   })

   describe('when the doc version are not provided', function () {
-    beforeEach(function () {
+    beforeEach(async function () {
       this.req.body = { version: 42, lines: ['hello world'] }
-      this.DocManager.updateDoc = sinon.stub().yields(null, false)
-      this.HttpController.updateDoc(this.req, this.res, this.next)
+      this.DocManager.updateDoc = sinon
+        .stub()
+        .resolves({ modified: false, rev: 0 })
+      await this.HttpController.updateDoc(this.req, this.res, this.next)
     })

     it('should not update the document', function () {
@@ -426,10 +422,12 @@ describe('HttpController', function () {
   })

   describe('when the doc ranges is not provided', function () {
-    beforeEach(function () {
+    beforeEach(async function () {
       this.req.body = { lines: ['foo'], version: 42 }
-      this.DocManager.updateDoc = sinon.stub().yields(null, false)
-      this.HttpController.updateDoc(this.req, this.res, this.next)
+      this.DocManager.updateDoc = sinon
+        .stub()
+        .resolves({ modified: false, rev: 0 })
+      await this.HttpController.updateDoc(this.req, this.res, this.next)
     })

     it('should not update the document', function () {
@@ -442,13 +440,20 @@ describe('HttpController', function () {
   })

   describe('when the doc body is too large', function () {
-    beforeEach(function () {
+    beforeEach(async function () {
       this.req.body = {
         lines: (this.lines = Array(2049).fill('a'.repeat(1024))),
         version: (this.version = 42),
         ranges: (this.ranges = { changes: 'mock' }),
       }
-      this.HttpController.updateDoc(this.req, this.res, this.next)
+      this.DocManager.updateDoc = sinon
+        .stub()
+        .resolves({ modified: false, rev: 0 })
+      await this.HttpController.updateDoc(this.req, this.res, this.next)
+    })
+
+    it('should not update the document', function () {
+      this.DocManager.updateDoc.called.should.equal(false)
     })

     it('should return a 413 (too large) response', function () {
@@ -462,14 +467,14 @@ describe('HttpController', function () {
   })

   describe('patchDoc', function () {
-    beforeEach(function () {
+    beforeEach(async function () {
       this.req.params = {
         project_id: this.projectId,
         doc_id: this.docId,
       }
       this.req.body = { name: 'foo.tex' }
-      this.DocManager.patchDoc = sinon.stub().yields(null)
-      this.HttpController.patchDoc(this.req, this.res, this.next)
+      this.DocManager.patchDoc = sinon.stub().resolves()
+      await this.HttpController.patchDoc(this.req, this.res, this.next)
     })

     it('should delete the document', function () {
@@ -484,11 +489,11 @@ describe('HttpController', function () {
     })

     describe('with an invalid payload', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
         this.req.body = { cannot: 'happen' }
-        this.DocManager.patchDoc = sinon.stub().yields(null)
-        this.HttpController.patchDoc(this.req, this.res, this.next)
+        this.DocManager.patchDoc = sinon.stub().resolves()
+        await this.HttpController.patchDoc(this.req, this.res, this.next)
       })

       it('should log a message', function () {
@@ -509,10 +514,10 @@ describe('HttpController', function () {
   })

   describe('archiveAllDocs', function () {
-    beforeEach(function () {
+    beforeEach(async function () {
       this.req.params = { project_id: this.projectId }
-      this.DocArchiveManager.archiveAllDocs = sinon.stub().callsArg(1)
-      this.HttpController.archiveAllDocs(this.req, this.res, this.next)
+      this.DocArchiveManager.archiveAllDocs = sinon.stub().resolves()
+      await this.HttpController.archiveAllDocs(this.req, this.res, this.next)
     })

     it('should archive the project', function () {
@@ -532,9 +537,12 @@ describe('HttpController', function () {
     })

     describe('on success', function () {
-      beforeEach(function (done) {
-        this.res.sendStatus.callsFake(() => done())
-        this.HttpController.unArchiveAllDocs(this.req, this.res, this.next)
+      beforeEach(async function () {
+        await this.HttpController.unArchiveAllDocs(
+          this.req,
+          this.res,
+          this.next
+        )
       })

       it('returns a 200', function () {
@@ -543,12 +551,15 @@ describe('HttpController', function () {
     })

     describe("when the archived rev doesn't match", function () {
-      beforeEach(function (done) {
-        this.res.sendStatus.callsFake(() => done())
-        this.DocArchiveManager.unArchiveAllDocs.yields(
+      beforeEach(async function () {
+        this.DocArchiveManager.unArchiveAllDocs.rejects(
           new Errors.DocRevValueError('bad rev')
         )
-        this.HttpController.unArchiveAllDocs(this.req, this.res, this.next)
+        await this.HttpController.unArchiveAllDocs(
+          this.req,
+          this.res,
+          this.next
+        )
       })

       it('returns a 409', function () {
@@ -558,10 +569,10 @@ describe('HttpController', function () {
   })

   describe('destroyProject', function () {
-    beforeEach(function () {
+    beforeEach(async function () {
       this.req.params = { project_id: this.projectId }
-      this.DocArchiveManager.destroyProject = sinon.stub().callsArg(1)
-      this.HttpController.destroyProject(this.req, this.res, this.next)
+      this.DocArchiveManager.destroyProject = sinon.stub().resolves()
+      await this.HttpController.destroyProject(this.req, this.res, this.next)
     })

     it('should destroy the docs', function () {
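The HttpController changes are the same conversion applied at the Express layer: callback-style stubs become promise stubs, each beforeEach turns async and awaits the handler, and updateDoc now resolves with a `{ modified, rev }` object instead of yielding `(null, modified, rev)`. The recurring before/after stub pattern, lifted from the hunks above:

// Old: the stub invokes the (projectId, docId, callback) callback argument,
// and the test has to rely on done() or sendStatus fakes for sequencing.
this.DocManager.getFullDoc = sinon.stub().callsArgWith(2, null, this.doc)
this.HttpController.getDoc(this.req, this.res, this.next)

// New: the stub resolves and the handler is awaited, so assertions can run
// immediately afterwards with no callback plumbing.
this.DocManager.getFullDoc = sinon.stub().resolves(this.doc)
await this.HttpController.getDoc(this.req, this.res, this.next)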


@@ -41,7 +41,7 @@ describe('MongoManager', function () {
     this.doc = { name: 'mock-doc' }
     this.db.docs.findOne = sinon.stub().resolves(this.doc)
     this.filter = { lines: true }
-    this.result = await this.MongoManager.promises.findDoc(
+    this.result = await this.MongoManager.findDoc(
       this.projectId,
       this.docId,
       this.filter
@@ -70,11 +70,7 @@ describe('MongoManager', function () {
   describe('patchDoc', function () {
     beforeEach(async function () {
       this.meta = { name: 'foo.tex' }
-      await this.MongoManager.promises.patchDoc(
-        this.projectId,
-        this.docId,
-        this.meta
-      )
+      await this.MongoManager.patchDoc(this.projectId, this.docId, this.meta)
     })

     it('should pass the parameter along', function () {
@@ -104,7 +100,7 @@ describe('MongoManager', function () {
     describe('with included_deleted = false', function () {
       beforeEach(async function () {
-        this.result = await this.MongoManager.promises.getProjectsDocs(
+        this.result = await this.MongoManager.getProjectsDocs(
           this.projectId,
           { include_deleted: false },
           this.filter
@@ -132,7 +128,7 @@ describe('MongoManager', function () {
     describe('with included_deleted = true', function () {
       beforeEach(async function () {
-        this.result = await this.MongoManager.promises.getProjectsDocs(
+        this.result = await this.MongoManager.getProjectsDocs(
           this.projectId,
           { include_deleted: true },
           this.filter
@@ -167,7 +163,7 @@ describe('MongoManager', function () {
       this.db.docs.find = sinon.stub().returns({
         toArray: sinon.stub().resolves([this.doc1, this.doc2, this.doc3]),
       })
-      this.result = await this.MongoManager.promises.getProjectsDeletedDocs(
+      this.result = await this.MongoManager.getProjectsDeletedDocs(
         this.projectId,
         this.filter
       )
@@ -203,7 +199,7 @@ describe('MongoManager', function () {
     })

     it('should upsert the document', async function () {
-      await this.MongoManager.promises.upsertIntoDocCollection(
+      await this.MongoManager.upsertIntoDocCollection(
         this.projectId,
         this.docId,
         this.oldRev,
@@ -223,7 +219,7 @@ describe('MongoManager', function () {
     it('should handle update error', async function () {
       this.db.docs.updateOne.rejects(this.stubbedErr)
       await expect(
-        this.MongoManager.promises.upsertIntoDocCollection(
+        this.MongoManager.upsertIntoDocCollection(
           this.projectId,
           this.docId,
           this.rev,
@@ -235,7 +231,7 @@ describe('MongoManager', function () {
     })

     it('should insert without a previous rev', async function () {
-      await this.MongoManager.promises.upsertIntoDocCollection(
+      await this.MongoManager.upsertIntoDocCollection(
         this.projectId,
         this.docId,
         null,
@@ -254,7 +250,7 @@ describe('MongoManager', function () {
     it('should handle generic insert error', async function () {
       this.db.docs.insertOne.rejects(this.stubbedErr)
       await expect(
-        this.MongoManager.promises.upsertIntoDocCollection(
+        this.MongoManager.upsertIntoDocCollection(
           this.projectId,
           this.docId,
           null,
@@ -266,7 +262,7 @@ describe('MongoManager', function () {
     it('should handle duplicate insert error', async function () {
       this.db.docs.insertOne.rejects({ code: 11000 })
       await expect(
-        this.MongoManager.promises.upsertIntoDocCollection(
+        this.MongoManager.upsertIntoDocCollection(
           this.projectId,
           this.docId,
           null,
@@ -280,7 +276,7 @@ describe('MongoManager', function () {
     beforeEach(async function () {
       this.projectId = new ObjectId()
       this.db.docs.deleteMany = sinon.stub().resolves()
-      await this.MongoManager.promises.destroyProject(this.projectId)
+      await this.MongoManager.destroyProject(this.projectId)
     })

     it('should destroy all docs', function () {
@@ -297,13 +293,13 @@ describe('MongoManager', function () {
     it('should not error when the rev has not changed', async function () {
       this.db.docs.findOne = sinon.stub().resolves({ rev: 1 })
-      await this.MongoManager.promises.checkRevUnchanged(this.doc)
+      await this.MongoManager.checkRevUnchanged(this.doc)
     })

     it('should return an error when the rev has changed', async function () {
       this.db.docs.findOne = sinon.stub().resolves({ rev: 2 })
       await expect(
-        this.MongoManager.promises.checkRevUnchanged(this.doc)
+        this.MongoManager.checkRevUnchanged(this.doc)
       ).to.be.rejectedWith(Errors.DocModifiedError)
     })
@@ -311,14 +307,14 @@ describe('MongoManager', function () {
       this.db.docs.findOne = sinon.stub().resolves({ rev: 2 })
       this.doc = { _id: new ObjectId(), name: 'mock-doc', rev: NaN }
       await expect(
-        this.MongoManager.promises.checkRevUnchanged(this.doc)
+        this.MongoManager.checkRevUnchanged(this.doc)
       ).to.be.rejectedWith(Errors.DocRevValueError)
     })

     it('should return a value error if checked doc rev is NaN', async function () {
       this.db.docs.findOne = sinon.stub().resolves({ rev: NaN })
       await expect(
-        this.MongoManager.promises.checkRevUnchanged(this.doc)
+        this.MongoManager.checkRevUnchanged(this.doc)
       ).to.be.rejectedWith(Errors.DocRevValueError)
     })
   })
@@ -334,7 +330,7 @@ describe('MongoManager', function () {
     describe('complete doc', function () {
       beforeEach(async function () {
-        await this.MongoManager.promises.restoreArchivedDoc(
+        await this.MongoManager.restoreArchivedDoc(
           this.projectId,
           this.docId,
           this.archivedDoc
@@ -364,7 +360,7 @@ describe('MongoManager', function () {
     describe('without ranges', function () {
       beforeEach(async function () {
         delete this.archivedDoc.ranges
-        await this.MongoManager.promises.restoreArchivedDoc(
+        await this.MongoManager.restoreArchivedDoc(
           this.projectId,
           this.docId,
           this.archivedDoc
@@ -395,7 +391,7 @@ describe('MongoManager', function () {
     it('throws a DocRevValueError', async function () {
       this.db.docs.updateOne.resolves({ matchedCount: 0 })
await expect( await expect(
this.MongoManager.promises.restoreArchivedDoc( this.MongoManager.restoreArchivedDoc(
this.projectId, this.projectId,
this.docId, this.docId,
this.archivedDoc this.archivedDoc
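
These test changes track the MongoManager module dropping its `.promises` sub-export in favour of exporting async functions directly. A minimal sketch of the before/after module pattern, with names assumed for illustration:

const { callbackify } = require('node:util')

async function destroyProject(projectId) {
  // ... remove all docs for the project ...
}

// Before: a callback API plus a promisified copy under `.promises`,
// so tests called MongoManager.promises.destroyProject(...)
// module.exports = {
//   destroyProject: callbackify(destroyProject),
//   promises: { destroyProject },
// }

// After: the async functions are the module's only surface,
// so tests call MongoManager.destroyProject(...) directly.
module.exports = { destroyProject }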

View file

@@ -30,7 +30,7 @@ describe('RangeManager', function () {
 })
 describe('jsonRangesToMongo', function () {
-it('should convert ObjectIds and dates to proper objects', function () {
+it('should convert ObjectIds and dates to proper objects and fix comment id', function () {
 const changeId = new ObjectId().toString()
 const commentId = new ObjectId().toString()
 const userId = new ObjectId().toString()
@@ -66,7 +66,7 @@ describe('RangeManager', function () {
 ],
 comments: [
 {
-id: new ObjectId(commentId),
+id: new ObjectId(threadId),
 op: { c: 'foo', p: 3, t: new ObjectId(threadId) },
 },
 ],
@@ -110,7 +110,6 @@ describe('RangeManager', function () {
 return it('should be consistent when transformed through json -> mongo -> json', function () {
 const changeId = new ObjectId().toString()
-const commentId = new ObjectId().toString()
 const userId = new ObjectId().toString()
 const threadId = new ObjectId().toString()
 const ts = new Date().toJSON()
@@ -127,7 +126,7 @@ describe('RangeManager', function () {
 ],
 comments: [
 {
-id: commentId,
+id: threadId,
 op: { c: 'foo', p: 3, t: threadId },
 },
 ],
@@ -142,6 +141,7 @@ describe('RangeManager', function () {
 return describe('shouldUpdateRanges', function () {
 beforeEach(function () {
+const threadId = new ObjectId()
 this.ranges = {
 changes: [
 {
@@ -155,8 +155,8 @@ describe('RangeManager', function () {
 ],
 comments: [
 {
-id: new ObjectId(),
-op: { c: 'foo', p: 3, t: new ObjectId() },
+id: threadId,
+op: { c: 'foo', p: 3, t: threadId },
 },
 ],
 }
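
The reworked fixtures tie a comment's `id` to the thread id carried in its op, instead of generating an unrelated `commentId`. An illustrative shape, assumed from the test data above (import path assumed):

const { ObjectId } = require('mongodb')

const threadId = new ObjectId()
const comment = {
  id: threadId, // formerly an independent commentId
  op: { c: 'foo', p: 3, t: threadId }, // t points at the comment's thread
}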

View file

@@ -1,3 +1,4 @@
+const OError = require('@overleaf/o-error')
 const DMP = require('diff-match-patch')
 const { TextOperation } = require('overleaf-editor-core')
 const dmp = new DMP()
@@ -38,23 +39,62 @@ module.exports = {
 return ops
 },
-diffAsHistoryV1EditOperation(before, after) {
-const diffs = dmp.diff_main(before, after)
+/**
+ * @param {import("overleaf-editor-core").StringFileData} file
+ * @param {string} after
+ * @return {TextOperation}
+ */
+diffAsHistoryOTEditOperation(file, after) {
+const beforeWithoutTrackedDeletes = file.getContent({
+filterTrackedDeletes: true,
+})
+const diffs = dmp.diff_main(beforeWithoutTrackedDeletes, after)
 dmp.diff_cleanupSemantic(diffs)
+const trackedChanges = file.trackedChanges.asSorted()
+let nextTc = trackedChanges.shift()
 const op = new TextOperation()
 for (const diff of diffs) {
-const [type, content] = diff
+let [type, content] = diff
 if (type === this.ADDED) {
 op.insert(content)
-} else if (type === this.REMOVED) {
-op.remove(content.length)
-} else if (type === this.UNCHANGED) {
-op.retain(content.length)
+} else if (type === this.REMOVED || type === this.UNCHANGED) {
+while (op.baseLength + content.length > nextTc?.range.start) {
+if (nextTc.tracking.type === 'delete') {
+const untilRange = nextTc.range.start - op.baseLength
+if (type === this.REMOVED) {
+op.remove(untilRange)
+} else if (type === this.UNCHANGED) {
+op.retain(untilRange)
+}
+op.retain(nextTc.range.end - nextTc.range.start)
+content = content.slice(untilRange)
+}
+nextTc = trackedChanges.shift()
+}
+if (type === this.REMOVED) {
+op.remove(content.length)
+} else if (type === this.UNCHANGED) {
+op.retain(content.length)
+}
 } else {
 throw new Error('Unknown type')
 }
 }
+while (nextTc) {
+if (
+nextTc.tracking.type !== 'delete' ||
+nextTc.range.start !== op.baseLength
+) {
+throw new OError(
+'StringFileData.trackedChanges out of sync: unexpected range after end of diff',
+{ nextTc, baseLength: op.baseLength }
+)
+}
+op.retain(nextTc.range.end - nextTc.range.start)
+nextTc = trackedChanges.shift()
+}
 return op
 },
 }
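
To see what the tracked-change handling buys, consider a worked example (values assumed): a document whose full content is 'one\none and a half\ntwo', where 'one and a half\n' (15 characters starting at position 4) is a tracked delete, edited so the visible text 'one\ntwo' becomes 'one\nTWO'. The diff runs on the filtered text, but the emitted operation re-retains the hidden range so its base length matches the unfiltered document:

const { TextOperation } = require('overleaf-editor-core')

const op = new TextOperation()
op.retain(4) // 'one\n', unchanged in the visible diff
op.retain(15) // the tracked delete, kept in place rather than dropped
op.remove(3) // 'two'
op.insert('TWO')
// op.baseLength === 22, the length of the full document,
// so the operation applies cleanly to the unfiltered content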

View file

@@ -194,9 +194,8 @@ const DocumentManager = {
 let op
 if (type === 'history-ot') {
 const file = StringFileData.fromRaw(oldLines)
-const operation = DiffCodec.diffAsHistoryV1EditOperation(
-// TODO(24596): tc support for history-ot
-file.getContent({ filterTrackedDeletes: true }),
+const operation = DiffCodec.diffAsHistoryOTEditOperation(
+file,
 newLines.join('\n')
 )
 if (operation.isNoop()) {
@@ -536,11 +535,6 @@ const DocumentManager = {
 if (opts.historyRangesMigration) {
 historyRangesSupport = opts.historyRangesMigration === 'forwards'
 }
-if (!Array.isArray(lines)) {
-const file = StringFileData.fromRaw(lines)
-// TODO(24596): tc support for history-ot
-lines = file.getLines()
-}
 await ProjectHistoryRedisManager.promises.queueResyncDocContent(
 projectId,
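
With the codec now filtering tracked deletes itself, the call site shrinks to passing the StringFileData straight through; the same reasoning removes the getLines() down-conversion in the resync path, since queueResyncDocContent accepts the raw history-ot form. A sketch of the resulting call, mirroring the hunk above:

const file = StringFileData.fromRaw(oldLines)
const operation = DiffCodec.diffAsHistoryOTEditOperation(
  file,
  newLines.join('\n')
)
if (operation.isNoop()) {
  // nothing to apply; otherwise the operation is queued as before
}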

View file

@@ -28,4 +28,19 @@ module.exports = {
 // since we didn't hit the limit in the loop, the document is within the allowed length
 return false
 },
+/**
+ * @param {StringFileRawData} raw
+ * @param {number} maxDocLength
+ */
+stringFileDataContentIsTooLarge(raw, maxDocLength) {
+let n = raw.content.length
+if (n <= maxDocLength) return false // definitely under the limit, no need to calculate the total size
+for (const tc of raw.trackedChanges ?? []) {
+if (tc.tracking.type !== 'delete') continue
+n -= tc.range.length
+if (n <= maxDocLength) return false // under the limit now, no need to calculate the exact size
+}
+return true
+},
 }
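
A quick worked check of the early-exit logic (numbers mirror the unit tests further down): with maxDocLength = 123, a 124-character doc fails the first check, but one tracked delete of length 1 brings the effective size down to 123 and the function returns false without scanning further. Tracked inserts are skipped, since they do not reduce the stored size:

// assuming Limits = require('./Limits') per this service's layout
const raw = {
  content: 'x'.repeat(124),
  trackedChanges: [
    {
      range: { pos: 1, length: 1 },
      tracking: { type: 'delete', ts: '2025-06-16T14:31:44.910Z', userId: 'user-id' },
    },
  ],
}
// 124 > 123, but 124 - 1 <= 123, so:
// Limits.stringFileDataContentIsTooLarge(raw, 123) === false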

View file

@@ -8,13 +8,14 @@ const rclient = require('@overleaf/redis-wrapper').createClient(
 )
 const logger = require('@overleaf/logger')
 const metrics = require('./Metrics')
-const { docIsTooLarge } = require('./Limits')
+const { docIsTooLarge, stringFileDataContentIsTooLarge } = require('./Limits')
 const { addTrackedDeletesToContent, extractOriginOrSource } = require('./Utils')
 const HistoryConversions = require('./HistoryConversions')
 const OError = require('@overleaf/o-error')
 /**
 * @import { Ranges } from './types'
+* @import { StringFileRawData } from 'overleaf-editor-core/lib/types'
 */
 const ProjectHistoryRedisManager = {
@@ -180,7 +181,7 @@ const ProjectHistoryRedisManager = {
 * @param {string} projectId
 * @param {string} projectHistoryId
 * @param {string} docId
-* @param {string[]} lines
+* @param {string[] | StringFileRawData} lines
 * @param {Ranges} ranges
 * @param {string[]} resolvedCommentIds
 * @param {number} version
@@ -204,13 +205,8 @@ const ProjectHistoryRedisManager = {
 'queue doc content resync'
 )
-let content = lines.join('\n')
-if (historyRangesSupport) {
-content = addTrackedDeletesToContent(content, ranges.changes ?? [])
-}
 const projectUpdate = {
-resyncDocContent: { content, version },
+resyncDocContent: { version },
 projectHistoryId,
 path: pathname,
 doc: docId,
@@ -219,17 +215,38 @@ const ProjectHistoryRedisManager = {
 },
 }
-if (historyRangesSupport) {
-projectUpdate.resyncDocContent.ranges =
-HistoryConversions.toHistoryRanges(ranges)
-projectUpdate.resyncDocContent.resolvedCommentIds = resolvedCommentIds
+let content = ''
+if (Array.isArray(lines)) {
+content = lines.join('\n')
+if (historyRangesSupport) {
+content = addTrackedDeletesToContent(content, ranges.changes ?? [])
+projectUpdate.resyncDocContent.ranges =
+HistoryConversions.toHistoryRanges(ranges)
+projectUpdate.resyncDocContent.resolvedCommentIds = resolvedCommentIds
+}
+} else {
+content = lines.content
+projectUpdate.resyncDocContent.historyOTRanges = {
+comments: lines.comments,
+trackedChanges: lines.trackedChanges,
+}
 }
+projectUpdate.resyncDocContent.content = content
 const jsonUpdate = JSON.stringify(projectUpdate)
 // Do an optimised size check on the docLines using the serialised
 // project update length as an upper bound
 const sizeBound = jsonUpdate.length
-if (docIsTooLarge(sizeBound, lines, Settings.max_doc_length)) {
+if (Array.isArray(lines)) {
+if (docIsTooLarge(sizeBound, lines, Settings.max_doc_length)) {
+throw new OError(
+'blocking resync doc content insert into project history queue: doc is too large',
+{ projectId, docId, docSize: sizeBound }
+)
+}
+} else if (
+stringFileDataContentIsTooLarge(lines, Settings.max_doc_length)
+) {
 throw new OError(
 'blocking resync doc content insert into project history queue: doc is too large',
 { projectId, docId, docSize: sizeBound }
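
The function now emits one of two resyncDocContent shapes depending on the document's OT type. Illustrative payloads only, with field values assumed:

// Classic sharejs doc: `lines` is a string[], ranges are converted
// (ranges/resolvedCommentIds only when history ranges support is on)
const classicUpdate = {
  resyncDocContent: {
    version: 42,
    ranges: { /* HistoryConversions.toHistoryRanges(ranges) */ },
    resolvedCommentIds: ['comment-id'],
    content: 'one\ntwo', // tracked deletes spliced back into the text
  },
}

// history-ot doc: `lines` is StringFileRawData, ranges ride along as-is
const historyOTUpdate = {
  resyncDocContent: {
    version: 42,
    historyOTRanges: { comments: [], trackedChanges: [] },
    content: 'one\ntwo', // raw content already includes tracked deletes
  },
}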

View file

@@ -28,12 +28,15 @@ services:
 MOCHA_GREP: ${MOCHA_GREP}
 NODE_ENV: test
 NODE_OPTIONS: "--unhandled-rejections=strict"
+volumes:
+- ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
 depends_on:
 mongo:
 condition: service_started
 redis:
 condition: service_healthy
 user: node
+entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
 command: npm run test:acceptance
@@ -45,7 +48,7 @@ services:
 command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
 user: root
 redis:
-image: redis
+image: redis:7.4.3
 healthcheck:
 test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ]
 interval: 1s

View file

@@ -26,6 +26,7 @@ services:
 - .:/overleaf/services/document-updater
 - ../../node_modules:/overleaf/node_modules
 - ../../libraries:/overleaf/libraries
+- ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
 working_dir: /overleaf/services/document-updater
 environment:
 ELASTIC_SEARCH_DSN: es:9200
@@ -45,10 +46,11 @@ services:
 condition: service_started
 redis:
 condition: service_healthy
+entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
 command: npm run --silent test:acceptance
 redis:
-image: redis
+image: redis:7.4.3
 healthcheck:
 test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ]
 interval: 1s

View file

@@ -15,6 +15,7 @@ const request = require('requestretry').defaults({
 retryDelay: 10,
 })
+const ONLY_PROJECT_ID = process.env.ONLY_PROJECT_ID
 const AUTO_FIX_VERSION_MISMATCH =
 process.env.AUTO_FIX_VERSION_MISMATCH === 'true'
 const AUTO_FIX_PARTIALLY_DELETED_DOC_METADATA =
@@ -319,10 +320,12 @@ async function processProject(projectId) {
 * @return {Promise<{perIterationOutOfSync: number, done: boolean}>}
 */
 async function scanOnce(processed, outOfSync) {
-const projectIds = await ProjectFlusher.promises.flushAllProjects({
-limit: LIMIT,
-dryRun: true,
-})
+const projectIds = ONLY_PROJECT_ID
+? [ONLY_PROJECT_ID]
+: await ProjectFlusher.promises.flushAllProjects({
+limit: LIMIT,
+dryRun: true,
+})
 let perIterationOutOfSync = 0
 for (const projectId of projectIds) {
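
ONLY_PROJECT_ID narrows the scan to a single project instead of enumerating every project via ProjectFlusher. A hypothetical invocation, with the script name and project id assumed for illustration:

// ONLY_PROJECT_ID=507f1f77bcf86cd799439011 node check_redis_mongo_sync_state.js
// Without it, the scan falls back to flushAllProjects({ limit: LIMIT, dryRun: true }).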

View file

@@ -686,4 +686,285 @@ describe('Setting a document', function () {
 })
 })
 })
+describe('with track changes (history-ot)', function () {
+const lines = ['one', 'one and a half', 'two', 'three']
+const userId = DocUpdaterClient.randomId()
+const ts = new Date().toISOString()
+beforeEach(function (done) {
+numberOfReceivedUpdates = 0
+this.newLines = ['one', 'two', 'three']
+this.project_id = DocUpdaterClient.randomId()
+this.doc_id = DocUpdaterClient.randomId()
+this.historyOTUpdate = {
+doc: this.doc_id,
+op: [
+{
+textOperation: [
+4,
+{
+r: 'one and a half\n'.length,
+tracking: {
+type: 'delete',
+userId,
+ts,
+},
+},
+9,
+],
+},
+],
+v: this.version,
+meta: { source: 'random-publicId' },
+}
+MockWebApi.insertDoc(this.project_id, this.doc_id, {
+lines,
+version: this.version,
+otMigrationStage: 1,
+})
+DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
+if (error) {
+throw error
+}
+DocUpdaterClient.sendUpdate(
+this.project_id,
+this.doc_id,
+this.historyOTUpdate,
+error => {
+if (error) {
+throw error
+}
+DocUpdaterClient.waitForPendingUpdates(
+this.project_id,
+this.doc_id,
+done
+)
+}
+)
+})
+})
+afterEach(function () {
+MockProjectHistoryApi.flushProject.resetHistory()
+MockWebApi.setDocument.resetHistory()
+})
+it('should record tracked changes', function (done) {
+docUpdaterRedis.get(
+Keys.docLines({ doc_id: this.doc_id }),
+(error, data) => {
+if (error) {
+throw error
+}
+expect(JSON.parse(data)).to.deep.equal({
+content: lines.join('\n'),
+trackedChanges: [
+{
+range: {
+pos: 4,
+length: 15,
+},
+tracking: {
+ts,
+type: 'delete',
+userId,
+},
+},
+],
+})
+done()
+}
+)
+})
+it('should apply the change', function (done) {
+DocUpdaterClient.getDoc(
+this.project_id,
+this.doc_id,
+(error, res, data) => {
+if (error) {
+throw error
+}
+expect(data.lines).to.deep.equal(this.newLines)
+done()
+}
+)
+})
+const cases = [
+{
+name: 'when resetting the content',
+lines,
+want: {
+content: 'one\none and a half\none and a half\ntwo\nthree',
+trackedChanges: [
+{
+range: {
+pos: 'one and a half\n'.length + 4,
+length: 15,
+},
+tracking: {
+ts,
+type: 'delete',
+userId,
+},
+},
+],
+},
+},
+{
+name: 'when adding content before a tracked delete',
+lines: ['one', 'INSERT', 'two', 'three'],
+want: {
+content: 'one\nINSERT\none and a half\ntwo\nthree',
+trackedChanges: [
+{
+range: {
+pos: 'INSERT\n'.length + 4,
+length: 15,
+},
+tracking: {
+ts,
+type: 'delete',
+userId,
+},
+},
+],
+},
+},
+{
+name: 'when adding content after a tracked delete',
+lines: ['one', 'two', 'INSERT', 'three'],
+want: {
+content: 'one\none and a half\ntwo\nINSERT\nthree',
+trackedChanges: [
+{
+range: {
+pos: 4,
+length: 15,
+},
+tracking: {
+ts,
+type: 'delete',
+userId,
+},
+},
+],
+},
+},
+{
+name: 'when deleting content before a tracked delete',
+lines: ['two', 'three'],
+want: {
+content: 'one and a half\ntwo\nthree',
+trackedChanges: [
+{
+range: {
+pos: 0,
+length: 15,
+},
+tracking: {
+ts,
+type: 'delete',
+userId,
+},
+},
+],
+},
+},
+{
+name: 'when deleting content after a tracked delete',
+lines: ['one', 'two'],
+want: {
+content: 'one\none and a half\ntwo',
+trackedChanges: [
+{
+range: {
+pos: 4,
+length: 15,
+},
+tracking: {
+ts,
+type: 'delete',
+userId,
+},
+},
+],
+},
+},
+{
+name: 'when deleting content immediately after a tracked delete',
+lines: ['one', 'three'],
+want: {
+content: 'one\none and a half\nthree',
+trackedChanges: [
+{
+range: {
+pos: 4,
+length: 15,
+},
+tracking: {
+ts,
+type: 'delete',
+userId,
+},
+},
+],
+},
+},
+{
+name: 'when deleting content across a tracked delete',
+lines: ['onethree'],
+want: {
+content: 'oneone and a half\nthree',
+trackedChanges: [
+{
+range: {
+pos: 3,
+length: 15,
+},
+tracking: {
+ts,
+type: 'delete',
+userId,
+},
+},
+],
+},
+},
+]
+for (const { name, lines, want } of cases) {
+describe(name, function () {
+beforeEach(function (done) {
+DocUpdaterClient.setDocLines(
+this.project_id,
+this.doc_id,
+lines,
+this.source,
+userId,
+false,
+(error, res, body) => {
+if (error) {
+return done(error)
+}
+this.statusCode = res.statusCode
+this.body = body
+done()
+}
+)
+})
+it('should update accordingly', function (done) {
+docUpdaterRedis.get(
+Keys.docLines({ doc_id: this.doc_id }),
+(error, data) => {
+if (error) {
+throw error
+}
+expect(JSON.parse(data)).to.deep.equal(want)
+done()
+}
+)
+})
+})
+}
+})
 })
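
Reading the textOperation in the fixture above (component meanings assumed from the history-ot encoding): plain numbers retain characters, and an object with `r` retains a span while attaching tracking metadata. Over the 28-character document 'one\none and a half\ntwo\nthree':

// [4, { r: 15, tracking: { type: 'delete', userId, ts } }, 9]
//
// retain 4  -> 'one\n' left untouched
// retain 15 as a tracked delete -> 'one and a half\n' stays in the
//              stored content but is hidden from the visible doc
// retain 9  -> 'two\nthree' left untouched
//
// Hence getDoc returns ['one', 'two', 'three'] while Redis still holds
// the full content plus the trackedChanges range { pos: 4, length: 15 }.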

View file

@@ -81,4 +81,88 @@ describe('Limits', function () {
 })
 })
 })
+describe('stringFileDataContentIsTooLarge', function () {
+it('should handle small docs', function () {
+expect(
+this.Limits.stringFileDataContentIsTooLarge({ content: '' }, 123)
+).to.equal(false)
+})
+it('should handle docs at the limit', function () {
+expect(
+this.Limits.stringFileDataContentIsTooLarge(
+{ content: 'x'.repeat(123) },
+123
+)
+).to.equal(false)
+})
+it('should handle docs above the limit', function () {
+expect(
+this.Limits.stringFileDataContentIsTooLarge(
+{ content: 'x'.repeat(123 + 1) },
+123
+)
+).to.equal(true)
+})
+it('should handle docs above the limit and below with tracked-deletes removed', function () {
+expect(
+this.Limits.stringFileDataContentIsTooLarge(
+{
+content: 'x'.repeat(123 + 1),
+trackedChanges: [
+{
+range: { pos: 1, length: 1 },
+tracking: {
+type: 'delete',
+ts: '2025-06-16T14:31:44.910Z',
+userId: 'user-id',
+},
+},
+],
+},
+123
+)
+).to.equal(false)
+})
+it('should handle docs above the limit and above with tracked-deletes removed', function () {
+expect(
+this.Limits.stringFileDataContentIsTooLarge(
+{
+content: 'x'.repeat(123 + 2),
+trackedChanges: [
+{
+range: { pos: 1, length: 1 },
+tracking: {
+type: 'delete',
+ts: '2025-06-16T14:31:44.910Z',
+userId: 'user-id',
+},
+},
+],
+},
+123
+)
+).to.equal(true)
+})
+it('should handle docs above the limit and with tracked-inserts', function () {
+expect(
+this.Limits.stringFileDataContentIsTooLarge(
+{
+content: 'x'.repeat(123 + 1),
+trackedChanges: [
+{
+range: { pos: 1, length: 1 },
+tracking: {
+type: 'insert',
+ts: '2025-06-16T14:31:44.910Z',
+userId: 'user-id',
+},
+},
+],
+},
+123
+)
+).to.equal(true)
+})
+})
 })

View file

@@ -15,6 +15,7 @@ describe('ProjectHistoryRedisManager', function () {
 this.Limits = {
 docIsTooLarge: sinon.stub().returns(false),
+stringFileDataContentIsTooLarge: sinon.stub().returns(false),
 }
 this.ProjectHistoryRedisManager = SandboxedModule.require(modulePath, {
@@ -61,22 +62,18 @@ describe('ProjectHistoryRedisManager', function () {
 })
 it('should queue an update', function () {
-this.multi.rpush
-.calledWithExactly(
-`ProjectHistory:Ops:${this.project_id}`,
-this.ops[0],
-this.ops[1]
-)
-.should.equal(true)
+this.multi.rpush.should.have.been.calledWithExactly(
+`ProjectHistory:Ops:${this.project_id}`,
+this.ops[0],
+this.ops[1]
+)
 })
 it('should set the queue timestamp if not present', function () {
-this.multi.setnx
-.calledWithExactly(
-`ProjectHistory:FirstOpTimestamp:${this.project_id}`,
-Date.now()
-)
-.should.equal(true)
+this.multi.setnx.should.have.been.calledWithExactly(
+`ProjectHistory:FirstOpTimestamp:${this.project_id}`,
+Date.now()
+)
 })
 })
@@ -118,9 +115,10 @@ describe('ProjectHistoryRedisManager', function () {
 file: this.file_id,
 }
-this.ProjectHistoryRedisManager.promises.queueOps
-.calledWithExactly(this.project_id, JSON.stringify(update))
-.should.equal(true)
+this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
+this.project_id,
+JSON.stringify(update)
+)
 })
 })
@@ -166,9 +164,10 @@ describe('ProjectHistoryRedisManager', function () {
 doc: this.doc_id,
 }
-this.ProjectHistoryRedisManager.promises.queueOps
-.calledWithExactly(this.project_id, JSON.stringify(update))
-.should.equal(true)
+this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
+this.project_id,
+JSON.stringify(update)
+)
 })
 it('should queue an update with file metadata', async function () {
@@ -350,9 +349,10 @@ describe('ProjectHistoryRedisManager', function () {
 doc: this.doc_id,
 }
-this.ProjectHistoryRedisManager.promises.queueOps
-.calledWithExactly(this.project_id, JSON.stringify(update))
-.should.equal(true)
+this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
+this.project_id,
+JSON.stringify(update)
+)
 })
 it('should not forward ranges if history ranges support is undefined', async function () {
@@ -402,9 +402,10 @@ describe('ProjectHistoryRedisManager', function () {
 doc: this.doc_id,
 }
-this.ProjectHistoryRedisManager.promises.queueOps
-.calledWithExactly(this.project_id, JSON.stringify(update))
-.should.equal(true)
+this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
+this.project_id,
+JSON.stringify(update)
+)
 })
 it('should pass "false" as the createdBlob field if not provided', async function () {
@@ -432,9 +433,10 @@ describe('ProjectHistoryRedisManager', function () {
 doc: this.doc_id,
 }
-this.ProjectHistoryRedisManager.promises.queueOps
-.calledWithExactly(this.project_id, JSON.stringify(update))
-.should.equal(true)
+this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
+this.project_id,
+JSON.stringify(update)
+)
 })
 it('should pass through the value of the createdBlob field', async function () {
@@ -463,9 +465,10 @@ describe('ProjectHistoryRedisManager', function () {
 doc: this.doc_id,
 }
-this.ProjectHistoryRedisManager.promises.queueOps
-.calledWithExactly(this.project_id, JSON.stringify(update))
-.should.equal(true)
+this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
+this.project_id,
+JSON.stringify(update)
+)
 })
 })
@@ -493,8 +496,8 @@ describe('ProjectHistoryRedisManager', function () {
 beforeEach(async function () {
 this.update = {
 resyncDocContent: {
-content: 'one\ntwo',
 version: this.version,
+content: 'one\ntwo',
 },
 projectHistoryId: this.projectHistoryId,
 path: this.pathname,
@@ -516,19 +519,18 @@ describe('ProjectHistoryRedisManager', function () {
 })
 it('should check if the doc is too large', function () {
-this.Limits.docIsTooLarge
-.calledWith(
-JSON.stringify(this.update).length,
-this.lines,
-this.settings.max_doc_length
-)
-.should.equal(true)
+this.Limits.docIsTooLarge.should.have.been.calledWith(
+JSON.stringify(this.update).length,
+this.lines,
+this.settings.max_doc_length
+)
 })
 it('should queue an update', function () {
-this.ProjectHistoryRedisManager.promises.queueOps
-.calledWithExactly(this.project_id, JSON.stringify(this.update))
-.should.equal(true)
+this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
+this.project_id,
+JSON.stringify(this.update)
+)
 })
 })
@@ -551,9 +553,8 @@ describe('ProjectHistoryRedisManager', function () {
 })
 it('should not queue an update if the doc is too large', function () {
-this.ProjectHistoryRedisManager.promises.queueOps.called.should.equal(
-false
-)
+this.ProjectHistoryRedisManager.promises.queueOps.should.not.have.been
+.called
 })
 })
@@ -561,10 +562,10 @@ describe('ProjectHistoryRedisManager', function () {
 beforeEach(async function () {
 this.update = {
 resyncDocContent: {
-content: 'onedeleted\ntwo',
 version: this.version,
 ranges: this.ranges,
 resolvedCommentIds: this.resolvedCommentIds,
+content: 'onedeleted\ntwo',
 },
 projectHistoryId: this.projectHistoryId,
 path: this.pathname,
@@ -601,9 +602,76 @@ describe('ProjectHistoryRedisManager', function () {
 })
 it('should queue an update', function () {
-this.ProjectHistoryRedisManager.promises.queueOps
-.calledWithExactly(this.project_id, JSON.stringify(this.update))
-.should.equal(true)
+this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
+this.project_id,
+JSON.stringify(this.update)
+)
+})
+})
+describe('history-ot', function () {
+beforeEach(async function () {
+this.lines = {
+content: 'onedeleted\ntwo',
+comments: [{ id: 'id1', ranges: [{ pos: 0, length: 3 }] }],
+trackedChanges: [
+{
+range: { pos: 3, length: 7 },
+tracking: {
+type: 'delete',
+userId: 'user-id',
+ts: '2025-06-16T14:31:44.910Z',
+},
+},
+],
+}
+this.update = {
+resyncDocContent: {
+version: this.version,
+historyOTRanges: {
+comments: this.lines.comments,
+trackedChanges: this.lines.trackedChanges,
+},
+content: this.lines.content,
+},
+projectHistoryId: this.projectHistoryId,
+path: this.pathname,
+doc: this.doc_id,
+meta: { ts: new Date() },
+}
+await this.ProjectHistoryRedisManager.promises.queueResyncDocContent(
+this.project_id,
+this.projectHistoryId,
+this.doc_id,
+this.lines,
+this.ranges,
+this.resolvedCommentIds,
+this.version,
+this.pathname,
+true
+)
+})
+it('should include tracked deletes in the update', function () {
+this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
+this.project_id,
+JSON.stringify(this.update)
+)
+})
+it('should check the doc length without tracked deletes', function () {
+this.Limits.stringFileDataContentIsTooLarge.should.have.been.calledWith(
+this.lines,
+this.settings.max_doc_length
+)
+})
+it('should queue an update', function () {
+this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
+this.project_id,
+JSON.stringify(this.update)
+)
 })
 })
 })
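
These tests switch from chaining `.calledWithExactly(...).should.equal(true)` on the stub to sinon-chai's `should.have.been.calledWithExactly(...)`. Both assert the same call, but the latter reports the expected and actual arguments on failure instead of "expected false to equal true". A minimal sketch, with the sinon-chai wiring assumed:

const chai = require('chai')
const sinon = require('sinon')
chai.should()
chai.use(require('sinon-chai'))

const queueOps = sinon.stub()
queueOps('project-id', '{"resyncProjectStructure":1}')

// Before: a bare boolean assertion with an opaque failure message
queueOps.calledWithExactly('project-id', '{"resyncProjectStructure":1}').should.equal(true)

// After: sinon-chai prints the mismatched call arguments on failure
queueOps.should.have.been.calledWithExactly('project-id', '{"resyncProjectStructure":1}')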

View file

@@ -111,6 +111,11 @@ if (settings.filestore.stores.template_files) {
 keyBuilder.templateFileKeyMiddleware,
 fileController.insertFile
 )
+app.delete(
+'/template/:template_id/v/:version/:format',
+keyBuilder.templateFileKeyMiddleware,
+fileController.deleteFile
+)
 }
 app.get(

View file

@@ -5,7 +5,7 @@ const { callbackify } = require('node:util')
 const safeExec = require('./SafeExec').promises
 const { ConversionError } = require('./Errors')
-const APPROVED_FORMATS = ['png']
+const APPROVED_FORMATS = ['png', 'jpg']
 const FOURTY_SECONDS = 40 * 1000
 const KILL_SIGNAL = 'SIGTERM'
@@ -34,16 +34,14 @@ async function convert(sourcePath, requestedFormat) {
 }
 async function thumbnail(sourcePath) {
-const width = '260x'
-return await convert(sourcePath, 'png', [
+const width = '548x'
+return await _convert(sourcePath, 'jpg', [
 'convert',
 '-flatten',
 '-background',
 'white',
 '-density',
 '300',
-'-define',
-`pdf:fit-page=${width}`,
 `${sourcePath}[0]`,
 '-resize',
 width,
@@ -51,16 +49,14 @@ async function thumbnail(sourcePath) {
 }
 async function preview(sourcePath) {
-const width = '548x'
-return await convert(sourcePath, 'png', [
+const width = '794x'
+return await _convert(sourcePath, 'jpg', [
 'convert',
 '-flatten',
 '-background',
 'white',
 '-density',
 '300',
-'-define',
-`pdf:fit-page=${width}`,
 `${sourcePath}[0]`,
 '-resize',
 width,
View file

@@ -150,7 +150,9 @@ async function _getConvertedFileAndCache(bucket, key, convertedKey, opts) {
 let convertedFsPath
 try {
 convertedFsPath = await _convertFile(bucket, key, opts)
-await ImageOptimiser.promises.compressPng(convertedFsPath)
+if (convertedFsPath.toLowerCase().endsWith(".png")) {
+await ImageOptimiser.promises.compressPng(convertedFsPath)
+}
 await PersistorManager.sendFile(bucket, convertedKey, convertedFsPath)
 } catch (err) {
 LocalFileWriter.deleteFile(convertedFsPath, () => {})

View file

@@ -0,0 +1,76 @@
+const crypto = require('node:crypto')
+class Rollout {
+constructor(config) {
+// The history buffer level is used to determine whether to queue changes
+// in Redis or persist them directly to the chunk store.
+// It defaults to 0 (no queuing) if not set.
+this.historyBufferLevel = config.has('historyBufferLevel')
+? parseInt(config.get('historyBufferLevel'), 10)
+: 0
+// The forcePersistBuffer flag will ensure the buffer is fully persisted before
+// any persist operation. Set this to true if you want to make the persisted-version
+// in Redis match the endVersion of the latest chunk. This should be set to true
+// when downgrading from a history buffer level that queues changes in Redis
+// without persisting them immediately.
+this.forcePersistBuffer = config.has('forcePersistBuffer')
+? config.get('forcePersistBuffer') === 'true'
+: false
+// Support gradual rollout of the next history buffer level
+// with a percentage of projects using it.
+this.nextHistoryBufferLevel = config.has('nextHistoryBufferLevel')
+? parseInt(config.get('nextHistoryBufferLevel'), 10)
+: null
+this.nextHistoryBufferLevelRolloutPercentage = config.has(
+'nextHistoryBufferLevelRolloutPercentage'
+)
+? parseInt(config.get('nextHistoryBufferLevelRolloutPercentage'), 10)
+: 0
+}
+report(logger) {
+logger.info(
+{
+historyBufferLevel: this.historyBufferLevel,
+forcePersistBuffer: this.forcePersistBuffer,
+nextHistoryBufferLevel: this.nextHistoryBufferLevel,
+nextHistoryBufferLevelRolloutPercentage:
+this.nextHistoryBufferLevelRolloutPercentage,
+},
+this.historyBufferLevel > 0 || this.forcePersistBuffer
+? 'using history buffer'
+: 'history buffer disabled'
+)
+}
+/**
+ * Get the history buffer level for a project.
+ * @param {string} projectId
+ * @returns {Object} - An object containing the history buffer level and force persist buffer flag.
+ * @property {number} historyBufferLevel - The history buffer level to use for processing changes.
+ * @property {boolean} forcePersistBuffer - If true, forces the buffer to be persisted before any operation.
+ */
+getHistoryBufferLevelOptions(projectId) {
+if (
+this.nextHistoryBufferLevel > this.historyBufferLevel &&
+this.nextHistoryBufferLevelRolloutPercentage > 0
+) {
+const hash = crypto.createHash('sha1').update(projectId).digest('hex')
+const percentage = parseInt(hash.slice(0, 8), 16) % 100
+// If the project is in the rollout percentage, we use the next history buffer level.
+if (percentage < this.nextHistoryBufferLevelRolloutPercentage) {
+return {
+historyBufferLevel: this.nextHistoryBufferLevel,
+forcePersistBuffer: this.forcePersistBuffer,
+}
+}
+}
+return {
+historyBufferLevel: this.historyBufferLevel,
+forcePersistBuffer: this.forcePersistBuffer,
+}
+}
+}
+module.exports = Rollout
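
Usage sketch for the new Rollout helper; the config object is assumed to follow the node-config has/get interface used above, and the require path is illustrative:

const config = require('config')
const logger = require('@overleaf/logger')
const Rollout = require('./Rollout') // path assumed

const rollout = new Rollout(config)
rollout.report(logger)

// Bucketing is deterministic: the project id's SHA-1 is reduced to a
// number in [0, 100), so a given project always falls on the same side
// of the rollout split as the percentage ramps up.
const { historyBufferLevel, forcePersistBuffer } =
  rollout.getHistoryBufferLevelOptions('507f1f77bcf86cd799439011')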

Some files were not shown because too many files have changed in this diff.