Compare commits

...

170 commits

Author SHA1 Message Date
CloudBuild
ffbb09e1d4 auto update translation
GitOrigin-RevId: a7a660549ae9997345e376977da0880a239541ea
2025-05-05 08:06:03 +00:00
Tim Down
057f4b4bb5 Merge pull request #25113 from overleaf/td-remove-button-info-variant
Change labs/new editor buttons to be based on secondary buttons

GitOrigin-RevId: f02565e60be33ff75c217c1ea1d0f24b3b619ed4
2025-05-05 08:05:40 +00:00
Tim Down
d9587e8b06 Merge pull request #25264 from overleaf/bg-td-account-deletion-logging
Add logging for each stage of user deletion

GitOrigin-RevId: 13f9575012fcd8f166c4b14eba2ee5910658072e
2025-05-05 08:05:36 +00:00
Mathias Jakobsen
50c9de6178 Merge pull request #25262 from overleaf/mj-project-search-signup-date-promo
[web] Only show full-project-search promotion to older users

GitOrigin-RevId: 086d904d2f78c2eba30e1db37ac8eb3c606f118f
2025-05-05 08:05:31 +00:00
Mathias Jakobsen
7a072164a2 Merge pull request #25127 from overleaf/mj-full-project-search-promotion
[web] Add promotion for full-project-search

GitOrigin-RevId: e102dbf7df8b63afc592c57ebf6dafa51efdf9ff
2025-05-05 08:05:23 +00:00
Mathias Jakobsen
930401541d Merge pull request #25190 from overleaf/mj-survey-signup-limits
[web] Add options to limit survey exposure based on signup date

GitOrigin-RevId: 5719997339b5040d5cc42ffe7bee6d7b66bff12d
2025-05-05 08:05:18 +00:00
Mathias Jakobsen
081fced4bd Merge pull request #25217 from overleaf/mj-beamer-arrow-links
[web] Fix arrow movement in beamer presentation mode after link click

GitOrigin-RevId: 85701d9c918889981faa34f1adb57fb901d7b9df
2025-05-05 08:05:14 +00:00
Jimmy Domagala-Tang
22ad3a86a9 Merge pull request #25148 from overleaf/jdt-bundle-price-update
Update prices and naming for AI Assist bundle

GitOrigin-RevId: ece300a9d009a9e17594d3c052b33321c9b17f82
2025-05-02 08:06:10 +00:00
M Fahru
dd3ae65bd2 Merge pull request #25166 from overleaf/kh-cancel-subscription
[web] support canceling Stripe subscription

GitOrigin-RevId: a72ccb20fbef9b6662cdfa1dcffacbd76dcb694c
2025-05-02 08:06:02 +00:00
Eric Mc Sween
2a88d7d9c9 Merge pull request #25152 from overleaf/em-ds-mobile-app
Endpoints for DS mobile app

GitOrigin-RevId: c7cf867bde60a0293c1c9f68f5a08515d0d2e904
2025-05-02 08:05:57 +00:00
Eric Mc Sween
6f05a43f32 Merge pull request #25064 from overleaf/em-oauth-no-secret
Do not require a secret in OAuth configurations

GitOrigin-RevId: 8d67436bc9366ef5991c02e89a81dbbf573196d7
2025-05-02 08:05:52 +00:00
David
79f9957b68 Merge pull request #25164 from overleaf/dp-editor-survey
Create Editor Survey

GitOrigin-RevId: dc11ef16c0a00aa846ac7a664dd88e9531e832f2
2025-05-02 08:05:47 +00:00
David
32a8142f9c Merge pull request #25211 from overleaf/dp-review-panel-mini-click
Fix bug where clicking on comment/change in mini panel would not open full panel in new editor

GitOrigin-RevId: e7db345e01b881255a1651b37dec637f04692f3e
2025-05-02 08:05:43 +00:00
David
32b30606e5 Merge pull request #25213 from overleaf/dp-switch-to-editor
Add SwitchToEditorButton to new editor

GitOrigin-RevId: 6ea546fbc208c0a815f895c33ce4b5fe67829083
2025-05-02 08:05:38 +00:00
Tim Down
7abafb01ea Merge pull request #23940 from overleaf/td-react-18
Upgrade to React 18

GitOrigin-RevId: 9b81936e6eea2bccd97fe5c2c5841f0b946371b8
2025-05-02 08:05:29 +00:00
Brian Gough
4464320757 Merge pull request #25224 from overleaf/em-disable-chunk-buffer
Bypass chunk buffer when loading the latest chunk

GitOrigin-RevId: 98a15b496b0d52802f9b61cefb60a7b8df653fb2
2025-05-02 08:05:17 +00:00
Mathias Jakobsen
4bbd5f32b9 Merge pull request #25205 from overleaf/mj-subeqnarray
[web] Add subeqnarray support

GitOrigin-RevId: a26fe362ec6e053134f9f4454979b773cae33241
2025-05-02 08:05:13 +00:00
Mathias Jakobsen
4077486b86 Merge pull request #25178 from overleaf/mj-ide-redesign-deleted-chat-user
[web] Editor redesign: Handle deleted user in chat

GitOrigin-RevId: 905896883657d6a39fd3c0de2306af18580d1be3
2025-05-02 08:05:08 +00:00
Antoine Clausse
666481d8b2 Merge pull request #25221 from overleaf/revert-25207-ac-promisify-compile-controller-2
Revert "[web] Promisify ClsiCookieManager and CompileController (reapply and fix)"

GitOrigin-RevId: 4495b0fdee22c9c2a7eb39250aef498883389427
2025-05-01 08:06:55 +00:00
Antoine Clausse
61db35ac8f Merge pull request #25207 from overleaf/ac-promisify-compile-controller-2
[web] Promisify ClsiCookieManager and CompileController (reapply and fix)

GitOrigin-RevId: 0737f30c24bf92b33327dc7d0e015ac2cd7d751d
2025-05-01 08:06:47 +00:00
Miguel Serrano
707e197625 Merge pull request #25008 from overleaf/msm-missing-close-handler-modal
[web] Fix `cancel` button in `UnlinkUserModal`.

GitOrigin-RevId: 5b84c9593ce59c6da9aae9cf4f8e763151202d34
2025-05-01 08:06:32 +00:00
Miguel Serrano
958e05a001 [web] script to update group members via CSV (#24861)
* [web] script to update group members via CSV

GitOrigin-RevId: 973d1bdb1180af008608e14e1ff31af83e47f630
2025-05-01 08:06:27 +00:00
David
5b499efd23 Merge pull request #25128 from overleaf/dp-synctex
Add synctex controls with buttons hidden to new editor

GitOrigin-RevId: 27566210444ca6d83fef977290fa7c2700f2bb62
2025-05-01 08:06:15 +00:00
Antoine Clausse
d7d60f9d4c Merge pull request #25200 from overleaf/revert-25023-ac-promisify-compile-controller
Revert "[web] Promisify ClsiCookieManager and CompileController"

GitOrigin-RevId: 190ee8d2be23687f092e762c5199a34bcdf37cf9
2025-05-01 08:06:00 +00:00
Jakob Ackermann
7256c99e29 [clsi-cache] scale writes (#25198)
* [k8s] clsi-cache: increase the number of workers

* [clsi-cache] add a global limit on concurrent writes

* [k8s] clsi-cache: increase timeouts for health checks

* [k8s] clsi-cache: align resource requests with current usage

GitOrigin-RevId: 2aba881ac0e581aa8db78a30d2c58afee6702318
2025-05-01 08:05:55 +00:00
Antoine Clausse
a8d6055b4e [web] Migrate user-activate module to BS5 (#25174)
* Revert-me: Add `user-activate` to SAAS modules

* Migrate user-activate module to BS5

* Add loading state to button

* Revert "Revert-me: Add `user-activate` to SAAS modules"

This reverts commit 0584005953bf470ab21697e5c5448c566d95ca5d.

* Remove `bootstrap5PageStatus` var in register.pug

GitOrigin-RevId: 45fffc902e69a0b8f6e2a1a9c0381c9e844fafca
2025-05-01 08:05:51 +00:00
Antoine Clausse
c51d6f46d4 Add script to remove emails with commas, taken from a CSV file (#25107)
* Add script to remove emails with commas and replace them with emails to support with encoded former emails

* Enhance RemoveEmailsWithCommasScriptTest to verify unchanged user data

GitOrigin-RevId: 6961995f2a143ac1c53bc2eeb183808a4be7dd02
2025-05-01 08:05:46 +00:00
Antoine Clausse
73476180d4 [web] Promisify ClsiCookieManager and CompileController (#25023)
* Refactor `ClsiCookieManager` to use async/await

* Refactor `CompileController` to use async/await

* Fix tests: CompileControllerTests.js

* Fix tests: ClsiCookieManagerTests.js

* Fixup: keep old object key (`serverId`->`clsiServerId`)

* Prefix non-express methods with an underscore

* Use async versions of `rclient` methods

* Fix: `canContinue` must be `false` when rate limited (!!)

* Remove unused `ClsiCookieManager.getCookieJar`

* Remove bad comments

* Fix linting

* Replace `request` by `@overleaf/fetch-utils`

* Replace `callsFake` by `resolves`

* Update `catch` block after request->fetch update: check status code

* Re-add timeout of 30s (was removed in 3df75ab5ccc8a63bd69a6a6e6052ef0451b76220)

* `npm run format:fix`

* Don't throw on 4xx errors; keep functionality similar to the current implementation using `request`.

GitOrigin-RevId: ef161f6c252d13f407e9cf28029b62061d6b992f
2025-05-01 08:05:42 +00:00
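The commit above replaces the legacy `request` client with `@overleaf/fetch-utils` and tightens the error handling around status codes ("check status code", "don't throw on 4xx errors"). A minimal sketch of that pattern, with an illustrative URL and handler rather than the actual controller code:

```js
// Illustrative request -> fetch-utils migration; fetchJson and
// RequestFailedError come from @overleaf/fetch-utils.
const { fetchJson, RequestFailedError } = require('@overleaf/fetch-utils')

async function getCompileStatus(url, signal) {
  try {
    return await fetchJson(url, { signal })
  } catch (err) {
    if (
      err instanceof RequestFailedError &&
      err.response.status >= 400 &&
      err.response.status < 500
    ) {
      // fetch-utils throws on any non-2xx status; swallow 4xx responses here
      // to keep behaviour close to the old `request`-based code.
      return null
    }
    throw err
  }
}
```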
CloudBuild
b3cc1fa582 auto update translation
GitOrigin-RevId: 07c6f33cf063a2ebf52304afd29adc807a7ca989
2025-05-01 08:05:33 +00:00
Brian Gough
19a804d5bf Merge pull request #25147 from overleaf/bg-history-buffer-use-persist-time
use persist time in history buffer

GitOrigin-RevId: 881c42f86c6cd3cc2ea8373af4371ccc1a89e9ed
2025-05-01 08:05:21 +00:00
Jakob Ackermann
478e264817 [fetch-utils] fix leak of abort event handlers in AbortSignal (#25172)
GitOrigin-RevId: 992496010eb1cbe571b2e87fab8e7227b0d64538
2025-04-30 08:06:05 +00:00
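The fix above removes 'abort' listeners from the caller's AbortSignal once a request settles; the corresponding `abortOnSignal`/`detachSignal` change appears in the fetch-utils diff further down. A small standalone sketch of the same pattern (names hypothetical):

```js
// Each request adds an 'abort' listener to a long-lived AbortSignal, so the
// listener must be detached when the request is done or listeners pile up.
const events = require('node:events')

function linkSignals(abortController, outerSignal) {
  const listener = () => abortController.abort(outerSignal.reason)
  outerSignal.addEventListener('abort', listener)
  // Returning a detach function lets the caller remove the listener once the
  // response has finished, which is what the fix does.
  return () => outerSignal.removeEventListener('abort', listener)
}

const signal = AbortSignal.timeout(10_000)
const detach = linkSignals(new AbortController(), signal)
console.log(events.getEventListeners(signal, 'abort').length) // 1
detach()
console.log(events.getEventListeners(signal, 'abort').length) // 0
```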
Rebeka Dekany
df3d9099b6 Add aria-label to indicate the current PDF page number to screen readers (#25034)
GitOrigin-RevId: c2d64928378c9919f8ab8480559418cc52338854
2025-04-30 08:05:31 +00:00
Rebeka Dekany
2731ffaf10 Make editor popover toolbar keyboard focusable (#25169)
* Remove redundant class conflicting with focus styling

* Make the toolbar in the popover focusable via keyboard

* Focus to the first context menu item via keyboard only

GitOrigin-RevId: 7d3e2af4ba96654b5b2312b3999483c2a439b406
2025-04-30 08:05:25 +00:00
Alf Eaton
14c82ac94d Merge pull request #25149 from overleaf/ae-disable-toggletabfocusmode
Disable toggleTabFocusMode

GitOrigin-RevId: 7f9f9a1a57e6656a3bd03c1986ae6e4d3be71f84
2025-04-30 08:05:13 +00:00
Alf Eaton
52e6a216f4 Set will-change: transform on PDF canvas container (#25153)
GitOrigin-RevId: 316a99ac99cbff321fc2ae5a737d0aaf134ff775
2025-04-30 08:05:09 +00:00
ilkin-overleaf
1a8c549389 Merge pull request #24412 from overleaf/ii-flexible-licensing-manually-collected-2
[web] Add seats feature for manually collected subscriptions

GitOrigin-RevId: f7cc6f8ce17163f10e175a06bb471de6e3a96e3c
2025-04-30 08:05:00 +00:00
Liangjun Song
62760a9bf5 Merge pull request #25117 from overleaf/ls-map-stripe-product-to-recurly-plan-code
Use metadata to map Stripe product to Recurly plan code

GitOrigin-RevId: 775eb39cedff81985fc72cb14d411575231ade8f
2025-04-29 08:06:28 +00:00
Tim Down
5d78229e1e Merge pull request #25093 from overleaf/td-upgrade-react-error-boundary-second-attempt
Upgrade react-error-boundary to version 5, second attempt

GitOrigin-RevId: 2b88334b66f0ace383211c147279ff88e9f956bb
2025-04-29 08:06:23 +00:00
Mathias Jakobsen
d7bd665bee Merge pull request #25060 from overleaf/mj-core-pug-bs5
[web] Convert various pug pages to BS5

GitOrigin-RevId: f42de89eca778db19d5faa8a19fa8a210c7a749b
2025-04-29 08:06:01 +00:00
Mathias Jakobsen
447be67f78 Merge pull request #25118 from overleaf/mj-coloneq
[web] Redefine coloneq to coloneqq in mathjax

GitOrigin-RevId: 1797acff022d45e89bcec518905cf53cbc45825e
2025-04-29 08:05:57 +00:00
Mathias Jakobsen
5fec16153b Merge pull request #25119 from overleaf/mj-ide-hide-project-search
[web] Hide full project search button in new editor

GitOrigin-RevId: badbed06ab311f63e18a3687771d209e7c853d42
2025-04-29 08:05:52 +00:00
Domagoj Kriskovic
53c34b5726 Add "referrer" tracking to payment flows for AI assist (#25002)
* Add "referrer" tracking to payment flows for AI assist

* move sendMB call

* fix conflict

GitOrigin-RevId: 0c4480816d5fe525a87223c0b1827093d853d474
2025-04-29 08:05:41 +00:00
Andrew Rumble
2b49653f21 Improve logging
This includes setting up the verbose mode appropriately

GitOrigin-RevId: 6d7499467ae1ca80ca88963b14360931c7eb35e6
2025-04-29 08:05:36 +00:00
Andrew Rumble
a0aa6b9cc7 Use correct identifier for updating history id
GitOrigin-RevId: d95c6e85a8fd530189c14e36fed1112a51fb8609
2025-04-29 08:05:32 +00:00
Andrew Rumble
f5a89cc38f Add a script for finding projects missing a history id in Redis
GitOrigin-RevId: 3606e82c8e0fc8e1118cd6ce1981610ec20b73af
2025-04-29 08:05:27 +00:00
Andrew Rumble
2c3eed8d96 Use assert.fail instead of comparing false and true
GitOrigin-RevId: c6cbaf0a424771cf2a037366fccd790a13d9b1bb
2025-04-29 08:05:23 +00:00
Andrew Rumble
2ad9f36706 Promisify tests
GitOrigin-RevId: 6f413f4c5ef8d034b4e94afacdf2d7b43c3a8830
2025-04-29 08:05:18 +00:00
Brian Gough
850da34778 Merge pull request #25086 from overleaf/bg-history-buffer-use-expire-time
add expire time to redis buffer in history-v1

GitOrigin-RevId: 3d74957c341e62e536dc60869a7ca71ac173e380
2025-04-29 08:05:14 +00:00
Kristina
23c1a0ba4d Merge pull request #25082 from overleaf/kh-prevent-pausing-and-group-plans
[web] prevent pausing or upgrading to group for Stripe subscriptions

GitOrigin-RevId: 4d194339282d8bc165ffa1b89e8e1cf298c2d343
2025-04-29 08:05:09 +00:00
Rebeka Dekany
71094cb283 Fix the React version of Facebook logo (#25070)
GitOrigin-RevId: 8357f6d3fef31fdea9d5d77b11093946a4a7ceba
2025-04-28 08:06:11 +00:00
Domagoj Kriskovic
35722acb3d Update review panel entry header user width in mini view (#25027)
GitOrigin-RevId: cbb62889f58b913e874fb947b95c16b0e0882671
2025-04-28 08:05:56 +00:00
Domagoj Kriskovic
8870aa6e63 Fix AI assist links based on annual/monthly switch (#25052)
* Fix AI assist links based on annual/monthly switch

* use translations for annually discount

GitOrigin-RevId: eaf10720eb162c8ecbd891e8f73475db0c02b9f9
2025-04-28 08:05:51 +00:00
Domagoj Kriskovic
5a4cf8a003 Refactor Writefull init to always include overleafLabels (#25055)
GitOrigin-RevId: 416b2d1ea3a24e7c879779078e4f2a13ca6c3555
2025-04-28 08:05:47 +00:00
Alf Eaton
c732a02b38 Use toast notifications for SyncTeX errors and handle missing file errors (#24579)
GitOrigin-RevId: 88c6658ff0d11fdb43cef19c48b542a3b2206666
2025-04-28 08:05:42 +00:00
Alf Eaton
c6ac06b51c Start adding client-side word count (#24892)
GitOrigin-RevId: 6c17d7bf7095794c003e17939a8302fc6b059262
2025-04-28 08:05:38 +00:00
Alf Eaton
c378f0961c Add footnote and endnote commands to the LaTeX grammar (#24939)
GitOrigin-RevId: f0342156fa22a1bcf73ae2e8afdcc6f330ba5d37
2025-04-28 08:05:33 +00:00
Jakob Ackermann
a9780ccf96 [clsi] merge sandboxed compiles config from Server Pro and SaaS (#25062)
* [clsi] merge sandboxed compiles config from Server Pro and SaaS

* [clsi] reorder fallback env vars

Co-authored-by: Mathew Evans <matt.evans@overleaf.com>

* [server-pro] bump version of expected release with these changes

---------

Co-authored-by: Mathew Evans <matt.evans@overleaf.com>
GitOrigin-RevId: bada93fec89bcc3f2bab85b6e60b2e27de88b9c2
2025-04-28 08:05:21 +00:00
Alf Eaton
247b4e274d Upgrade PDF.js to v5 (#24948)
* Reapply "Upgrade PDF.js to v5 (#24646)" (#24946)
* Upgrade core-js, caniuse-lite and babel

GitOrigin-RevId: 63398189301b5f5adc8a17b332d92dccfc26d612
2025-04-28 08:05:09 +00:00
Alf Eaton
9d290ae234 Add polyfills for AbortSignal.any and AbortSignal.timeout (#24958)
GitOrigin-RevId: d0fc041054e17f50b5b19343e06e857bd9635902
2025-04-28 08:05:04 +00:00
Brian Gough
ed9844b2ec Merge pull request #25088 from overleaf/bg-increase-verify-blob-timeout
increase script timeout to avoid flaky tests in backupVerifier

GitOrigin-RevId: 4a52178b0e03ca9a69f82f7ddfe866ce689592b7
2025-04-25 08:05:53 +00:00
M Fahru
87bca3601d Merge pull request #25065 from overleaf/mf-schedule-refresh-features-stripe
[web] Refresh user features for stripe subscription

GitOrigin-RevId: e0600e80bfc264f2c0229090f0d5ff7ef845d28e
2025-04-25 08:05:49 +00:00
M Fahru
bb3a123b8d Merge pull request #25041 from overleaf/mf-update-careers-footer-link
[web] Update careers footer link

GitOrigin-RevId: 25b797b16fa5829ec970b2854f93c6ab270ecf24
2025-04-25 08:05:41 +00:00
Jakob Ackermann
86d310c741 [web] clsi-cache: fix download of .blg files (#25083)
GitOrigin-RevId: 69c8f789b8f8fa4b241c7563722e9a1cb6f86244
2025-04-25 08:05:37 +00:00
Jakob Ackermann
8ed650f57a [web] make clsi-cache a premium feature (#25075)
* [web] stopOnFirstError=true does not conflict with =false locally

Allow stopOnFirstError to be enabled in the compile from cache and
disabled locally.
Compiles that passed with stopOnFirstError=true will also pass with
stopOnFirstError=false. The inverse does not hold, and we need to
recompile.

* [web] record event when using compile from cache

* [web] record event when falling back to clsi-cache

* [web] make clsi-cache a premium feature

* [k8s] clsi-cache: increase disk size for beta rollout

NOTE: As this is a premium feature and paid servers run in zones c+d, we
do not need to scale up clsi-cache in zone b for now.

* [web] enable full sampling of compile-result-backend events

* [web] fix frontend tests

* [web] be more verbose when determining access to clsi-cache feature

GitOrigin-RevId: 6fd663e16085187876eb225f7f33eeeaf69d2b2a
2025-04-25 08:05:33 +00:00
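The first bullet of the commit above describes an asymmetric compatibility rule for reusing a cached compile. A tiny sketch of that rule as a predicate (function and field names are hypothetical, not the actual web code):

```js
// A compile cached with stopOnFirstError enabled also satisfies a request with
// it disabled; the reverse combination has to trigger a fresh compile.
function canReuseCachedCompile(cachedOptions, requestedOptions) {
  if (requestedOptions.stopOnFirstError && !cachedOptions.stopOnFirstError) {
    return false // cache was built leniently, request is strict: recompile
  }
  return true
}

canReuseCachedCompile({ stopOnFirstError: true }, { stopOnFirstError: false }) // true
canReuseCachedCompile({ stopOnFirstError: false }, { stopOnFirstError: true }) // false
```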
Antoine Clausse
13270dee2d [web] Tear down sidebar-navigation-ui-update, Update project-list look in SP/CE (#24920)
* Remove hacks that conditionally hid `ds-nav` survey

* Remove `getAssignment` of `sidebar-navigation-ui-update`

* Remove `hasDsNav`: make it true everywhere

* Remove dead code

* Update Footer so thin footer is shown in SP/CE

* Run `web$ make cleanup_unused_locales` & `bin/run web npm run extract-translations`

* [server-pro] fix learn wiki tests following DS navigation changes

* [server-pro] tests: remove logout action before switching session

* [server-pro] tests: fix logout test

* [server-pro] tests: use new css class for sidebar on project dashboard

* Revert "should add a documentation entry to the nav bar" test change

---------

Co-authored-by: Jakob Ackermann <jakob.ackermann@overleaf.com>
GitOrigin-RevId: 93eb7a1b03bb4e54ad1770150d83778b8f7f6727
2025-04-25 08:05:29 +00:00
Tim Down
cf36767f03 Merge pull request #25080 from overleaf/revert-25073-td-upgrade-react-error-boundary
Revert "Upgrade react-error-boundary to version 5"

GitOrigin-RevId: 3301adb508eed26e4429c6aa912abf9e81c1d9f8
2025-04-25 08:05:25 +00:00
Tim Down
247f04557c Merge pull request #25073 from overleaf/td-upgrade-react-error-boundary
Upgrade react-error-boundary to version 5

GitOrigin-RevId: ebccd35e10084aa221c437c09ddfdb86f6272cf3
2025-04-25 08:05:20 +00:00
Rebeka Dekany
3c154955b2 [web] Ensure buttons and links have discernible text on the editor page (#25005)
* Use OLIconButton for buttons lacking visible text

* Ensure correct ARIA attr for the Layout dropdown

* Add a tooltip to Layout button

* Add "Open dev tool" aria-label

* Add accessible names to the rail tab items

* Remove unused IconProps export

GitOrigin-RevId: 185937384cf5ec87b32238111d6621ac07789fb4
2025-04-25 08:05:16 +00:00
Brian Gough
612981bedb Merge pull request #24993 from overleaf/bg-history-buffer-use-cache-in-persist-changes
use chunkBuffer in persistChanges

GitOrigin-RevId: dd4cdf39ba53c3becf306119fed7eacfe67de15d
2025-04-25 08:05:12 +00:00
Brian Gough
a7466a7291 Merge pull request #24966 from overleaf/bg-history-buffer-optimised-get
add getCurrentChunkIfValid function

GitOrigin-RevId: e947a99ac928b58048a87cea0be1da34fcf3a9f8
2025-04-25 08:05:04 +00:00
Tim Down
fb50d429b4 Merge pull request #24965 from overleaf/td-downshift-9-upgrade
Upgrade Downshift to version 9

GitOrigin-RevId: b36904ab0c82c09a633a25cd6fed651d7c8b19f7
2025-04-24 08:06:24 +00:00
Antoine Clausse
8ec9cd21b4 [latexqc] Update vite version (#25056)
Fixes https://github.com/overleaf/internal/security/dependabot/1398

GitOrigin-RevId: e759d529609df79c63e0c6c45cda941d45fe7ec8
2025-04-24 08:06:19 +00:00
Antoine Clausse
5861e4160c [latexqc] Continue the ESM migration (#24743)
* Set `"type": "module"` in package.json

* Update imports to include file extensions

* Update imports to include file extensions in test files

* Convert webpack files to ESM

* Update configureStore.js for ESM

* Make static-assets CJS, and rename with .cjs

* Misc: update imports to `node:path`

* Add vitest; Remove chai, mocha, sinon, expect, jsdom

* Return promises in conversion-controller, to make testing easier

* Update tests to vitest syntax

* Fix JSON import syntax and babel config

* Import combobo from NPM

* Rename `app.js` to `app.cjs`

This should prevent errors in the production app (https://console.cloud.google.com/cloud-build/builds;region=us-east1/8b42465c-0d07-4a08-b856-aa18c13fae46?project=overleaf-ops)

### Before

```
node@ea395ce612a8:/overleaf/services/latexqc$ node app.js
[...]
ReferenceError: require is not defined in ES module scope, you can use import instead
This file is being treated as an ES module because it has a '.js' file extension and '/overleaf/services/latexqc/package.json' contains "type": "module". To treat it as a CommonJS script, rename it to use the '.cjs' file extension.
    at file:///overleaf/services/latexqc/app.js:2:1
    at ModuleJob.run (node:internal/modules/esm/module_job:234:25)
    at async ModuleLoader.import (node:internal/modules/esm/loader:473:24)
    at async asyncRunEntryPointWithESMLoader (node:internal/modules/run_main:122:5)
```

### After

```
node@ea395ce612a8:/overleaf/services/latexqc$ node app.cjs
--------------------------
===>  😊 Starting Server . . .
===>  Environment: production
===>  Listening on port: 8082
--------------------------
```

* Rename `app.js` to `app.cjs` (Fixup forgotten one)

* Rename `app.js` to `app.cjs` (Fixup 2)

* Rename asset files to `.js`

Fixes:
```
Refused to execute script from 'https://staging-latexqc.ieee.org/assets/9d6cc24692a7f912ff06.cjs' because its MIME type ('application/octet-stream') is not executable, and strict MIME type checking is enabled.
```
GitOrigin-RevId: f0b618f7fc4062fb6bdc3779dfc5defc5f72d614
2025-04-24 08:06:14 +00:00
M Fahru
ec763c69a7 Merge pull request #24801 from overleaf/mf-swap-trial-cta-tear-down
[web] Tear down `swap-trial-cta` and use the `enabled` variant as the default

GitOrigin-RevId: 2ba2f5c864908871797420aff976918a4e224ad3
2025-04-24 08:06:06 +00:00
David
1cfd5ca948 Merge pull request #24950 from overleaf/dp-writeful-editor-switch
Expose isNewEditor in window.overleaf.unstable.store for use by writefull

GitOrigin-RevId: be68b3bc62ea1bfb631f349475888b1153e47cfd
2025-04-24 08:05:58 +00:00
Brian Gough
12b96e40a5 Merge pull request #24977 from overleaf/bg-history-buffer-improve-tests
add more tests for chunk buffer in history-v1

GitOrigin-RevId: 3cfa2492efd67597a2782ca7a5671889a67049d5
2025-04-24 08:05:53 +00:00
Brian Gough
626416ed02 Merge pull request #24945 from overleaf/bg-redis-buffer-stats-script
add script for listing buffer stats from redis

GitOrigin-RevId: 7836563d51a5c6ded264d0e709d5cfcda70596e3
2025-04-24 08:05:49 +00:00
Kristina
6166a51552 Merge pull request #24922 from overleaf/kh-add-customer-portal-links
[web] add stripe customer portal link

GitOrigin-RevId: 6baaf51d4dd89ef779229ad17603529db06cf396
2025-04-24 08:05:44 +00:00
Kristina
6f1f1ba744 Merge pull request #24848 from overleaf/mf-save-stripe-checkout-data-to-mongo-db
[web] Save stripe checkout data to mongodb

GitOrigin-RevId: 537778a041f92f43ccf6455c29a56c7a961ce765
2025-04-24 08:05:40 +00:00
Jakob Ackermann
dd3956f5f4 [document-updater] delete dead code for handling JSON documents (#25036)
GitOrigin-RevId: 59a2c5f0174cd041ebda1ea4898114e6fb6d41a5
2025-04-24 08:05:35 +00:00
David
52898ac83b Merge pull request #25031 from overleaf/dp-page-number-color
Fix pdf page number color in safari

GitOrigin-RevId: 14745d50a16df1aa5129b068d5dcf3ae5bc81e8b
2025-04-24 08:05:27 +00:00
Jakob Ackermann
fe1129c2cf [web] avoid accessing the sharejs snapshot directly (#25037)
* [web] avoid accessing the sharejs snapshot directly

* [web] limit API interface for sharejs types

GitOrigin-RevId: faece372128e4580376e32fa93aa8fedf1e02957
2025-04-24 08:05:15 +00:00
Jakob Ackermann
7f086b21c8 [document-updater] modernize ApplyingUpdatesToADocTests (#25038)
- use beforeEach to ensure tests do not interfere with each other
  Notably, the 'when the ops come in a single linear order' test suite
  had state-changing tests that were dependent on the correct order.
  Also, the assignment of 'this.firstOpTimestamp' was in a test.
- consolidate populating project and doc ids
  The doc reference in this.update was undefined.
- fix doc reference in updates
  There were two misuses of 'doc_id' instead of 'doc'.
- Move mocking of MockWebApi.getDocument to the top and use
  sinon.resetHistory() or sinon.restore() for controlling the stub.
- Add another test for simple transforming of updates
  See 'when another client is sending a concurrent update'.

GitOrigin-RevId: 61ca8a1b0172920ad6ab1b604a9b9259cebddaad
2025-04-24 08:05:10 +00:00
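The bullet points above describe moving shared, order-dependent setup into `beforeEach`. A small mocha sketch of that isolation pattern (ids and values are illustrative, not the actual document-updater fixtures):

```js
// State that used to be assigned inside one test and reused by the next is
// rebuilt before every test, so tests no longer depend on execution order.
const { expect } = require('chai')

describe('applying updates', function () {
  beforeEach(function () {
    this.projectId = 'project-1'
    this.docId = 'doc-1'
    this.firstOpTimestamp = Date.now() // fresh timestamp per test
  })

  it('records the first op timestamp', function () {
    expect(this.firstOpTimestamp).to.be.a('number')
  })
})
```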
Jimmy Domagala-Tang
b225b55e8d Merge pull request #24990 from overleaf/dk-paywall-modal-updates
Updates to AI assist interstitial modal

GitOrigin-RevId: d2365aaf36f1f61a39a0bd52357f4518736e2a7d
2025-04-23 08:05:35 +00:00
Jimmy Domagala-Tang
f95bf41824 Merge pull request #24697 from overleaf/jdt-show-addons-via-wf
Display When Ai Assist Is Granted Via Writefull

GitOrigin-RevId: 91f6e1843e2e1d1f7b3a49d95f31603e838c5545
2025-04-23 08:05:30 +00:00
David
d492512d9e Merge pull request #24878 from overleaf/mj-editor-redesign-experiment
[web] Move editor redesign to labs experiment

GitOrigin-RevId: 5f11ff29a6c392ff6e448a16450e2e65a2574097
2025-04-23 08:05:22 +00:00
Andrew Rumble
1c672e55f5 Minor bump of webpack and @pmmmwh/react-refresh-webpack-plugin
GitOrigin-RevId: 55421a1cd66e0ec7177470f38fe9f575837b6bac
2025-04-23 08:05:11 +00:00
David
9d858dcf0f Merge pull request #24961 from overleaf/dp-back-to-editor
Update toolbar in history view to add BackToEditorButton

GitOrigin-RevId: 4c260126a373b1b3a7c31f9f4b44f6ae7fba6f36
2025-04-23 08:05:06 +00:00
David
584db6c301 Merge pull request #24989 from overleaf/mj-ide-redesign-modal-content
[web] Update copy in editor redesign switcher modal

GitOrigin-RevId: 9df3d6d580fec2540d0254e1f3a65b3b059bc63f
2025-04-23 08:05:01 +00:00
M Fahru
a29280a1fe Merge pull request #24833 from overleaf/kh-add-stripe-get-subscription
[web] fetch Stripe subscription

GitOrigin-RevId: bffc31224aece584f4f1e3294bb1285d17f99195
2025-04-18 08:05:44 +00:00
Eric Mc Sween
315bde6f1b Merge pull request #24968 from overleaf/em-chunks-concurrency-postgres
Handle concurrency during chunk extension in the Postgres backend

GitOrigin-RevId: fd706b73deacf141cbd478d3ed47f298e6c6db72
2025-04-18 08:05:37 +00:00
Eric Mc Sween
2256697323 Merge pull request #24967 from overleaf/em-chunks-concurrency-pg-migration
Add closed column to PostgreSQL chunks

GitOrigin-RevId: 6babf7e94936ebfac31650aa7f190630e3288bbf
2025-04-18 08:05:29 +00:00
Jakob Ackermann
ee2338a33b [web] align criteria for fallback to clsi-cache (#24970)
* [web] rename helper for browser cache bug, avoid confusion w/ clsi-cache

* [web] align criteria for fallback to clsi-cache

Notably, avoid the fallback from inside pdf-caching when disabled.

GitOrigin-RevId: 3fd918de14eef59c45c28cc5b5e256048cb91528
2025-04-18 08:05:25 +00:00
Christopher Hoskin
94e12ec404 Merge pull request #24971 from overleaf/csh-issue-19131-diable-filestore-endpoints
Disable the old filestore endpoints if user_files not defined

GitOrigin-RevId: f7d188cef3fe53835070903448690baff4ebad98
2025-04-18 08:04:59 +00:00
Domagoj Kriskovic
26032d6b77 Add origin for ai assist interstitial modal (#24904)
GitOrigin-RevId: 035edf8791b0afdc8c320598d09d99f5e84acc47
2025-04-18 08:04:55 +00:00
Andrew Rumble
adb9723d62 Log a warning when a user is rejected from accessing real-time by CORS
GitOrigin-RevId: 04a7ffbc24654c876688db446164bf36a162828f
2025-04-18 08:04:47 +00:00
Antoine Clausse
b901bb6c75 [web] Update fetch-mock to version 12 (#24837)
* Update fetch-mock to version 12

* Replace `fetchMock.done` by `fetchMock.callHistory.done`

* Replace `…Mock.called` by `…Mock.callHistory.called`

* Replace `fetchMock.reset` by `fetchMock.hardReset`

* Replace `fetchMock.restore` by `fetchMock.hardReset`

* Replace `fetchMock.resetHistory` by `fetchMock.clearHistory`

* Replace `fetchMock.calls` by `fetchMock.callHistory.calls`

* Replace `fetchMock.flush` by `fetchMock.callHistory.flush`

* Update tests for fetch-mock version 12

See https://www.wheresrhys.co.uk/fetch-mock/docs/Usage/upgrade-guide

* Update stories for fetch-mock version 12

* Remove `overwriteRoutes` option

* Add `fetchMock.spyGlobal()` to storybook

* Remove deprecated `sendAsJson` param

* Replace `fetchMock.hardReset()` by `fetchMock.removeRoutes().clearHistory()`

* Fixup fetch-mock in storybook:

Call `mockGlobal` inside the hook, call `removeRoutes` and `unmockGlobal` on cleanup

Behaviour can be tested by navigating between

https://storybook.dev-overleaf.com/main/?path=/story/editor-ai-error-assistant-compile-log-entries--first-log-entry
https://storybook.dev-overleaf.com/main/?path=/story/editor-ai-error-assistant-compile-log-entries--rate-limited

https://storybook.dev-overleaf.com/main/?path=/story/project-list-notifications--project-invite
https://storybook.dev-overleaf.com/main/?path=/story/project-list-notifications--project-invite-network-error

And clicking the buttons

GitOrigin-RevId: 35611b4430259e4c21c3d819ad18b2e6dab66242
2025-04-17 08:06:24 +00:00
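The renames listed in the commit above map the old fetch-mock calls onto the v12 `callHistory` API. A minimal sketch of a test written against v12, using only the calls named in the commit (route and assertion are illustrative):

```js
import fetchMock from 'fetch-mock'
import { expect } from 'chai'

describe('project list request', function () {
  beforeEach(function () {
    fetchMock.mockGlobal() // v12: explicitly take over global fetch
    fetchMock.get('https://example.com/api/projects', { projects: [] })
  })

  afterEach(function () {
    // v12 replacement for the old reset()/restore()
    fetchMock.removeRoutes().clearHistory()
    fetchMock.unmockGlobal()
  })

  it('hits the projects endpoint once', async function () {
    await fetch('https://example.com/api/projects')
    expect(fetchMock.callHistory.called('https://example.com/api/projects')).to
      .be.true
  })
})
```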
Antoine Clausse
fa62529d82 [clsi] Replace diskusage by fs (#24789)
* Replace `diskusage` by `fs` in clsi

* Replace `diskusage` by `fs` in clsi-cache

* Update disk space calculations to include block size in bytes

Co-authored-by: Jakob Ackermann <jakob.ackermann@overleaf.com>

* Add warning comments about Docker-for-Mac fs stats being off by a factor

---------

Co-authored-by: Jakob Ackermann <jakob.ackermann@overleaf.com>
GitOrigin-RevId: 02ea07e531b89bb3d10ddfe780348b19cbddad1f
2025-04-17 08:06:16 +00:00
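The commit above swaps the `diskusage` dependency for plain `fs` and notes that block size has to be factored in. A minimal sketch of that calculation with Node's built-in `statfs` (the path and wrapper are illustrative; the commit also warns that Docker-for-Mac can report skewed stats):

```js
// Free disk space via fs.statfs: counts are in filesystem blocks, so free
// bytes = available blocks * block size in bytes.
const fs = require('node:fs/promises')

async function getFreeDiskBytes(path) {
  const stats = await fs.statfs(path)
  // bavail = blocks available to unprivileged users, bsize = block size
  return stats.bavail * stats.bsize
}

getFreeDiskBytes('/overleaf/services/clsi/compiles').then(bytes => {
  console.log(`free: ${(bytes / 1024 ** 3).toFixed(1)} GiB`)
})
```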
Andrew Rumble
cde7ff5d2f Don't run the verifier loop when app created from test
GitOrigin-RevId: e8a565ae00019de66cbbaf961e0ee3ace90f800e
2025-04-17 08:06:11 +00:00
Eric Mc Sween
82c95dd82d Merge pull request #24930 from overleaf/em-ae-jd-full-project-search-launch
Launch full project search from regular search

GitOrigin-RevId: 3ef4f6923a0aeef6ab68768bab79e4be32f09eb0
2025-04-17 08:06:07 +00:00
Christopher Hoskin
778221c0af Merge pull request #24924 from overleaf/csh-issue-18692-404-if-templates-not-configured
Only define the template endpoints if configured

GitOrigin-RevId: 9ec9d411d6aed8774a880e80d0559a3491e832de
2025-04-17 08:06:03 +00:00
Kristina Hjertberg
f8f2585164 [web] mv PaymentService to modules
GitOrigin-RevId: 73d739f53d96ff9e9d51a535907dbdc878aa6624
2025-04-17 08:05:58 +00:00
M Fahru
04d36122bd Merge pull request #24659 from overleaf/mf-init-plans-page-bs5
[web] Migrate plans page and interstitial payment page to bootstrap 5 with feature flag

GitOrigin-RevId: 4491b2205a19b943e8d8bf13f699f92278f5e183
2025-04-17 08:05:54 +00:00
Mathias Jakobsen
dfc00ed8c1 Merge pull request #24949 from overleaf/mj-papers-notification-papers-feature
[web] Show papers notification to users with papers feature

GitOrigin-RevId: 376ed7a43bec28766f89101588ed2546ec3093d2
2025-04-17 08:05:49 +00:00
Jessica Lawshe
262a1d09c6 Add BS3 templates stylesheet back
Still used by portals

GitOrigin-RevId: 0d85d437c3f771fbb6ff76048f9e49654c88cf83
2025-04-17 08:05:45 +00:00
Andrew Rumble
5e76a97bc4 Log more information for failing tests
GitOrigin-RevId: 22ae2151b476b4f810ab9223b42f9cb6fe9d7442
2025-04-17 08:05:40 +00:00
Eric Mc Sween
aa367bcd1d Merge pull request #24897 from overleaf/em-chunks-concurrency
Concurrency handling for history chunks with Mongo backend

GitOrigin-RevId: 30abe11237c80e7803c8934a20a57a7223afa85a
2025-04-17 08:05:36 +00:00
Mathias Jakobsen
fe68930e9a Merge pull request #24733 from overleaf/mj-labs-without-features
[web] Move labs experiments from features to own property

GitOrigin-RevId: 22dee79758e5fa65fc31d9b8d4b155443cd585e7
2025-04-17 08:05:28 +00:00
Mathias Jakobsen
3a0c71175b Merge pull request #24891 from overleaf/mj-ide-redesign-hide-unavailable-menu-options
[web] Editor Redesign: Hide unavailable menu items

GitOrigin-RevId: 4aaee8befb6234a00bd4ec6087dcfd1417878576
2025-04-17 08:05:24 +00:00
Mathias Jakobsen
8fc206073b Merge pull request #24923 from overleaf/dp-mj-papers-notification
Add notification banner for Papers integration marketing

GitOrigin-RevId: 625c3afcc6ca617fd01af58a05a6c85f7126398b
2025-04-17 08:05:19 +00:00
Alf Eaton
34d5564abc Revert "Upgrade PDF.js to v5 (#24646)" (#24946)
This reverts commit 3ef46af6363aab5b5b007b6c9d72decae65a36ab.

GitOrigin-RevId: 9c3671b3ff196bb62ff547210a1138cb603d6791
2025-04-17 08:05:15 +00:00
David
c1fc5b88b3 Merge pull request #24916 from overleaf/dp-git-logo
Use orange version of Git logo in account settings

GitOrigin-RevId: 10067d4190d54ea14183b20620001237090758da
2025-04-17 08:05:03 +00:00
Alf Eaton
a1098a921c Upgrade PDF.js to v5 (#24646)
GitOrigin-RevId: 3ef46af6363aab5b5b007b6c9d72decae65a36ab
2025-04-17 08:04:59 +00:00
Andrew Rumble
a1a3019d1e Debugging flakey test
GitOrigin-RevId: 0da9b4ee107162aed88f2f782e80a6325fb48622
2025-04-17 08:04:51 +00:00
Tim Down
34be8b75ad Merge pull request #24936 from overleaf/td-warning-badge-light
Use dark-on-light for warning badge by default

GitOrigin-RevId: 6259ec08c9c31f54dbdad6261a966f638303cc3b
2025-04-17 08:04:46 +00:00
Brian Gough
cedc96bdd7 Merge pull request #24906 from overleaf/bg-history-redis-read-cache
implement read cache for history-v1 chunks

GitOrigin-RevId: 128de7e9380fd489f68d5045d3333a27018845c2
2025-04-16 08:06:18 +00:00
Jakob Ackermann
457d61fa9a [web] avoid logging when password is too similar to email (#24914)
GitOrigin-RevId: 122e1790e4827aa26da712011e946ea025a08300
2025-04-16 08:06:11 +00:00
David
35902407b3 Update integration logos in editor redesign (#24902)
* Update git bridge logo based on current theme

* Use GithubLogo black instead of GithubLogo

* Add missing logos

* Update dropbox logo

* Revert default DropboxLogoBlack size

* Remove white background from DropboxLogo

* Rename GitBridgeLogo to GitLogo

GitOrigin-RevId: 00d08716d9ccb0df7912dba39ec0477d672dc56d
2025-04-16 08:05:56 +00:00
Domagoj Kriskovic
1f7bfb4737 "default-visual-for-beginners" split test tear down (#24759)
GitOrigin-RevId: cdd6c8d3ffb60b9ae3e972f2267f1838cf720c83
2025-04-16 08:05:47 +00:00
Jakob Ackermann
6ac5142b41 [web] remove dead endpoints for downloading clsi output w/o buildId (#24825)
The corresponding clsi endpoints have already been removed.

GitOrigin-RevId: 0414040557f50dae6ad58228eefae7b2739a5656
2025-04-16 08:05:39 +00:00
Jakob Ackermann
39110d9da9 [clsi-cache] check compiler settings before using compile from cache (#24845)
* [web] provide an actual rootFolder from EditorProviders in tests

- Fixup SocketIOMock and ShareJS mocks to provide the complete interface
- Extend SocketIOMock interface to count event listeners
- Fixup test that did not expect to find a working rootDoc

* [web] expose imageName from ProjectContext

* [clsi-cache] check compiler settings before using compile from cache

* [web] avoid fetching initial compile from clsi-cache in PDF detach tab

GitOrigin-RevId: e3c754a7ceca55f03a317e1bc8ae45ed12cc2f02
2025-04-16 08:05:35 +00:00
Tim Down
ef958f97a1 Merge pull request #24644 from overleaf/td-contact-form-react-email-validation
Add validation to email field in React version of contact form

GitOrigin-RevId: 9804af9fd5c4cf8e29e47a8661c50198eb0f6bcf
2025-04-16 08:05:23 +00:00
Tim Down
832028e92d Merge pull request #24718 from overleaf/td-bs5-warning-bg
Fix for form warning text colour

GitOrigin-RevId: 48a6d710548d919a5968a04709b41e4f1b2cd004
2025-04-16 08:05:19 +00:00
Jakob Ackermann
f0edc7ba00 [web] update the projects lastUpdated timestamp when changing file-tree (#24867)
* [misc] freeze time before any other unit test setup steps

Freezing it after other work (notably sandboxed-module imports) will
result in flaky tests.

* [web] update the projects lastUpdated timestamp when changing file-tree

GitOrigin-RevId: b82b2ff74dc31886f3c4bd300375117eead6e0cd
2025-04-16 08:05:14 +00:00
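The commit above explains that the clock must be frozen before any other unit-test setup, because freezing it after module imports made tests flaky. A small sketch of that ordering with sinon fake timers (module name and date are illustrative):

```js
// In a mocha test file: install fake timers first, before requiring the module
// under test, so every import sees the same frozen "now". Installing them
// after other setup is what the commit identifies as the source of flakiness.
const sinon = require('sinon')

const clock = sinon.useFakeTimers(new Date('2025-04-16T00:00:00Z'))

const ModuleUnderTest = require('./ModuleUnderTest') // hypothetical module

after(function () {
  clock.restore()
})
```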
Brian Gough
d6c2188f2d Merge pull request #24903 from overleaf/bg-fix-backup-scheduler
remove history.id check from processPendingProjects function

GitOrigin-RevId: 017ab7c9bf693ed4077d4619574154166af04fe3
2025-04-16 08:05:06 +00:00
David
bc95219bf6 Merge pull request #24862 from overleaf/dp-check-logs
Create openTab utility in rail-context to open rail at specific tab

GitOrigin-RevId: bfac2597fdd66db6dd8280873be97096f2b812dc
2025-04-16 08:05:01 +00:00
David
3e49fd6967 Merge pull request #24860 from overleaf/dp-chat-names-dark
Fix chat message names in dark mode

GitOrigin-RevId: dd919e3398a53b981939add5e3300c6427133b39
2025-04-16 08:04:57 +00:00
David
846ccd3aac Merge pull request #24859 from overleaf/dp-history-file-tree
Always show file tree in history view in new editor

GitOrigin-RevId: e993aae4c1625ef7fb0a716ae5bff8edb0e8d0d0
2025-04-16 08:04:52 +00:00
David
9babb6283b Merge pull request #24866 from overleaf/dp-toolbar-truncation
Add explicit width to codemirror toolbar wrapper to ensure it truncates correctly

GitOrigin-RevId: 458788daa8e634aebcb5f61dc4ce78f871b03913
2025-04-16 08:04:47 +00:00
Brian Gough
62c8af2a93 Merge pull request #24856 from overleaf/bg-history-redis-buffer-tweaks
fix error logging for chunk cache mismatches

GitOrigin-RevId: 85344c4025fdaa6ee916c5438ff38c7c49f4bce3
2025-04-15 08:06:27 +00:00
Brian Gough
3850e97446 Merge pull request #24857 from overleaf/bg-backup-scheduler-fix-for-broken-projects
handle broken projects in history backup

GitOrigin-RevId: a295d9d7a29715807a8172dae46e0fe3c14ecf12
2025-04-15 08:06:23 +00:00
Eric Mc Sween
397a546095 Merge pull request #24895 from overleaf/em-fix-history-changes
Properly serialize changes returned by the history changes endpoint

GitOrigin-RevId: a85fd8aede9316100d2cec901c1ab9b7d1faa9bb
2025-04-15 08:06:19 +00:00
Brian Gough
d8c5160349 Merge pull request #24858 from overleaf/bg-fix-server-ce-history-redis
add history-v1 redis configuration to server-ce

GitOrigin-RevId: 7ad37f6261165b29b7c16b92890b2103afaf47d7
2025-04-15 08:06:15 +00:00
M Fahru
3a5d24eb7a Merge pull request #24524 from overleaf/mf-gallery-redesign-leftover-remove
[web] Remove leftover gallery redesign split test code

GitOrigin-RevId: 484272bfaae7db5b5329ae19ca556ee8e4a33650
2025-04-15 08:06:05 +00:00
Kristina Hjertberg
a3b908e255 [web] add payment field to Subscription
GitOrigin-RevId: 3fd569372636f880b5e2d4e5dd98f6e28067c464
2025-04-15 08:05:57 +00:00
Jimmy Domagala-Tang
0e49a5d9b0 Merge pull request #24391 from overleaf/jdt-move-wf-features-to-ol
Move Writefull table + equation generator to OL toolbar

GitOrigin-RevId: b7bc5b1cde5687360c4f60cb64924da139ccfbe9
2025-04-15 08:05:47 +00:00
Eric Mc Sween
958ff0f3bf Merge pull request #24847 from overleaf/em-chunks-index
Include closed chunks in active chunks index

GitOrigin-RevId: ff75959737908afa72cee2c2784abb476c115e80
2025-04-15 08:05:33 +00:00
Tim Down
773cbc92eb Merge pull request #24592 from overleaf/td-socket-io-origins
Use updated socket.io with CORS origin check in real-time

GitOrigin-RevId: a1a874ba298e145aa4c9dd55ef723d79843f8145
2025-04-15 08:05:25 +00:00
Domagoj Kriskovic
b9f1013f37 Update "collaborator-invited" analytics event (#24639)
GitOrigin-RevId: e469c50dadea568032ec825fa99b33b80d0b3964
2025-04-15 08:05:18 +00:00
Domagoj Kriskovic
30c5495b21 Update share modal copy for reviewer role (#24694)
* Update share modal copy for reviewer role

* fix ShareProjectModal test

* use "limited_to_n_collaborators_per_project"

GitOrigin-RevId: f5afcb18a3921a6b1132a4c323af9d8395a7d07b
2025-04-15 08:05:14 +00:00
Brian Gough
835e14b8b2 Merge pull request #24768 from overleaf/bg-history-redis-buffer
test redis caching when loading latest chunk in history-v1

GitOrigin-RevId: f0ee09e5e9e1d7605e228913cb8539be4134e1f7
2025-04-15 08:05:03 +00:00
Eric Mc Sween
fb03fe4d26 Merge pull request #24776 from overleaf/em-project-history-unused-endpoints
Remove unused endpoints in project-history

GitOrigin-RevId: 2940c1c1973177e3200cb78884b307f708fd88c3
2025-04-14 08:05:08 +00:00
Jakob Ackermann
4a17a1e713 [web] gracefully access compile stats for event (#24818)
* [web] gracefully access compile stats for event

* [clsi] always emit stats and timings

GitOrigin-RevId: 959e5fe1508245ffecfab1219fd86e53b210fca1
2025-04-14 08:04:51 +00:00
Liangjun Song
c60ceaf932 Merge pull request #24466 from overleaf/ls-script-runner
Script runner

GitOrigin-RevId: 4cc7004f05177dba2a2151aa6db7e75fb679d11d
2025-04-14 08:04:37 +00:00
CloudBuild
8ad335cf47 auto update translation
GitOrigin-RevId: b09c8b2b9c0e29f3061aaacb7dbfff4ccaaec466
2025-04-11 08:06:17 +00:00
M Fahru
14308f4fba Merge pull request #23085 from overleaf/mf-teardown-bs5-register-login
[web] Teardown bs5 login and register page

GitOrigin-RevId: e2335ecbe89212984acd24fbbaa6f9549dc45b96
2025-04-11 08:06:13 +00:00
Brian Gough
fe8d6392d5 Merge pull request #24793 from overleaf/bg-update-build-scripts-to-include-history-redis
update build scripts to include HISTORY_REDIS_HOST

GitOrigin-RevId: da0f317c80401067c0f4aa772196cb2f24849b8e
2025-04-11 08:06:03 +00:00
Eric Mc Sween
dd526693f5 Merge pull request #24775 from overleaf/em-bypass-project-history
Call history-v1 directly for latest history and changes

GitOrigin-RevId: 39c32dd50ff7875f82bbb2716da753a9c3e6e81d
2025-04-11 08:05:56 +00:00
Eric Mc Sween
42aea53307 Merge pull request #24754 from overleaf/em-promisify-history-controller
Promisify HistoryController

GitOrigin-RevId: e1783acb8c7ba7e00b109a4f4a514973cc3163d2
2025-04-11 08:05:52 +00:00
Eric Mc Sween
3aa579f232 Merge pull request #24736 from overleaf/em-history-get-changes
Endpoint for getting history changes

GitOrigin-RevId: b96afed0492522d62df9c24390f76e5490afbb44
2025-04-11 08:05:48 +00:00
David
9cd7e49daf Merge pull request #24332 from overleaf/dp-review-panel
Add review panel to new editor

GitOrigin-RevId: 918a29d81fcfaf60bc4af8a20a25545d79c4a3ed
2025-04-11 08:05:31 +00:00
Kristina
9f22564ca3 Merge pull request #24680 from overleaf/kh-rename-recurly-namespace
[web] rename recurly namespace

GitOrigin-RevId: b7cfd26923d47bd7f3de4140be24d2d1ef20f6c8
2025-04-11 08:05:20 +00:00
Kristina
af46bcdace Merge pull request #24396 from overleaf/kh-rm-unused-recurly-subscription-property
[web] rm dead code in `SubscriptionViewModelBuilder`

GitOrigin-RevId: 64006e3f51f61c40e2d4e01fd04961546319ffdd
2025-04-11 08:05:16 +00:00
Mathias Jakobsen
c27c7bbe83 Merge pull request #24720 from overleaf/mj-teardown-write-and-cite-split-tests
[web] Tear down Write and Cite related split tests

GitOrigin-RevId: 0002829f53b0f28d482c79ed2cbf61066690ae02
2025-04-11 08:05:05 +00:00
Andrew Rumble
814a55809b Reinstate options param in getDoc
Removed in #23209 - used by the admin restore doc functionality.

GitOrigin-RevId: bb3b682ef19719956236ec24807e19cbc09f049c
2025-04-11 08:04:48 +00:00
CloudBuild
29b0dd0725 auto update translation
GitOrigin-RevId: 9010716cae7147df527797d531b902f40f20352c
2025-04-10 08:06:36 +00:00
Jimmy Domagala-Tang
f7f4a03abb Merge pull request #24523 from overleaf/jdt-prevent-bundle-dbl-buys
Redirect bundle purchases when users already have it

GitOrigin-RevId: d8e3c0256db08c08c2be24f38caef91fb26b90e8
2025-04-10 08:06:28 +00:00
Mathias Jakobsen
f11a6a6b87 Merge pull request #24717 from overleaf/dp-github-typo
Give GitHub a capital H in integrations panel description

GitOrigin-RevId: 8644e638c3d557221c1cf3b42b2884d9fdc4572c
2025-04-10 08:06:06 +00:00
Mathias Jakobsen
6207c853ef Merge pull request #24708 from overleaf/mj-rail-element-tooltips
[web] Add tooltips to rail buttons

GitOrigin-RevId: 1b75c48f30b593e9e725bcb9658c4c15c9fe6a66
2025-04-10 08:06:01 +00:00
Mathias Jakobsen
c183176fd3 Merge pull request #24705 from overleaf/mj-ide-rail-hover-state
[web] Editor redesign: Add hover state colors to rail

GitOrigin-RevId: 9b4a19a913963fcd4b76cf24467b8c1ad6fb1fab
2025-04-10 08:05:57 +00:00
Mathias Jakobsen
15663796ad Merge pull request #24703 from overleaf/mj-ide-align-icon
[web] Editor redesign: Align logo and home button with rail tabs

GitOrigin-RevId: 837651bb55348a043ec16afeffb508bc1d723689
2025-04-10 08:05:52 +00:00
Mathias Jakobsen
e670024f5c Merge pull request #24678 from overleaf/mj-ide-view-help-menu
[web] Add remaining options to menu bar

GitOrigin-RevId: cf6cc6c2aaf450e362588c514e1a87e923a611b4
2025-04-10 08:05:48 +00:00
Mathias Jakobsen
bdf0194fc8 [web] Add editor commands to command registry and toolbar (#24538)
* [web] Add editor commands to command registry and toolbar

* [web] Omit empty groups

* [web] Editor redesign: Move toolbar commands to custom hook

* [web] Disable editor commands when editor is not visible

GitOrigin-RevId: be9f4060fc44e51223e16860fdcf6698c927998c
2025-04-10 08:05:43 +00:00
Brian Gough
4ba0e97b95 Merge pull request #24749 from overleaf/bg-history-redis-buffer
add history redis to history-v1

GitOrigin-RevId: 70dc1aee809ad17902c93c020f3804c0f1429238
2025-04-10 08:05:35 +00:00
Brian Gough
d85dbe429d Merge pull request #24745 from overleaf/bg-history-use-consistent-import-for-chunk-store
use consistent import for chunk_store

GitOrigin-RevId: 427b148c53c9d0913b2cdfdc634273a1d8ece060
2025-04-10 08:05:31 +00:00
Jakob Ackermann
d99ba08d01 [clsi] run SyncTeX in specific output dir rather than compile dir (#24690)
* [clsi] drop support for docker-in-docker

* [clsi] run SyncTeX in specific output dir rather than compile dir

* [clsi] store output.synctex.gz outside of tar-ball in clsi-cache

* [clsi] add documentation for rewriting of docker bind-mounts

* [server-pro] update env vars for sandboxed compiles in sample config

GitOrigin-RevId: 8debd7102ac612544961f237aa4ff1c530aa3da3
2025-04-10 08:05:26 +00:00
Jakob Ackermann
b831a0b3f7 [clsi-cache] frontend (#24389)
* [clsi-lb] forward ?clsiserverid=cache requests to clsi-cache

* [web] use clsi-cache in frontend

* [web] upgrade compile from cache to full compile when triggered inflight

* [web] fix pdf-js-viewer.spec.tsx tests -- add ?clsiserverid=foo to url

* [web] fix renamed reference after merge

* [web] fix download of other output files and use specific build

* [web] consolidate validation of filename into ClsiCacheHandler

* [web] remove unused projectName from getLatestBuildFromCache

* [web] avoid hitting the same clsi-cache instance first all the time

* [web] update documentation

GitOrigin-RevId: d48265a7ba89d6731092640e1492bc9f103f5c33
2025-04-10 08:05:22 +00:00
Jakob Ackermann
b538d56591 [clsi-cache] backend (#24388)
* [clsi-cache] initial revision of the clsi-cache service

* [clsi] send output files to clsi-cache and import from clsi-cache

* [web] pass editorId to clsi

* [web] clear clsi-cache when clearing clsi cache

* [web] add split-tests for controlling clsi-cache rollout

* [web] populate clsi-cache when cloning/creating project from template

* [clsi-cache] produce less noise when populating cache hits 404

* [clsi-cache] push docker image to AR

* [clsi-cache] push docker image to AR

* [clsi-cache] allow compileGroup in job payload

* [clsi-cache] set X-Zone header from latest endpoint

* [clsi-cache] use method POST for /enqueue endpoint

* [web] populate clsi-cache in zone b with template data

* [clsi-cache] limit number of editors per project/user folder to 10

* [web] clone: populate the clsi-cache unless the TeXLive release changed

* [clsi-cache] keep user folder when clearing cache as anonymous user

* [clsi] download old output.tar.gz when synctex finds empty compile dir

* [web] fix lint

* [clsi-cache] multi-zonal lookup of single build output

* [clsi-cache] add more validation and limits

Co-authored-by: Brian Gough <brian.gough@overleaf.com>

* [clsi] do not include clsi-cache tar-ball in output.zip

* [clsi-cache] fix reference after remaining constant

Co-authored-by: Alf Eaton <alf.eaton@overleaf.com>

* [web] consolidate validation of filename into ClsiCacheHandler

* [clsi-cache] extend metrics and event tracking

- break down most of the clsi metrics by label
  - compile=initial - new compile dir without previous output files
  - compile=recompile - recompile in existing compile dir
  - compile=from-cache - compile using previous clsi-cache
- extend segmentation on compile-result-backend event
  - isInitialCompile=true - found new compile dir at start of request
  - restoredClsiCache=true - restored compile dir from clsi-cache

* [clsi] rename metrics labels for download of clsi-cache

This is in preparation for synctex changes.

* [clsi] use constant for limit of entries in output.tar.gz

Co-authored-by: Eric Mc Sween <eric.mcsween@overleaf.com>

* [clsi-cache] fix cloning of project cache

---------

Co-authored-by: Brian Gough <brian.gough@overleaf.com>
Co-authored-by: Alf Eaton <alf.eaton@overleaf.com>
Co-authored-by: Eric Mc Sween <eric.mcsween@overleaf.com>
GitOrigin-RevId: 4901a65497af13be1549af7f38ceee3188fcf881
2025-04-10 08:05:17 +00:00
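The metrics bullet above breaks compiles down by a `compile` label and adds segmentation to the compile-result-backend event. A hypothetical prom-client sketch of such a labelled counter (the real services use Overleaf's own metrics wrapper, so these names are illustrative only):

```js
// Count compiles by how the compile dir was obtained.
const { Counter } = require('prom-client')

const compiles = new Counter({
  name: 'clsi_compiles_total',
  help: 'Compiles broken down by compile dir source',
  labelNames: ['compile'],
})

// compile=initial    - new compile dir without previous output files
// compile=recompile  - recompile in an existing compile dir
// compile=from-cache - compile dir restored from clsi-cache
compiles.inc({ compile: 'from-cache' })
```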
Kristina
7920cd9d3d Merge pull request #24519 from overleaf/kh-add-payment-service-low-delta
[web] add `PaymentService` to `buildUsersSubscriptionViewModel`

GitOrigin-RevId: 543531226bad38d34b225dae28cf00a5e02e5558
2025-04-10 08:05:06 +00:00
Tim Down
28468e134c Merge pull request #24660 from overleaf/td-bs5-remove-react-bootstrap-0
Remove react-bootstrap 0.33.1

GitOrigin-RevId: c320a6b18c576afdc0fd49559915d3d2f3a7a1ef
2025-04-10 08:04:50 +00:00
882 changed files with 27325 additions and 18025 deletions

View file

@@ -29,6 +29,7 @@ services:
- DOCKER_RUNNER=true
- TEXLIVE_IMAGE=texlive-full # docker build texlive -t texlive-full
- COMPILES_HOST_DIR=${PWD}/compiles
- OUTPUT_HOST_DIR=${PWD}/output
user: root
volumes:
- ${PWD}/compiles:/overleaf/services/clsi/compiles

View file

@@ -75,9 +75,13 @@ services:
## Sandboxed Compiles: https://github.com/overleaf/overleaf/wiki/Server-Pro:-Sandboxed-Compiles
SANDBOXED_COMPILES: 'true'
SANDBOXED_COMPILES_SIBLING_CONTAINERS: 'true'
### Bind-mount source for /var/lib/overleaf/data/compiles inside the container.
SANDBOXED_COMPILES_HOST_DIR: '/home/user/sharelatex_data/data/compiles'
SANDBOXED_COMPILES_HOST_DIR_COMPILES: '/home/user/sharelatex_data/data/compiles'
### Bind-mount source for /var/lib/overleaf/data/output inside the container.
SANDBOXED_COMPILES_HOST_DIR_OUTPUT: '/home/user/sharelatex_data/data/output'
### Backwards compatibility (before Server Pro 5.5)
DOCKER_RUNNER: 'true'
SANDBOXED_COMPILES_SIBLING_CONTAINERS: 'true'
## Works with test LDAP server shown at bottom of docker compose
# OVERLEAF_LDAP_URL: 'ldap://ldap:389'

View file

@@ -7,4 +7,4 @@ access-token-encryptor
 --is-library=True
 --node-version=20.18.2
 --public-repo=False
---script-version=4.5.0
+--script-version=4.7.0

View file

@@ -7,4 +7,4 @@ fetch-utils
 --is-library=True
 --node-version=20.18.2
 --public-repo=False
---script-version=4.5.0
+--script-version=4.7.0

View file

@@ -23,11 +23,11 @@ async function fetchJson(url, opts = {}) {
}
async function fetchJsonWithResponse(url, opts = {}) {
const { fetchOpts } = parseOpts(opts)
const { fetchOpts, detachSignal } = parseOpts(opts)
fetchOpts.headers = fetchOpts.headers ?? {}
fetchOpts.headers.Accept = fetchOpts.headers.Accept ?? 'application/json'
const response = await performRequest(url, fetchOpts)
const response = await performRequest(url, fetchOpts, detachSignal)
if (!response.ok) {
const body = await maybeGetResponseBody(response)
throw new RequestFailedError(url, opts, response, body)
@@ -53,8 +53,8 @@ async function fetchStream(url, opts = {}) {
}
async function fetchStreamWithResponse(url, opts = {}) {
const { fetchOpts, abortController } = parseOpts(opts)
const response = await performRequest(url, fetchOpts)
const { fetchOpts, abortController, detachSignal } = parseOpts(opts)
const response = await performRequest(url, fetchOpts, detachSignal)
if (!response.ok) {
const body = await maybeGetResponseBody(response)
@@ -76,8 +76,8 @@ async function fetchStreamWithResponse(url, opts = {}) {
* @throws {RequestFailedError} if the response has a failure status code
*/
async function fetchNothing(url, opts = {}) {
const { fetchOpts } = parseOpts(opts)
const response = await performRequest(url, fetchOpts)
const { fetchOpts, detachSignal } = parseOpts(opts)
const response = await performRequest(url, fetchOpts, detachSignal)
if (!response.ok) {
const body = await maybeGetResponseBody(response)
throw new RequestFailedError(url, opts, response, body)
@@ -108,9 +108,9 @@ async function fetchRedirect(url, opts = {}) {
* @throws {RequestFailedError} if the response has a non redirect status code or missing Location header
*/
async function fetchRedirectWithResponse(url, opts = {}) {
const { fetchOpts } = parseOpts(opts)
const { fetchOpts, detachSignal } = parseOpts(opts)
fetchOpts.redirect = 'manual'
const response = await performRequest(url, fetchOpts)
const response = await performRequest(url, fetchOpts, detachSignal)
if (response.status < 300 || response.status >= 400) {
const body = await maybeGetResponseBody(response)
throw new RequestFailedError(url, opts, response, body)
@@ -142,8 +142,8 @@ async function fetchString(url, opts = {}) {
}
async function fetchStringWithResponse(url, opts = {}) {
const { fetchOpts } = parseOpts(opts)
const response = await performRequest(url, fetchOpts)
const { fetchOpts, detachSignal } = parseOpts(opts)
const response = await performRequest(url, fetchOpts, detachSignal)
if (!response.ok) {
const body = await maybeGetResponseBody(response)
throw new RequestFailedError(url, opts, response, body)
@@ -178,13 +178,14 @@ function parseOpts(opts) {
const abortController = new AbortController()
fetchOpts.signal = abortController.signal
let detachSignal = () => {}
if (opts.signal) {
abortOnSignal(abortController, opts.signal)
detachSignal = abortOnSignal(abortController, opts.signal)
}
if (opts.body instanceof Readable) {
abortOnDestroyedRequest(abortController, fetchOpts.body)
}
return { fetchOpts, abortController }
return { fetchOpts, abortController, detachSignal }
}
function setupJsonBody(fetchOpts, json) {
@@ -208,6 +209,9 @@ function abortOnSignal(abortController, signal) {
abortController.abort(signal.reason)
}
signal.addEventListener('abort', listener)
return () => {
signal.removeEventListener('abort', listener)
}
}
function abortOnDestroyedRequest(abortController, stream) {
@@ -226,11 +230,12 @@ function abortOnDestroyedResponse(abortController, response) {
})
}
async function performRequest(url, fetchOpts) {
async function performRequest(url, fetchOpts, detachSignal) {
let response
try {
response = await fetch(url, fetchOpts)
} catch (err) {
detachSignal()
if (fetchOpts.body instanceof Readable) {
fetchOpts.body.destroy()
}
@@ -239,6 +244,7 @@ async function performRequest(url, fetchOpts) {
method: fetchOpts.method ?? 'GET',
})
}
response.body.on('close', detachSignal)
if (fetchOpts.body instanceof Readable) {
response.body.on('close', () => {
if (!fetchOpts.body.readableEnded) {

View file

@@ -1,6 +1,9 @@
const { expect } = require('chai')
const fs = require('node:fs')
const events = require('node:events')
const { FetchError, AbortError } = require('node-fetch')
const { Readable } = require('node:stream')
const { pipeline } = require('node:stream/promises')
const { once } = require('node:events')
const { TestServer } = require('./helpers/TestServer')
const selfsigned = require('selfsigned')
@@ -203,6 +206,31 @@ describe('fetch-utils', function () {
).to.be.rejectedWith(AbortError)
expect(stream.destroyed).to.be.true
})
it('detaches from signal on success', async function () {
const signal = AbortSignal.timeout(10_000)
for (let i = 0; i < 20; i++) {
const s = await fetchStream(this.url('/hello'), { signal })
expect(events.getEventListeners(signal, 'abort')).to.have.length(1)
await pipeline(s, fs.createWriteStream('/dev/null'))
expect(events.getEventListeners(signal, 'abort')).to.have.length(0)
}
})
it('detaches from signal on error', async function () {
const signal = AbortSignal.timeout(10_000)
for (let i = 0; i < 20; i++) {
try {
await fetchStream(this.url('/500'), { signal })
} catch (err) {
if (err instanceof RequestFailedError && err.response.status === 500)
continue
throw err
} finally {
expect(events.getEventListeners(signal, 'abort')).to.have.length(0)
}
}
})
})
describe('fetchNothing', function () {
@@ -391,9 +419,16 @@ async function* infiniteIterator() {
async function abortOnceReceived(func, server) {
const controller = new AbortController()
const promise = func(controller.signal)
expect(events.getEventListeners(controller.signal, 'abort')).to.have.length(1)
await once(server.events, 'request-received')
controller.abort()
return await promise
try {
return await promise
} finally {
expect(events.getEventListeners(controller.signal, 'abort')).to.have.length(
0
)
}
}
async function expectRequestAborted(req) {

View file

@@ -7,4 +7,4 @@ logger
 --is-library=True
 --node-version=20.18.2
 --public-repo=False
---script-version=4.5.0
+--script-version=4.7.0

View file

@@ -7,4 +7,4 @@ metrics
 --is-library=True
 --node-version=20.18.2
 --public-repo=False
---script-version=4.5.0
+--script-version=4.7.0

View file

@@ -7,4 +7,4 @@ mongo-utils
 --is-library=True
 --node-version=20.18.2
 --public-repo=False
---script-version=4.5.0
+--script-version=4.7.0

View file

@@ -7,4 +7,4 @@ o-error
 --is-library=True
 --node-version=20.18.2
 --public-repo=False
---script-version=4.5.0
+--script-version=4.7.0

View file

@@ -7,4 +7,4 @@ object-persistor
 --is-library=True
 --node-version=20.18.2
 --public-repo=False
---script-version=4.5.0
+--script-version=4.7.0

View file

@@ -7,4 +7,4 @@ overleaf-editor-core
 --is-library=True
 --node-version=20.18.2
 --public-repo=False
---script-version=4.5.0
+--script-version=4.7.0

View file

@@ -7,4 +7,4 @@ promise-utils
 --is-library=True
 --node-version=20.18.2
 --public-repo=False
---script-version=4.5.0
+--script-version=4.7.0

View file

@@ -7,4 +7,4 @@ ranges-tracker
 --is-library=True
 --node-version=20.18.2
 --public-repo=False
---script-version=4.5.0
+--script-version=4.7.0

View file

@ -7,4 +7,4 @@ redis-wrapper
--is-library=True
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0

View file

@ -7,4 +7,4 @@ settings
--is-library=True
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0

View file

@ -7,4 +7,4 @@ stream-utils
--is-library=True
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0

package-lock.json (generated, 5695 lines changed)

File diff suppressed because it is too large

View file

@ -55,6 +55,7 @@
"services/analytics",
"services/chat",
"services/clsi",
"services/clsi-cache",
"services/clsi-perf",
"services/contacts",
"services/docstore",

View file

@ -0,0 +1,22 @@
diff --git a/node_modules/pdfjs-dist/build/pdf.worker.mjs b/node_modules/pdfjs-dist/build/pdf.worker.mjs
index 6c5c6f1..bb6b7d1 100644
--- a/node_modules/pdfjs-dist/build/pdf.worker.mjs
+++ b/node_modules/pdfjs-dist/build/pdf.worker.mjs
@@ -1830,7 +1830,7 @@ async function __wbg_init(module_or_path) {
}
}
if (typeof module_or_path === 'undefined') {
- module_or_path = new URL('qcms_bg.wasm', import.meta.url);
+ module_or_path = new URL(/* webpackIgnore: true */ 'qcms_bg.wasm', import.meta.url);
}
const imports = __wbg_get_imports();
if (typeof module_or_path === 'string' || typeof Request === 'function' && module_or_path instanceof Request || typeof URL === 'function' && module_or_path instanceof URL) {
@@ -5358,7 +5358,7 @@ var OpenJPEG = (() => {
if (Module["locateFile"]) {
return locateFile("openjpeg.wasm");
}
- return new URL("openjpeg.wasm", import.meta.url).href;
+ return new URL(/* webpackIgnore: true */ "openjpeg.wasm", import.meta.url).href;
}
function getBinarySync(file) {
if (file == wasmBinaryFile && wasmBinary) {
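
The patched lines above add webpack's `webpackIgnore` magic comment so the wasm URLs are resolved at runtime instead of being rewritten at bundle time. The construct in isolation (file name is just an example, ES module context assumed):

// With the comment, webpack leaves the URL alone and it is computed
// relative to the module at runtime.
const wasmUrl = new URL(/* webpackIgnore: true */ 'example.wasm', import.meta.url)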

View file

@ -45,5 +45,17 @@
"clusterWorkers": "CLUSTER_WORKERS",
"maxFileUploadSize": "MAX_FILE_UPLOAD_SIZE",
"httpsOnly": "HTTPS_ONLY",
"httpRequestTimeout": "OVERLEAF_HISTORY_V1_HTTP_REQUEST_TIMEOUT"
"httpRequestTimeout": "OVERLEAF_HISTORY_V1_HTTP_REQUEST_TIMEOUT",
"redis": {
"history": {
"host": "OVERLEAF_REDIS_HOST",
"password": "OVERLEAF_REDIS_PASS",
"port": "OVERLEAF_REDIS_PORT"
},
"lock": {
"host": "OVERLEAF_REDIS_HOST",
"password": "OVERLEAF_REDIS_PASS",
"port": "OVERLEAF_REDIS_PORT"
}
}
}

View file

@ -9,7 +9,7 @@ describe('Accounts', function () {
it('can log in and out', function () {
login('user@example.com')
cy.visit('/project')
cy.findByText('Account').click()
cy.findByRole('menuitem', { name: 'Account' }).click()
cy.findByText('Log Out').click()
cy.url().should('include', '/login')
cy.visit('/project')

View file

@ -293,7 +293,7 @@ describe('admin panel', function () {
cy.findByText(deletedProjectName).should('not.exist')
cy.log('navigate to thrashed projects and delete the project')
cy.get('.project-list-sidebar-react').within(() => {
cy.get('.project-list-sidebar-scroll').within(() => {
cy.findByText('Trashed Projects').click()
})
findProjectRow(deletedProjectName).within(() =>
@ -318,7 +318,7 @@ describe('admin panel', function () {
cy.log('login as the user and verify the project is restored')
login(user1)
cy.visit('/project')
cy.get('.project-list-sidebar-react').within(() => {
cy.get('.project-list-sidebar-scroll').within(() => {
cy.findByText('Trashed Projects').click()
})
cy.findByText(`${deletedProjectName} (Restored)`)

View file

@ -102,10 +102,6 @@ describe('Project creation and compilation', function () {
cy.findByText('Invite not yet accepted.')
})
cy.visit('/project')
cy.findByText('Account').click()
cy.findByText('Log Out').click()
login('collaborator@example.com')
openProjectViaInviteNotification(targetProjectName)
cy.get('@targetProjectId').then(targetProjectId => {

View file

@ -131,9 +131,7 @@ const allowedVars = Joi.object(
'GIT_BRIDGE_HOST',
'GIT_BRIDGE_PORT',
'V1_HISTORY_URL',
'DOCKER_RUNNER',
'SANDBOXED_COMPILES',
'SANDBOXED_COMPILES_SIBLING_CONTAINERS',
'ALL_TEX_LIVE_DOCKER_IMAGE_NAMES',
'OVERLEAF_TEMPLATES_USER_ID',
'OVERLEAF_NEW_PROJECT_TEMPLATE_LINKS',
@ -196,10 +194,7 @@ function setVarsDockerCompose({ pro, vars, version, withDataDir }) {
)
}
if (
cfg.services.sharelatex.environment
.SANDBOXED_COMPILES_SIBLING_CONTAINERS === 'true'
) {
if (cfg.services.sharelatex.environment.SANDBOXED_COMPILES === 'true') {
cfg.services.sharelatex.environment.SANDBOXED_COMPILES_HOST_DIR =
PATHS.SANDBOXED_COMPILES_HOST_DIR
cfg.services.sharelatex.environment.TEX_LIVE_DOCKER_IMAGE =

View file

@ -10,9 +10,7 @@ const LABEL_TEX_LIVE_VERSION = 'TeX Live version'
describe('SandboxedCompiles', function () {
const enabledVars = {
DOCKER_RUNNER: 'true',
SANDBOXED_COMPILES: 'true',
SANDBOXED_COMPILES_SIBLING_CONTAINERS: 'true',
ALL_TEX_LIVE_DOCKER_IMAGE_NAMES: '2023,2022',
}

View file

@ -96,12 +96,12 @@ describe('Templates', () => {
.parent()
.parent()
.within(() => cy.get('input[type="checkbox"]').first().check())
cy.get('.project-list-sidebar-react').within(() => {
cy.get('.project-list-sidebar-scroll').within(() => {
cy.findAllByText('New Tag').first().click()
})
cy.focused().type(tagName)
cy.findByText('Create').click()
cy.get('.project-list-sidebar-react').within(() => {
cy.get('.project-list-sidebar-scroll').within(() => {
cy.findByText(tagName)
.parent()
.within(() => cy.get('.name').should('have.text', `${tagName} (1)`))

View file

@ -6,4 +6,4 @@ chat
--esmock-loader=False
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0

View file

@ -20,6 +20,7 @@ The CLSI can be configured through the following environment variables:
* `CATCH_ERRORS` - Set to `true` to log uncaught exceptions
* `COMPILE_GROUP_DOCKER_CONFIGS` - JSON string of Docker configs for compile groups
* `COMPILES_HOST_DIR` - Working directory for LaTeX compiles
* `OUTPUT_HOST_DIR` - Output directory for LaTeX compiles
* `COMPILE_SIZE_LIMIT` - Sets the body-parser [limit](https://github.com/expressjs/body-parser#limit)
* `DOCKER_RUNNER` - Set to true to use sibling containers
* `DOCKER_RUNTIME` -
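
`OUTPUT_HOST_DIR` is the output-side counterpart of `COMPILES_HOST_DIR`; the settings changes further down in this diff map both onto `Settings.path`. A rough sketch of that mapping (variable names taken from this diff, values are examples):

// Sketch of how the host-dir variables end up on Settings.path when
// sandboxed compiles are enabled (see the settings hunk below).
const path = {
  sandboxedCompilesHostDirCompiles:
    process.env.SANDBOXED_COMPILES_HOST_DIR_COMPILES ||
    process.env.SANDBOXED_COMPILES_HOST_DIR ||
    process.env.COMPILES_HOST_DIR, // e.g. "$PWD/compiles"
  sandboxedCompilesHostDirOutput:
    process.env.SANDBOXED_COMPILES_HOST_DIR_OUTPUT ||
    process.env.OUTPUT_HOST_DIR, // e.g. "$PWD/output"
}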

View file

@ -258,6 +258,8 @@ app.use(function (error, req, res, next) {
if (error instanceof Errors.NotFoundError) {
logger.debug({ err: error, url: req.url }, 'not found error')
res.sendStatus(404)
} else if (error instanceof Errors.InvalidParameter) {
res.status(400).send(error.message)
} else if (error.code === 'EPIPE') {
// inspect container returns EPIPE when shutting down
res.sendStatus(503) // send 503 Unavailable response
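
The new branch above maps `InvalidParameter` (added to `Errors` later in this diff and thrown by the synctex/wordcount parameter checks) to a plain 400 carrying the error message. A small sketch of the round trip, assuming an Express app and a stand-in error class:

const express = require('express')

class InvalidParameter extends Error {} // stand-in for Errors.InvalidParameter

const app = express()

app.get('/sync/code', (req, res, next) => {
  const { imageName } = req.query
  // hypothetical allow-list, mirroring _isImageNameAllowed further down
  const allowedImages = ['repo/image:tag1', 'repo/image:tag2']
  if (imageName && !allowedImages.includes(imageName)) {
    return next(new InvalidParameter('invalid image'))
  }
  res.sendStatus(204)
})

// Error handler: same mapping as the hunk above.
app.use((error, req, res, next) => {
  if (error instanceof InvalidParameter) {
    res.status(400).send(error.message)
  } else {
    res.sendStatus(500)
  }
})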

View file

@ -0,0 +1,276 @@
const crypto = require('node:crypto')
const fs = require('node:fs')
const Path = require('node:path')
const { pipeline } = require('node:stream/promises')
const { createGzip, createGunzip } = require('node:zlib')
const tarFs = require('tar-fs')
const _ = require('lodash')
const {
fetchNothing,
fetchStream,
RequestFailedError,
} = require('@overleaf/fetch-utils')
const logger = require('@overleaf/logger')
const Metrics = require('@overleaf/metrics')
const Settings = require('@overleaf/settings')
const { CACHE_SUBDIR } = require('./OutputCacheManager')
const { isExtraneousFile } = require('./ResourceWriter')
const TIMING_BUCKETS = [
0, 10, 100, 1000, 2000, 5000, 10000, 15000, 20000, 30000,
]
const MAX_ENTRIES_IN_OUTPUT_TAR = 100
/**
* @param {string} projectId
* @param {string} userId
* @param {string} buildId
* @param {string} editorId
* @param {[{path: string}]} outputFiles
* @param {string} compileGroup
* @param {Record<string, any>} options
*/
function notifyCLSICacheAboutBuild({
projectId,
userId,
buildId,
editorId,
outputFiles,
compileGroup,
options,
}) {
if (!Settings.apis.clsiCache.enabled) return
/**
* @param {[{path: string}]} files
*/
const enqueue = files => {
Metrics.count('clsi_cache_enqueue_files', files.length)
fetchNothing(`${Settings.apis.clsiCache.url}/enqueue`, {
method: 'POST',
json: {
projectId,
userId,
buildId,
editorId,
files,
downloadHost: Settings.apis.clsi.downloadHost,
clsiServerId: Settings.apis.clsi.clsiServerId,
compileGroup,
options,
},
signal: AbortSignal.timeout(15_000),
}).catch(err => {
logger.warn(
{ err, projectId, userId, buildId },
'enqueue for clsi cache failed'
)
})
}
// PDF preview
enqueue(
outputFiles
.filter(
f =>
f.path === 'output.pdf' ||
f.path === 'output.log' ||
f.path === 'output.synctex.gz' ||
f.path.endsWith('.blg')
)
.map(f => {
if (f.path === 'output.pdf') {
return _.pick(f, 'path', 'size', 'contentId', 'ranges')
}
return _.pick(f, 'path')
})
)
// Compile Cache
buildTarball({ projectId, userId, buildId, outputFiles })
.then(() => {
enqueue([{ path: 'output.tar.gz' }])
})
.catch(err => {
logger.warn(
{ err, projectId, userId, buildId },
'build output.tar.gz for clsi cache failed'
)
})
}
/**
* @param {string} projectId
* @param {string} userId
* @param {string} buildId
* @param {[{path: string}]} outputFiles
* @return {Promise<void>}
*/
async function buildTarball({ projectId, userId, buildId, outputFiles }) {
const timer = new Metrics.Timer('clsi_cache_build', 1, {}, TIMING_BUCKETS)
const outputDir = Path.join(
Settings.path.outputDir,
userId ? `${projectId}-${userId}` : projectId,
CACHE_SUBDIR,
buildId
)
const files = outputFiles.filter(f => !isExtraneousFile(f.path))
if (files.length > MAX_ENTRIES_IN_OUTPUT_TAR) {
Metrics.inc('clsi_cache_build_too_many_entries')
throw new Error('too many output files for output.tar.gz')
}
Metrics.count('clsi_cache_build_files', files.length)
const path = Path.join(outputDir, 'output.tar.gz')
try {
await pipeline(
tarFs.pack(outputDir, { entries: files.map(f => f.path) }),
createGzip(),
fs.createWriteStream(path)
)
} catch (err) {
try {
await fs.promises.unlink(path)
} catch (e) {}
throw err
} finally {
timer.done()
}
}
/**
* @param {string} projectId
* @param {string} userId
* @param {string} editorId
* @param {string} buildId
* @param {string} outputDir
* @return {Promise<boolean>}
*/
async function downloadOutputDotSynctexFromCompileCache(
projectId,
userId,
editorId,
buildId,
outputDir
) {
if (!Settings.apis.clsiCache.enabled) return false
const timer = new Metrics.Timer(
'clsi_cache_download',
1,
{ method: 'synctex' },
TIMING_BUCKETS
)
let stream
try {
stream = await fetchStream(
`${Settings.apis.clsiCache.url}/project/${projectId}/${
userId ? `user/${userId}/` : ''
}build/${editorId}-${buildId}/search/output/output.synctex.gz`,
{
method: 'GET',
signal: AbortSignal.timeout(10_000),
}
)
} catch (err) {
if (err instanceof RequestFailedError && err.response.status === 404) {
timer.done({ status: 'not-found' })
return false
}
timer.done({ status: 'error' })
throw err
}
await fs.promises.mkdir(outputDir, { recursive: true })
const dst = Path.join(outputDir, 'output.synctex.gz')
const tmp = dst + crypto.randomUUID()
try {
await pipeline(stream, fs.createWriteStream(tmp))
await fs.promises.rename(tmp, dst)
} catch (err) {
try {
await fs.promises.unlink(tmp)
} catch {}
throw err
}
timer.done({ status: 'success' })
return true
}
/**
* @param {string} projectId
* @param {string} userId
* @param {string} compileDir
* @return {Promise<boolean>}
*/
async function downloadLatestCompileCache(projectId, userId, compileDir) {
if (!Settings.apis.clsiCache.enabled) return false
const url = `${Settings.apis.clsiCache.url}/project/${projectId}/${
userId ? `user/${userId}/` : ''
}latest/output/output.tar.gz`
const timer = new Metrics.Timer(
'clsi_cache_download',
1,
{ method: 'tar' },
TIMING_BUCKETS
)
let stream
try {
stream = await fetchStream(url, {
method: 'GET',
signal: AbortSignal.timeout(10_000),
})
} catch (err) {
if (err instanceof RequestFailedError && err.response.status === 404) {
timer.done({ status: 'not-found' })
return false
}
timer.done({ status: 'error' })
throw err
}
let n = 0
let abort = false
await pipeline(
stream,
createGunzip(),
tarFs.extract(compileDir, {
// use ignore hook for counting entries (files+folders) and validation.
// Include folders as they incur mkdir calls.
ignore(_, header) {
if (abort) return true // log once
n++
if (n > MAX_ENTRIES_IN_OUTPUT_TAR) {
abort = true
logger.warn(
{
url,
compileDir,
},
'too many entries in tar-ball from clsi-cache'
)
} else if (header.type !== 'file' && header.type !== 'directory') {
abort = true
logger.warn(
{
url,
compileDir,
entryType: header.type,
},
'unexpected entry in tar-ball from clsi-cache'
)
}
return abort
},
})
)
Metrics.count('clsi_cache_download_entries', n)
timer.done({ status: 'success' })
return !abort
}
module.exports = {
notifyCLSICacheAboutBuild,
downloadLatestCompileCache,
downloadOutputDotSynctexFromCompileCache,
}
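
The module above is new; a sketch of how two of its exports are meant to be called from the compile flow (the actual wiring appears in the CompileController and CompileManager hunks later in this diff, so this is simplified; the synctex download follows the same shape):

const {
  notifyCLSICacheAboutBuild,
  downloadLatestCompileCache,
} = require('./CLSICacheHandler')

// After a successful compile: enqueue outputs for the cache (fire-and-forget).
function afterSuccessfulCompile({ projectId, userId, buildId, editorId, outputFiles }) {
  notifyCLSICacheAboutBuild({
    projectId,
    userId,
    buildId,
    editorId,
    outputFiles,
    compileGroup: 'standard', // example value
    options: {},
  })
}

// Before the first compile on this node: best-effort warm-up of the compile dir.
async function warmCompileDir(projectId, userId, compileDir) {
  try {
    return await downloadLatestCompileCache(projectId, userId, compileDir)
  } catch (err) {
    return false // the cache is an optimisation, never fatal
  }
}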

View file

@ -1,3 +1,4 @@
const Path = require('node:path')
const RequestParser = require('./RequestParser')
const CompileManager = require('./CompileManager')
const Settings = require('@overleaf/settings')
@ -5,6 +6,7 @@ const Metrics = require('./Metrics')
const ProjectPersistenceManager = require('./ProjectPersistenceManager')
const logger = require('@overleaf/logger')
const Errors = require('./Errors')
const { notifyCLSICacheAboutBuild } = require('./CLSICacheHandler')
let lastSuccessfulCompileTimestamp = 0
@ -29,100 +31,133 @@ function compile(req, res, next) {
if (error) {
return next(error)
}
CompileManager.doCompileWithLock(request, (error, result) => {
let { buildId, outputFiles, stats, timings } = result || {}
let code, status
if (outputFiles == null) {
outputFiles = []
}
if (error instanceof Errors.AlreadyCompilingError) {
code = 423 // Http 423 Locked
status = 'compile-in-progress'
} else if (error instanceof Errors.FilesOutOfSyncError) {
code = 409 // Http 409 Conflict
status = 'retry'
logger.warn(
{
const stats = {}
const timings = {}
CompileManager.doCompileWithLock(
request,
stats,
timings,
(error, result) => {
let { buildId, outputFiles } = result || {}
let code, status
if (outputFiles == null) {
outputFiles = []
}
if (error instanceof Errors.AlreadyCompilingError) {
code = 423 // Http 423 Locked
status = 'compile-in-progress'
} else if (error instanceof Errors.FilesOutOfSyncError) {
code = 409 // Http 409 Conflict
status = 'retry'
logger.warn(
{
projectId: request.project_id,
userId: request.user_id,
},
'files out of sync, please retry'
)
} else if (
error?.code === 'EPIPE' ||
error instanceof Errors.TooManyCompileRequestsError
) {
// docker returns EPIPE when shutting down
code = 503 // send 503 Unavailable response
status = 'unavailable'
} else if (error?.terminated) {
status = 'terminated'
} else if (error?.validate) {
status = `validation-${error.validate}`
} else if (error?.timedout) {
status = 'timedout'
logger.debug(
{ err: error, projectId: request.project_id },
'timeout running compile'
)
} else if (error) {
status = 'error'
code = 500
logger.error(
{ err: error, projectId: request.project_id },
'error running compile'
)
} else {
if (
outputFiles.some(
file => file.path === 'output.pdf' && file.size > 0
)
) {
status = 'success'
lastSuccessfulCompileTimestamp = Date.now()
} else if (request.stopOnFirstError) {
status = 'stopped-on-first-error'
} else {
status = 'failure'
logger.warn(
{ projectId: request.project_id, outputFiles },
'project failed to compile successfully, no output.pdf generated'
)
}
// log an error if any core files are found
if (outputFiles.some(file => file.path === 'core')) {
logger.error(
{ projectId: request.project_id, req, outputFiles },
'core file found in output'
)
}
}
if (error) {
outputFiles = error.outputFiles || []
buildId = error.buildId
}
if (
status === 'success' &&
request.editorId &&
request.populateClsiCache
) {
notifyCLSICacheAboutBuild({
projectId: request.project_id,
userId: request.user_id,
buildId: outputFiles[0].build,
editorId: request.editorId,
outputFiles,
compileGroup: request.compileGroup,
options: {
compiler: request.compiler,
draft: request.draft,
imageName: request.imageName
? Path.basename(request.imageName)
: undefined,
rootResourcePath: request.rootResourcePath,
stopOnFirstError: request.stopOnFirstError,
},
})
}
timer.done()
res.status(code || 200).send({
compile: {
status,
error: error?.message || error,
stats,
timings,
buildId,
outputUrlPrefix: Settings.apis.clsi.outputUrlPrefix,
outputFiles: outputFiles.map(file => ({
url:
`${Settings.apis.clsi.url}/project/${request.project_id}` +
(request.user_id != null
? `/user/${request.user_id}`
: '') +
`/build/${file.build}/output/${file.path}`,
...file,
})),
},
'files out of sync, please retry'
)
} else if (
error?.code === 'EPIPE' ||
error instanceof Errors.TooManyCompileRequestsError
) {
// docker returns EPIPE when shutting down
code = 503 // send 503 Unavailable response
status = 'unavailable'
} else if (error?.terminated) {
status = 'terminated'
} else if (error?.validate) {
status = `validation-${error.validate}`
} else if (error?.timedout) {
status = 'timedout'
logger.debug(
{ err: error, projectId: request.project_id },
'timeout running compile'
)
} else if (error) {
status = 'error'
code = 500
logger.error(
{ err: error, projectId: request.project_id },
'error running compile'
)
} else {
if (
outputFiles.some(
file => file.path === 'output.pdf' && file.size > 0
)
) {
status = 'success'
lastSuccessfulCompileTimestamp = Date.now()
} else if (request.stopOnFirstError) {
status = 'stopped-on-first-error'
} else {
status = 'failure'
logger.warn(
{ projectId: request.project_id, outputFiles },
'project failed to compile successfully, no output.pdf generated'
)
}
// log an error if any core files are found
if (outputFiles.some(file => file.path === 'core')) {
logger.error(
{ projectId: request.project_id, req, outputFiles },
'core file found in output'
)
}
})
}
if (error) {
outputFiles = error.outputFiles || []
buildId = error.buildId
}
timer.done()
res.status(code || 200).send({
compile: {
status,
error: error?.message || error,
stats,
timings,
buildId,
outputUrlPrefix: Settings.apis.clsi.outputUrlPrefix,
outputFiles: outputFiles.map(file => ({
url:
`${Settings.apis.clsi.url}/project/${request.project_id}` +
(request.user_id != null ? `/user/${request.user_id}` : '') +
`/build/${file.build}/output/${file.path}`,
...file,
})),
},
})
})
)
}
)
})
@ -153,24 +188,19 @@ function clearCache(req, res, next) {
}
function syncFromCode(req, res, next) {
const { file } = req.query
const { file, editorId, buildId, compileFromClsiCache } = req.query
const line = parseInt(req.query.line, 10)
const column = parseInt(req.query.column, 10)
const { imageName } = req.query
const projectId = req.params.project_id
const userId = req.params.user_id
if (imageName && !_isImageNameAllowed(imageName)) {
return res.status(400).send('invalid image')
}
CompileManager.syncFromCode(
projectId,
userId,
file,
line,
column,
imageName,
{ imageName, editorId, buildId, compileFromClsiCache },
function (error, pdfPositions) {
if (error) {
return next(error)
@ -186,20 +216,16 @@ function syncFromPdf(req, res, next) {
const page = parseInt(req.query.page, 10)
const h = parseFloat(req.query.h)
const v = parseFloat(req.query.v)
const { imageName } = req.query
const { imageName, editorId, buildId, compileFromClsiCache } = req.query
const projectId = req.params.project_id
const userId = req.params.user_id
if (imageName && !_isImageNameAllowed(imageName)) {
return res.status(400).send('invalid image')
}
CompileManager.syncFromPdf(
projectId,
userId,
page,
h,
v,
imageName,
{ imageName, editorId, buildId, compileFromClsiCache },
function (error, codePositions) {
if (error) {
return next(error)
@ -216,9 +242,6 @@ function wordcount(req, res, next) {
const projectId = req.params.project_id
const userId = req.params.user_id
const { image } = req.query
if (image && !_isImageNameAllowed(image)) {
return res.status(400).send('invalid image')
}
logger.debug({ image, file, projectId }, 'word count request')
CompileManager.wordcount(
@ -241,12 +264,6 @@ function status(req, res, next) {
res.send('OK')
}
function _isImageNameAllowed(imageName) {
const ALLOWED_IMAGES =
Settings.clsi && Settings.clsi.docker && Settings.clsi.docker.allowedImages
return !ALLOWED_IMAGES || ALLOWED_IMAGES.includes(imageName)
}
module.exports = {
compile,
stopCompile,

View file

@ -19,6 +19,10 @@ const Errors = require('./Errors')
const CommandRunner = require('./CommandRunner')
const { emitPdfStats } = require('./ContentCacheMetrics')
const SynctexOutputParser = require('./SynctexOutputParser')
const {
downloadLatestCompileCache,
downloadOutputDotSynctexFromCompileCache,
} = require('./CLSICacheHandler')
const COMPILE_TIME_BUCKETS = [
// NOTE: These buckets are locked in per metric name.
@ -42,22 +46,22 @@ function getOutputDir(projectId, userId) {
return Path.join(Settings.path.outputDir, getCompileName(projectId, userId))
}
async function doCompileWithLock(request) {
async function doCompileWithLock(request, stats, timings) {
const compileDir = getCompileDir(request.project_id, request.user_id)
await fsPromises.mkdir(compileDir, { recursive: true })
request.isInitialCompile =
(await fsPromises.mkdir(compileDir, { recursive: true })) === compileDir
// prevent simultaneous compiles
const lock = LockManager.acquire(compileDir)
try {
return await doCompile(request)
return await doCompile(request, stats, timings)
} finally {
lock.release()
}
}
async function doCompile(request) {
async function doCompile(request, stats, timings) {
const { project_id: projectId, user_id: userId } = request
const compileDir = getCompileDir(request.project_id, request.user_id)
const stats = {}
const timings = {}
const timerE2E = new Metrics.Timer(
'compile-e2e-v2',
@ -65,6 +69,25 @@ async function doCompile(request) {
request.metricsOpts,
COMPILE_TIME_BUCKETS
)
if (request.isInitialCompile) {
stats.isInitialCompile = 1
request.metricsOpts.compile = 'initial'
if (request.compileFromClsiCache) {
try {
if (await downloadLatestCompileCache(projectId, userId, compileDir)) {
stats.restoredClsiCache = 1
request.metricsOpts.compile = 'from-clsi-cache'
}
} catch (err) {
logger.warn(
{ err, projectId, userId },
'failed to populate compile dir from cache'
)
}
}
} else {
request.metricsOpts.compile = 'recompile'
}
const writeToDiskTimer = new Metrics.Timer(
'write-to-disk',
1,
@ -296,7 +319,7 @@ async function doCompile(request) {
emitPdfStats(stats, timings, request)
}
return { outputFiles, stats, timings, buildId }
return { outputFiles, buildId }
}
async function _saveOutputFiles({
@ -408,14 +431,7 @@ async function _checkDirectory(compileDir) {
return true
}
async function syncFromCode(
projectId,
userId,
filename,
line,
column,
imageName
) {
async function syncFromCode(projectId, userId, filename, line, column, opts) {
// If LaTeX was run in a virtual environment, the file path that synctex expects
// might not match the file path on the host. The .synctex.gz file however, will be accessed
// wherever it is on the host.
@ -431,7 +447,7 @@ async function syncFromCode(
'-o',
outputFilePath,
]
const stdout = await _runSynctex(projectId, userId, command, imageName)
const stdout = await _runSynctex(projectId, userId, command, opts)
logger.debug(
{ projectId, userId, filename, line, column, command, stdout },
'synctex code output'
@ -439,7 +455,7 @@ async function syncFromCode(
return SynctexOutputParser.parseViewOutput(stdout)
}
async function syncFromPdf(projectId, userId, page, h, v, imageName) {
async function syncFromPdf(projectId, userId, page, h, v, opts) {
const compileName = getCompileName(projectId, userId)
const baseDir = Settings.path.synctexBaseDir(compileName)
const outputFilePath = `${baseDir}/output.pdf`
@ -449,7 +465,7 @@ async function syncFromPdf(projectId, userId, page, h, v, imageName) {
'-o',
`${page}:${h}:${v}:${outputFilePath}`,
]
const stdout = await _runSynctex(projectId, userId, command, imageName)
const stdout = await _runSynctex(projectId, userId, command, opts)
logger.debug({ projectId, userId, page, h, v, stdout }, 'synctex pdf output')
return SynctexOutputParser.parseEditOutput(stdout, baseDir)
}
@ -478,32 +494,85 @@ async function _checkFileExists(dir, filename) {
}
}
async function _runSynctex(projectId, userId, command, imageName) {
const directory = getCompileDir(projectId, userId)
async function _runSynctex(projectId, userId, command, opts) {
const { imageName, editorId, buildId, compileFromClsiCache } = opts
if (imageName && !_isImageNameAllowed(imageName)) {
throw new Errors.InvalidParameter('invalid image')
}
if (editorId && !/^[a-f0-9-]+$/.test(editorId)) {
throw new Errors.InvalidParameter('invalid editorId')
}
if (buildId && !OutputCacheManager.BUILD_REGEX.test(buildId)) {
throw new Errors.InvalidParameter('invalid buildId')
}
const outputDir = getOutputDir(projectId, userId)
const runInOutputDir = buildId && CommandRunner.canRunSyncTeXInOutputDir()
const directory = runInOutputDir
? Path.join(outputDir, OutputCacheManager.CACHE_SUBDIR, buildId)
: getCompileDir(projectId, userId)
const timeout = 60 * 1000 // increased to allow for large projects
const compileName = getCompileName(projectId, userId)
const compileGroup = 'synctex'
const compileGroup = runInOutputDir ? 'synctex-output' : 'synctex'
const defaultImageName =
Settings.clsi && Settings.clsi.docker && Settings.clsi.docker.image
await _checkFileExists(directory, 'output.synctex.gz')
try {
const output = await CommandRunner.promises.run(
compileName,
command,
directory,
imageName || defaultImageName,
timeout,
{},
compileGroup
)
return output.stdout
} catch (error) {
throw OError.tag(error, 'error running synctex', {
command,
projectId,
userId,
})
}
// eslint-disable-next-line @typescript-eslint/return-await
return await OutputCacheManager.promises.queueDirOperation(
outputDir,
/**
* @return {Promise<string>}
*/
async () => {
try {
await _checkFileExists(directory, 'output.synctex.gz')
} catch (err) {
if (
err instanceof Errors.NotFoundError &&
compileFromClsiCache &&
editorId &&
buildId
) {
try {
await downloadOutputDotSynctexFromCompileCache(
projectId,
userId,
editorId,
buildId,
directory
)
} catch (err) {
logger.warn(
{ err, projectId, userId, editorId, buildId },
'failed to download output.synctex.gz from clsi-cache'
)
}
await _checkFileExists(directory, 'output.synctex.gz')
} else {
throw err
}
}
try {
const output = await CommandRunner.promises.run(
compileName,
command,
directory,
imageName || defaultImageName,
timeout,
{},
compileGroup
)
return output.stdout
} catch (error) {
throw OError.tag(error, 'error running synctex', {
command,
projectId,
userId,
})
}
}
)
}
async function wordcount(projectId, userId, filename, image) {
@ -515,6 +584,10 @@ async function wordcount(projectId, userId, filename, image) {
const compileName = getCompileName(projectId, userId)
const compileGroup = 'wordcount'
if (image && !_isImageNameAllowed(image)) {
throw new Errors.InvalidParameter('invalid image')
}
try {
await fsPromises.mkdir(compileDir, { recursive: true })
} catch (err) {
@ -602,6 +675,12 @@ function _parseWordcountFromOutput(output) {
return results
}
function _isImageNameAllowed(imageName) {
const ALLOWED_IMAGES =
Settings.clsi && Settings.clsi.docker && Settings.clsi.docker.allowedImages
return !ALLOWED_IMAGES || ALLOWED_IMAGES.includes(imageName)
}
module.exports = {
doCompileWithLock: callbackify(doCompileWithLock),
stopCompile: callbackify(stopCompile),

View file

@ -6,21 +6,12 @@ const dockerode = new Docker()
const crypto = require('node:crypto')
const async = require('async')
const LockManager = require('./DockerLockManager')
const fs = require('node:fs')
const Path = require('node:path')
const _ = require('lodash')
const ONE_HOUR_IN_MS = 60 * 60 * 1000
logger.debug('using docker runner')
function usingSiblingContainers() {
return (
Settings != null &&
Settings.path != null &&
Settings.path.sandboxedCompilesHostDir != null
)
}
let containerMonitorTimeout
let containerMonitorInterval
@ -35,24 +26,6 @@ const DockerRunner = {
compileGroup,
callback
) {
if (usingSiblingContainers()) {
const _newPath = Settings.path.sandboxedCompilesHostDir
logger.debug(
{ path: _newPath },
'altering bind path for sibling containers'
)
// Server Pro, example:
// '/var/lib/overleaf/data/compiles/<project-id>'
// ... becomes ...
// '/opt/overleaf_data/data/compiles/<project-id>'
directory = Path.join(
Settings.path.sandboxedCompilesHostDir,
Path.basename(directory)
)
}
const volumes = { [directory]: '/compile' }
command = command.map(arg =>
arg.toString().replace('$COMPILE_DIR', '/compile')
)
@ -72,7 +45,32 @@ const DockerRunner = {
image = `${Settings.texliveImageNameOveride}/${img[2]}`
}
if (compileGroup === 'synctex' || compileGroup === 'wordcount') {
if (compileGroup === 'synctex-output') {
// In: directory = '/overleaf/services/clsi/output/projectId-userId/generated-files/buildId'
// directory.split('/').slice(-3) === 'projectId-userId/generated-files/buildId'
// sandboxedCompilesHostDirOutput = '/host/output'
// Out: directory = '/host/output/projectId-userId/generated-files/buildId'
directory = Path.join(
Settings.path.sandboxedCompilesHostDirOutput,
...directory.split('/').slice(-3)
)
} else {
// In: directory = '/overleaf/services/clsi/compiles/projectId-userId'
// Path.basename(directory) === 'projectId-userId'
// sandboxedCompilesHostDirCompiles = '/host/compiles'
// Out: directory = '/host/compiles/projectId-userId'
directory = Path.join(
Settings.path.sandboxedCompilesHostDirCompiles,
Path.basename(directory)
)
}
const volumes = { [directory]: '/compile' }
if (
compileGroup === 'synctex' ||
compileGroup === 'synctex-output' ||
compileGroup === 'wordcount'
) {
volumes[directory] += ':ro'
}
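
The comments above spell out the bind-path rewrite; a small sketch of just that computation, using the example paths from the comments:

const Path = require('node:path')

function rewriteBindPath(directory, compileGroup, settings) {
  if (compileGroup === 'synctex-output') {
    // keep the last three segments: projectId-userId/generated-files/buildId
    return Path.join(
      settings.sandboxedCompilesHostDirOutput,
      ...directory.split('/').slice(-3)
    )
  }
  // compiles: keep only projectId-userId
  return Path.join(
    settings.sandboxedCompilesHostDirCompiles,
    Path.basename(directory)
  )
}

// Example from the comments above:
// rewriteBindPath(
//   '/overleaf/services/clsi/output/projectId-userId/generated-files/buildId',
//   'synctex-output',
//   { sandboxedCompilesHostDirOutput: '/host/output' }
// ) // => '/host/output/projectId-userId/generated-files/buildId'
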
@ -309,50 +307,17 @@ const DockerRunner = {
LockManager.runWithLock(
options.name,
releaseLock =>
// Check that volumes exist before starting the container.
// When a container is started with volume pointing to a
// non-existent directory then docker creates the directory but
// with root ownership.
DockerRunner._checkVolumes(options, volumes, err => {
if (err != null) {
return releaseLock(err)
}
DockerRunner._startContainer(
options,
volumes,
attachStreamHandler,
releaseLock
)
}),
DockerRunner._startContainer(
options,
volumes,
attachStreamHandler,
releaseLock
),
callback
)
},
// Check that volumes exist and are directories
_checkVolumes(options, volumes, callback) {
if (usingSiblingContainers()) {
// Server Pro, with sibling-containers active, skip checks
return callback(null)
}
const checkVolume = (path, cb) =>
fs.stat(path, (err, stats) => {
if (err != null) {
return cb(err)
}
if (!stats.isDirectory()) {
return cb(new Error('not a directory'))
}
cb()
})
const jobs = []
for (const vol in volumes) {
jobs.push(cb => checkVolume(vol, cb))
}
async.series(jobs, callback)
},
_startContainer(options, volumes, attachStreamHandler, callback) {
callback = _.once(callback)
const { name } = options
@ -617,6 +582,10 @@ const DockerRunner = {
containerMonitorInterval = undefined
}
},
canRunSyncTeXInOutputDir() {
return Boolean(Settings.path.sandboxedCompilesHostDirOutput)
},
}
DockerRunner.startContainerMonitor()

View file

@ -35,6 +35,7 @@ class QueueLimitReachedError extends OError {}
class TimedOutError extends OError {}
class NoXrefTableError extends OError {}
class TooManyCompileRequestsError extends OError {}
class InvalidParameter extends OError {}
module.exports = Errors = {
QueueLimitReachedError,
@ -44,4 +45,5 @@ module.exports = Errors = {
AlreadyCompilingError,
NoXrefTableError,
TooManyCompileRequestsError,
InvalidParameter,
}

View file

@ -99,6 +99,10 @@ module.exports = CommandRunner = {
}
return callback()
},
canRunSyncTeXInOutputDir() {
return true
},
}
module.exports.promises = {

View file

@ -83,6 +83,13 @@ async function cleanupDirectory(dir, options) {
})
}
/**
* @template T
*
* @param {string} dir
* @param {() => Promise<T>} fn
* @return {Promise<T>}
*/
async function queueDirOperation(dir, fn) {
const pending = PENDING_PROJECT_ACTIONS.get(dir) || Promise.resolve()
const p = pending.then(fn, fn).finally(() => {
@ -677,4 +684,5 @@ OutputCacheManager.promises = {
saveOutputFilesInBuildDir: promisify(
OutputCacheManager.saveOutputFilesInBuildDir
),
queueDirOperation,
}
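
`queueDirOperation` above chains work per directory so concurrent callers do not interleave; a minimal usage sketch (the callback bodies are just examples):

const OutputCacheManager = require('./OutputCacheManager')

async function example(dir) {
  // Both calls target the same directory, so the second callback only starts
  // once the first has settled (resolved or rejected).
  await OutputCacheManager.promises.queueDirOperation(dir, async () => {
    // e.g. download output.synctex.gz into `dir`
  })
  await OutputCacheManager.promises.queueDirOperation(dir, async () => {
    // e.g. run synctex against the downloaded file
  })
}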

View file

@ -93,8 +93,11 @@ module.exports = {
)
return outputFiles.filter(
// Ignore the pdf and also ignore the files ignored by the frontend.
({ path }) => path !== 'output.pdf' && !ignoreFiles.includes(path)
// Ignore the pdf, clsi-cache tar-ball and also ignore the files ignored by the frontend.
({ path }) =>
path !== 'output.pdf' &&
path !== 'output.tar.gz' &&
!ignoreFiles.includes(path)
)
} catch (error) {
if (

View file

@ -15,7 +15,6 @@ const logger = require('@overleaf/logger')
const oneDay = 24 * 60 * 60 * 1000
const Metrics = require('@overleaf/metrics')
const Settings = require('@overleaf/settings')
const diskusage = require('diskusage')
const { callbackify } = require('node:util')
const Path = require('node:path')
const fs = require('node:fs')
@ -33,7 +32,13 @@ async function collectDiskStats() {
const diskStats = {}
for (const path of paths) {
try {
const stats = await diskusage.check(path)
const { blocks, bavail, bsize } = await fs.promises.statfs(path)
const stats = {
// Warning: these values will be wrong by a factor in Docker-for-Mac.
// See https://github.com/docker/for-mac/issues/2136
total: blocks * bsize, // Total size of the file system in bytes
available: bavail * bsize, // Free space available to unprivileged users.
}
const diskAvailablePercent = (stats.available / stats.total) * 100
Metrics.gauge('disk_available_percent', diskAvailablePercent, 1, {
path,
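
The change above swaps the native `diskusage` module for Node's built-in `fs.promises.statfs`; a sketch of the same calculation in isolation:

const fs = require('node:fs')

async function diskAvailablePercent(path) {
  // statfs reports sizes in blocks; multiply by the block size to get bytes.
  const { blocks, bavail, bsize } = await fs.promises.statfs(path)
  const total = blocks * bsize
  const available = bavail * bsize // space available to unprivileged users
  return (available / total) * 100
}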

View file

@ -3,6 +3,7 @@ const OutputCacheManager = require('./OutputCacheManager')
const VALID_COMPILERS = ['pdflatex', 'latex', 'xelatex', 'lualatex']
const MAX_TIMEOUT = 600
const EDITOR_ID_REGEX = /^[a-f0-9-]{36}$/ // UUID
function parse(body, callback) {
const response = {}
@ -28,12 +29,24 @@ function parse(body, callback) {
default: '',
type: 'string',
}),
// Will be populated later. Must always be populated for prom library.
compile: 'initial',
}
response.compiler = _parseAttribute('compiler', compile.options.compiler, {
validValues: VALID_COMPILERS,
default: 'pdflatex',
type: 'string',
})
response.compileFromClsiCache = _parseAttribute(
'compileFromClsiCache',
compile.options.compileFromClsiCache,
{ default: false, type: 'boolean' }
)
response.populateClsiCache = _parseAttribute(
'populateClsiCache',
compile.options.populateClsiCache,
{ default: false, type: 'boolean' }
)
response.enablePdfCaching = _parseAttribute(
'enablePdfCaching',
compile.options.enablePdfCaching,
@ -137,6 +150,10 @@ function parse(body, callback) {
)
response.rootResourcePath = _checkPath(rootResourcePath)
response.editorId = _parseAttribute('editorId', compile.options.editorId, {
type: 'string',
regex: EDITOR_ID_REGEX,
})
response.buildId = _parseAttribute('buildId', compile.options.buildId, {
type: 'string',
regex: OutputCacheManager.BUILD_REGEX,
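
With the parser changes above, a compile request can opt into the cache and identify the editor session; a sketch of the relevant part of the request body (all values are illustrative):

// Illustrative compile request body using the new options.
const body = {
  compile: {
    options: {
      compiler: 'pdflatex',
      compileFromClsiCache: true, // try to seed the compile dir from the cache
      populateClsiCache: true, // upload outputs to the cache after success
      editorId: '123e4567-e89b-42d3-a456-426614174000', // UUID (EDITOR_ID_REGEX)
      buildId: 'build-id', // placeholder; the real value must match OutputCacheManager.BUILD_REGEX
    },
  },
}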

View file

@ -262,6 +262,7 @@ module.exports = ResourceWriter = {
shouldDelete = false
}
if (
path === 'output.tar.gz' ||
path === 'output.synctex.gz' ||
path === 'output.pdfxref' ||
path === 'output.pdf' ||

View file

@ -2,10 +2,10 @@ clsi
--data-dirs=cache,compiles,output
--dependencies=
--docker-repos=gcr.io/overleaf-ops,us-east1-docker.pkg.dev/overleaf-ops/ol-docker
--env-add=ENABLE_PDF_CACHING="true",PDF_CACHING_ENABLE_WORKER_POOL="true",ALLOWED_IMAGES=quay.io/sharelatex/texlive-full:2017.1,TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1,TEX_LIVE_IMAGE_NAME_OVERRIDE=gcr.io/overleaf-ops,TEXLIVE_IMAGE_USER="tex",DOCKER_RUNNER="true",COMPILES_HOST_DIR=$PWD/compiles
--env-add=ENABLE_PDF_CACHING="true",PDF_CACHING_ENABLE_WORKER_POOL="true",ALLOWED_IMAGES=quay.io/sharelatex/texlive-full:2017.1,TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1,TEX_LIVE_IMAGE_NAME_OVERRIDE=gcr.io/overleaf-ops,TEXLIVE_IMAGE_USER="tex",DOCKER_RUNNER="true",COMPILES_HOST_DIR=$PWD/compiles,OUTPUT_HOST_DIR=$PWD/output
--env-pass-through=
--esmock-loader=False
--node-version=20.18.2
--public-repo=True
--script-version=4.5.0
--script-version=4.7.0
--use-large-ci-runner=True

View file

@ -1,10 +1,12 @@
const Path = require('node:path')
const http = require('node:http')
const https = require('node:https')
const os = require('node:os')
http.globalAgent.keepAlive = false
https.globalAgent.keepAlive = false
const isPreEmptible = process.env.PREEMPTIBLE === 'TRUE'
const CLSI_SERVER_ID = os.hostname().replace('-ctr', '')
module.exports = {
compileSizeLimit: process.env.COMPILE_SIZE_LIMIT || '7mb',
@ -48,12 +50,20 @@ module.exports = {
url: `http://${process.env.CLSI_HOST || '127.0.0.1'}:3013`,
// External url prefix for output files, e.g. for requests via load-balancers.
outputUrlPrefix: `${process.env.ZONE ? `/zone/${process.env.ZONE}` : ''}`,
clsiServerId: process.env.CLSI_SERVER_ID || CLSI_SERVER_ID,
downloadHost: process.env.DOWNLOAD_HOST || 'http://localhost:3013',
},
clsiPerf: {
host: `${process.env.CLSI_PERF_HOST || '127.0.0.1'}:${
process.env.CLSI_PERF_PORT || '3043'
}`,
},
clsiCache: {
enabled: !!process.env.CLSI_CACHE_HOST,
url: `http://${process.env.CLSI_CACHE_HOST}:3044`,
downloadURL: `http://${process.env.CLSI_CACHE_NGINX_HOST || process.env.CLSI_CACHE_HOST}:8080`,
},
},
smokeTest: process.env.SMOKE_TEST || false,
@ -88,14 +98,15 @@ if (process.env.ALLOWED_COMPILE_GROUPS) {
}
}
if (process.env.DOCKER_RUNNER) {
let seccompProfilePath
if ((process.env.DOCKER_RUNNER || process.env.SANDBOXED_COMPILES) === 'true') {
module.exports.clsi = {
dockerRunner: process.env.DOCKER_RUNNER === 'true',
dockerRunner: true,
docker: {
runtime: process.env.DOCKER_RUNTIME,
image:
process.env.TEXLIVE_IMAGE || 'quay.io/sharelatex/texlive-full:2017.1',
process.env.TEXLIVE_IMAGE ||
process.env.TEX_LIVE_DOCKER_IMAGE ||
'quay.io/sharelatex/texlive-full:2017.1',
env: {
HOME: '/tmp',
CLSI: 1,
@ -121,6 +132,7 @@ if (process.env.DOCKER_RUNNER) {
const defaultCompileGroupConfig = {
wordcount: { 'HostConfig.AutoRemove': true },
synctex: { 'HostConfig.AutoRemove': true },
'synctex-output': { 'HostConfig.AutoRemove': true },
}
module.exports.clsi.docker.compileGroupConfig = Object.assign(
defaultCompileGroupConfig,
@ -131,6 +143,7 @@ if (process.env.DOCKER_RUNNER) {
process.exit(1)
}
let seccompProfilePath
try {
seccompProfilePath = Path.resolve(__dirname, '../seccomp/clsi-profile.json')
module.exports.clsi.docker.seccomp_profile = JSON.stringify(
@ -165,5 +178,23 @@ if (process.env.DOCKER_RUNNER) {
module.exports.path.synctexBaseDir = () => '/compile'
module.exports.path.sandboxedCompilesHostDir = process.env.COMPILES_HOST_DIR
module.exports.path.sandboxedCompilesHostDirCompiles =
process.env.SANDBOXED_COMPILES_HOST_DIR_COMPILES ||
process.env.SANDBOXED_COMPILES_HOST_DIR ||
process.env.COMPILES_HOST_DIR
if (!module.exports.path.sandboxedCompilesHostDirCompiles) {
throw new Error(
'SANDBOXED_COMPILES enabled, but SANDBOXED_COMPILES_HOST_DIR_COMPILES not set'
)
}
module.exports.path.sandboxedCompilesHostDirOutput =
process.env.SANDBOXED_COMPILES_HOST_DIR_OUTPUT ||
process.env.OUTPUT_HOST_DIR
if (!module.exports.path.sandboxedCompilesHostDirOutput) {
// TODO(das7pad): Enforce in a future major version of Server Pro.
// throw new Error(
// 'SANDBOXED_COMPILES enabled, but SANDBOXED_COMPILES_HOST_DIR_OUTPUT not set'
// )
}
}

View file

@ -31,6 +31,7 @@ services:
TEXLIVE_IMAGE_USER: "tex"
DOCKER_RUNNER: "true"
COMPILES_HOST_DIR: $PWD/compiles
OUTPUT_HOST_DIR: $PWD/output
volumes:
- ./compiles:/overleaf/services/clsi/compiles
- /var/run/docker.sock:/var/run/docker.sock

View file

@ -49,5 +49,6 @@ services:
TEXLIVE_IMAGE_USER: "tex"
DOCKER_RUNNER: "true"
COMPILES_HOST_DIR: $PWD/compiles
OUTPUT_HOST_DIR: $PWD/output
command: npm run --silent test:acceptance

View file

@ -27,13 +27,13 @@
"async": "^3.2.5",
"body-parser": "^1.20.3",
"bunyan": "^1.8.15",
"diskusage": "^1.1.3",
"dockerode": "^4.0.5",
"express": "^4.21.2",
"lodash": "^4.17.21",
"p-limit": "^3.1.0",
"request": "^2.88.2",
"send": "^0.19.0",
"tar-fs": "^3.0.4",
"workerpool": "^6.1.5"
},
"devDependencies": {

View file

@ -11,6 +11,7 @@
const Client = require('./helpers/Client')
const request = require('request')
const ClsiApp = require('./helpers/ClsiApp')
const { expect } = require('chai')
describe('Broken LaTeX file', function () {
before(function (done) {
@ -62,6 +63,10 @@ Hello world
return this.body.compile.status.should.equal('failure')
})
it('should return isInitialCompile flag', function () {
expect(this.body.compile.stats.isInitialCompile).to.equal(1)
})
it('should return output files', function () {
// NOTE: No output.pdf file.
this.body.compile.outputFiles
@ -98,6 +103,10 @@ Hello world
return this.body.compile.status.should.equal('failure')
})
it('should not return isInitialCompile flag', function () {
expect(this.body.compile.stats.isInitialCompile).to.not.exist
})
it('should return output files', function () {
// NOTE: No output.pdf file.
this.body.compile.outputFiles

View file

@ -11,6 +11,7 @@
const Client = require('./helpers/Client')
const request = require('request')
const ClsiApp = require('./helpers/ClsiApp')
const { expect } = require('chai')
describe('Timed out compile', function () {
before(function (done) {
@ -54,6 +55,10 @@ describe('Timed out compile', function () {
return this.body.compile.status.should.equal('timedout')
})
it('should return isInitialCompile flag', function () {
expect(this.body.compile.stats.isInitialCompile).to.equal(1)
})
return it('should return the log output file name', function () {
const outputFilePaths = this.body.compile.outputFiles.map(x => x.path)
return outputFilePaths.should.include('output.log')

View file

@ -20,7 +20,7 @@ SandboxedModule.configure({
err() {},
},
},
globals: { Buffer, console, process, URL },
globals: { Buffer, console, process, URL, Math },
sourceTransformers: {
removeNodePrefix: function (source) {
return source.replace(/require\(['"]node:/g, "require('")

View file

@ -1,54 +1,11 @@
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = require('node:path').join(
__dirname,
'../../../app/js/CompileController'
)
const Errors = require('../../../app/js/Errors')
function tryImageNameValidation(method, imageNameField) {
describe('when allowedImages is set', function () {
beforeEach(function () {
this.Settings.clsi = { docker: {} }
this.Settings.clsi.docker.allowedImages = [
'repo/image:tag1',
'repo/image:tag2',
]
this.res.send = sinon.stub()
this.res.status = sinon.stub().returns({ send: this.res.send })
this.CompileManager[method].reset()
})
describe('with an invalid image', function () {
beforeEach(function () {
this.req.query[imageNameField] = 'something/evil:1337'
this.CompileController[method](this.req, this.res, this.next)
})
it('should return a 400', function () {
expect(this.res.status.calledWith(400)).to.equal(true)
})
it('should not run the query', function () {
expect(this.CompileManager[method].called).to.equal(false)
})
})
describe('with a valid image', function () {
beforeEach(function () {
this.req.query[imageNameField] = 'repo/image:tag1'
this.CompileController[method](this.req, this.res, this.next)
})
it('should not return a 400', function () {
expect(this.res.status.calledWith(400)).to.equal(false)
})
it('should run the query', function () {
expect(this.CompileManager[method].called).to.equal(true)
})
})
})
}
describe('CompileController', function () {
beforeEach(function () {
this.buildId = 'build-id-123'
@ -61,6 +18,11 @@ describe('CompileController', function () {
clsi: {
url: 'http://clsi.example.com',
outputUrlPrefix: '/zone/b',
downloadHost: 'http://localhost:3013',
},
clsiCache: {
enabled: false,
url: 'http://localhost:3044',
},
},
}),
@ -68,6 +30,11 @@ describe('CompileController', function () {
Timer: sinon.stub().returns({ done: sinon.stub() }),
},
'./ProjectPersistenceManager': (this.ProjectPersistenceManager = {}),
'./CLSICacheHandler': {
notifyCLSICacheAboutBuild: sinon.stub(),
downloadLatestCompileCache: sinon.stub().resolves(),
downloadOutputDotSynctexFromCompileCache: sinon.stub().resolves(),
},
'./Errors': (this.Erros = Errors),
},
})
@ -113,16 +80,21 @@ describe('CompileController', function () {
this.timings = { bar: 2 }
this.res.status = sinon.stub().returnsThis()
this.res.send = sinon.stub()
this.CompileManager.doCompileWithLock = sinon
.stub()
.callsFake((_req, stats, timings, cb) => {
Object.assign(stats, this.stats)
Object.assign(timings, this.timings)
cb(null, {
outputFiles: this.output_files,
buildId: this.buildId,
})
})
})
describe('successfully', function () {
beforeEach(function () {
this.CompileManager.doCompileWithLock = sinon.stub().yields(null, {
outputFiles: this.output_files,
stats: this.stats,
timings: this.timings,
buildId: this.buildId,
})
this.CompileController.compile(this.req, this.res)
})
@ -166,12 +138,6 @@ describe('CompileController', function () {
describe('without a outputUrlPrefix', function () {
beforeEach(function () {
this.Settings.apis.clsi.outputUrlPrefix = ''
this.CompileManager.doCompileWithLock = sinon.stub().yields(null, {
outputFiles: this.output_files,
stats: this.stats,
timings: this.timings,
buildId: this.buildId,
})
this.CompileController.compile(this.req, this.res)
})
@ -210,33 +176,35 @@ describe('CompileController', function () {
build: 1234,
},
]
this.CompileManager.doCompileWithLock = sinon.stub().yields(null, {
outputFiles: this.output_files,
stats: this.stats,
timings: this.timings,
buildId: this.buildId,
})
this.CompileManager.doCompileWithLock = sinon
.stub()
.callsFake((_req, stats, timings, cb) => {
Object.assign(stats, this.stats)
Object.assign(timings, this.timings)
cb(null, {
outputFiles: this.output_files,
buildId: this.buildId,
})
})
this.CompileController.compile(this.req, this.res)
})
it('should return the JSON response with status failure', function () {
this.res.status.calledWith(200).should.equal(true)
this.res.send
.calledWith({
compile: {
status: 'failure',
error: null,
stats: this.stats,
timings: this.timings,
outputUrlPrefix: '/zone/b',
buildId: this.buildId,
outputFiles: this.output_files.map(file => ({
url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`,
...file,
})),
},
})
.should.equal(true)
this.res.send.should.have.been.calledWith({
compile: {
status: 'failure',
error: null,
stats: this.stats,
timings: this.timings,
outputUrlPrefix: '/zone/b',
buildId: this.buildId,
outputFiles: this.output_files.map(file => ({
url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`,
...file,
})),
},
})
})
})
@ -255,33 +223,35 @@ describe('CompileController', function () {
build: 1234,
},
]
this.CompileManager.doCompileWithLock = sinon.stub().yields(null, {
outputFiles: this.output_files,
stats: this.stats,
timings: this.timings,
buildId: this.buildId,
})
this.CompileManager.doCompileWithLock = sinon
.stub()
.callsFake((_req, stats, timings, cb) => {
Object.assign(stats, this.stats)
Object.assign(timings, this.timings)
cb(null, {
outputFiles: this.output_files,
buildId: this.buildId,
})
})
this.CompileController.compile(this.req, this.res)
})
it('should return the JSON response with status failure', function () {
this.res.status.calledWith(200).should.equal(true)
this.res.send
.calledWith({
compile: {
status: 'failure',
error: null,
stats: this.stats,
buildId: this.buildId,
timings: this.timings,
outputUrlPrefix: '/zone/b',
outputFiles: this.output_files.map(file => ({
url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`,
...file,
})),
},
})
.should.equal(true)
this.res.send.should.have.been.calledWith({
compile: {
status: 'failure',
error: null,
stats: this.stats,
buildId: this.buildId,
timings: this.timings,
outputUrlPrefix: '/zone/b',
outputFiles: this.output_files.map(file => ({
url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`,
...file,
})),
},
})
})
})
@ -291,7 +261,11 @@ describe('CompileController', function () {
error.buildId = this.buildId
this.CompileManager.doCompileWithLock = sinon
.stub()
.callsArgWith(1, error, null)
.callsFake((_req, stats, timings, cb) => {
Object.assign(stats, this.stats)
Object.assign(timings, this.timings)
cb(error)
})
this.CompileController.compile(this.req, this.res)
})
@ -305,9 +279,8 @@ describe('CompileController', function () {
outputUrlPrefix: '/zone/b',
outputFiles: [],
buildId: this.buildId,
// JSON.stringify will omit these
stats: undefined,
timings: undefined,
stats: this.stats,
timings: this.timings,
},
})
.should.equal(true)
@ -321,7 +294,11 @@ describe('CompileController', function () {
)
this.CompileManager.doCompileWithLock = sinon
.stub()
.callsArgWith(1, error, null)
.callsFake((_req, stats, timings, cb) => {
Object.assign(stats, this.stats)
Object.assign(timings, this.timings)
cb(error)
})
this.CompileController.compile(this.req, this.res)
})
@ -334,9 +311,10 @@ describe('CompileController', function () {
error: 'too many concurrent compile requests',
outputUrlPrefix: '/zone/b',
outputFiles: [],
stats: this.stats,
timings: this.timings,
// JSON.stringify will omit these undefined values
buildId: undefined,
stats: undefined,
timings: undefined,
},
})
.should.equal(true)
@ -349,7 +327,11 @@ describe('CompileController', function () {
this.error.timedout = true
this.CompileManager.doCompileWithLock = sinon
.stub()
.callsArgWith(1, this.error, null)
.callsFake((_req, stats, timings, cb) => {
Object.assign(stats, this.stats)
Object.assign(timings, this.timings)
cb(this.error)
})
this.CompileController.compile(this.req, this.res)
})
@ -362,10 +344,10 @@ describe('CompileController', function () {
error: this.message,
outputUrlPrefix: '/zone/b',
outputFiles: [],
// JSON.stringify will omit these
stats: this.stats,
timings: this.timings,
// JSON.stringify will omit these undefined values
buildId: undefined,
stats: undefined,
timings: undefined,
},
})
.should.equal(true)
@ -376,7 +358,11 @@ describe('CompileController', function () {
beforeEach(function () {
this.CompileManager.doCompileWithLock = sinon
.stub()
.callsArgWith(1, null, [])
.callsFake((_req, stats, timings, cb) => {
Object.assign(stats, this.stats)
Object.assign(timings, this.timings)
cb(null, {})
})
this.CompileController.compile(this.req, this.res)
})
@ -389,10 +375,10 @@ describe('CompileController', function () {
status: 'failure',
outputUrlPrefix: '/zone/b',
outputFiles: [],
// JSON.stringify will omit these
stats: this.stats,
timings: this.timings,
// JSON.stringify will omit these undefined values
buildId: undefined,
stats: undefined,
timings: undefined,
},
})
.should.equal(true)
@ -439,8 +425,6 @@ describe('CompileController', function () {
})
.should.equal(true)
})
tryImageNameValidation('syncFromCode', 'imageName')
})
describe('syncFromPdf', function () {
@ -476,8 +460,6 @@ describe('CompileController', function () {
})
.should.equal(true)
})
tryImageNameValidation('syncFromPdf', 'imageName')
})
describe('wordcount', function () {
@ -511,7 +493,5 @@ describe('CompileController', function () {
})
.should.equal(true)
})
tryImageNameValidation('wordcount', 'image')
})
})

View file

@ -62,6 +62,7 @@ describe('CompileManager', function () {
}
this.OutputCacheManager = {
promises: {
queueDirOperation: sinon.stub().callsArg(1),
saveOutputFiles: sinon
.stub()
.resolves({ outputFiles: this.buildFiles, buildId: this.buildId }),
@ -160,6 +161,11 @@ describe('CompileManager', function () {
'./LockManager': this.LockManager,
'./SynctexOutputParser': this.SynctexOutputParser,
'fs/promises': this.fsPromises,
'./CLSICacheHandler': {
notifyCLSICacheAboutBuild: sinon.stub(),
downloadLatestCompileCache: sinon.stub().resolves(),
downloadOutputDotSynctexFromCompileCache: sinon.stub().resolves(),
},
},
})
})
@ -177,6 +183,11 @@ describe('CompileManager', function () {
flags: (this.flags = ['-file-line-error']),
compileGroup: (this.compileGroup = 'compile-group'),
stopOnFirstError: false,
metricsOpts: {
path: 'clsi-perf',
method: 'minimal',
compile: 'initial',
},
}
this.env = {
OVERLEAF_PROJECT_ID: this.projectId,
@ -188,7 +199,7 @@ describe('CompileManager', function () {
const error = new Error('locked')
this.LockManager.acquire.throws(error)
await expect(
this.CompileManager.promises.doCompileWithLock(this.request)
this.CompileManager.promises.doCompileWithLock(this.request, {}, {})
).to.be.rejectedWith(error)
})
@ -206,7 +217,9 @@ describe('CompileManager', function () {
describe('normally', function () {
beforeEach(async function () {
this.result = await this.CompileManager.promises.doCompileWithLock(
this.request
this.request,
{},
{}
)
})
@ -260,7 +273,11 @@ describe('CompileManager', function () {
describe('with draft mode', function () {
beforeEach(async function () {
this.request.draft = true
await this.CompileManager.promises.doCompileWithLock(this.request)
await this.CompileManager.promises.doCompileWithLock(
this.request,
{},
{}
)
})
it('should inject the draft mode header', function () {
@ -273,7 +290,11 @@ describe('CompileManager', function () {
describe('with a check option', function () {
beforeEach(async function () {
this.request.check = 'error'
await this.CompileManager.promises.doCompileWithLock(this.request)
await this.CompileManager.promises.doCompileWithLock(
this.request,
{},
{}
)
})
it('should run chktex', function () {
@ -305,7 +326,11 @@ describe('CompileManager', function () {
beforeEach(async function () {
this.request.rootResourcePath = 'main.Rtex'
this.request.check = 'error'
await this.CompileManager.promises.doCompileWithLock(this.request)
await this.CompileManager.promises.doCompileWithLock(
this.request,
{},
{}
)
})
it('should not run chktex', function () {
@ -334,7 +359,7 @@ describe('CompileManager', function () {
error.timedout = true
this.LatexRunner.promises.runLatex.rejects(error)
await expect(
this.CompileManager.promises.doCompileWithLock(this.request)
this.CompileManager.promises.doCompileWithLock(this.request, {}, {})
).to.be.rejected
})
@ -357,7 +382,7 @@ describe('CompileManager', function () {
error.terminated = true
this.LatexRunner.promises.runLatex.rejects(error)
await expect(
this.CompileManager.promises.doCompileWithLock(this.request)
this.CompileManager.promises.doCompileWithLock(this.request, {}, {})
).to.be.rejected
})
@ -455,7 +480,7 @@ describe('CompileManager', function () {
this.filename,
this.line,
this.column,
customImageName
{ imageName: customImageName }
)
})
@ -497,7 +522,7 @@ describe('CompileManager', function () {
this.page,
this.h,
this.v,
''
{ imageName: '' }
)
})
@ -532,7 +557,7 @@ describe('CompileManager', function () {
this.page,
this.h,
this.v,
customImageName
{ imageName: customImageName }
)
})

View file

@ -76,8 +76,11 @@ describe('DockerRunner', function () {
this.env = {}
this.callback = sinon.stub()
this.project_id = 'project-id-123'
this.volumes = { '/local/compile/directory': '/compile' }
this.volumes = { '/some/host/dir/compiles/directory': '/compile' }
this.Settings.clsi.docker.image = this.defaultImage = 'default-image'
this.Settings.path.sandboxedCompilesHostDirCompiles =
'/some/host/dir/compiles'
this.Settings.path.sandboxedCompilesHostDirOutput = '/some/host/dir/output'
this.compileGroup = 'compile-group'
return (this.Settings.clsi.docker.env = { PATH: 'mock-path' })
})
@ -151,9 +154,8 @@ describe('DockerRunner', function () {
})
})
describe('when path.sandboxedCompilesHostDir is set', function () {
describe('standard compile', function () {
beforeEach(function () {
this.Settings.path.sandboxedCompilesHostDir = '/some/host/dir/compiles'
this.directory = '/var/lib/overleaf/data/compiles/xyz'
this.DockerRunner._runAndWaitForContainer = sinon
.stub()
@ -183,6 +185,99 @@ describe('DockerRunner', function () {
})
})
describe('synctex-output', function () {
beforeEach(function () {
this.directory = '/var/lib/overleaf/data/output/xyz/generated-files/id'
this.DockerRunner._runAndWaitForContainer = sinon
.stub()
.callsArgWith(3, null, (this.output = 'mock-output'))
this.DockerRunner.run(
this.project_id,
this.command,
this.directory,
this.image,
this.timeout,
this.env,
'synctex-output',
this.callback
)
})
it('should re-write the bind directory and set ro flag', function () {
const volumes =
this.DockerRunner._runAndWaitForContainer.lastCall.args[1]
expect(volumes).to.deep.equal({
'/some/host/dir/output/xyz/generated-files/id': '/compile:ro',
})
})
it('should call the callback', function () {
this.callback.calledWith(null, this.output).should.equal(true)
})
})
describe('synctex', function () {
beforeEach(function () {
this.directory = '/var/lib/overleaf/data/compile/xyz'
this.DockerRunner._runAndWaitForContainer = sinon
.stub()
.callsArgWith(3, null, (this.output = 'mock-output'))
this.DockerRunner.run(
this.project_id,
this.command,
this.directory,
this.image,
this.timeout,
this.env,
'synctex',
this.callback
)
})
it('should re-write the bind directory', function () {
const volumes =
this.DockerRunner._runAndWaitForContainer.lastCall.args[1]
expect(volumes).to.deep.equal({
'/some/host/dir/compiles/xyz': '/compile:ro',
})
})
it('should call the callback', function () {
this.callback.calledWith(null, this.output).should.equal(true)
})
})
describe('wordcount', function () {
beforeEach(function () {
this.directory = '/var/lib/overleaf/data/compile/xyz'
this.DockerRunner._runAndWaitForContainer = sinon
.stub()
.callsArgWith(3, null, (this.output = 'mock-output'))
this.DockerRunner.run(
this.project_id,
this.command,
this.directory,
this.image,
this.timeout,
this.env,
'wordcount',
this.callback
)
})
it('should re-write the bind directory', function () {
const volumes =
this.DockerRunner._runAndWaitForContainer.lastCall.args[1]
expect(volumes).to.deep.equal({
'/some/host/dir/compiles/xyz': '/compile:ro',
})
})
it('should call the callback', function () {
this.callback.calledWith(null, this.output).should.equal(true)
})
})
describe('when the run throws an error', function () {
beforeEach(function () {
let firstTime = true
@ -390,7 +485,7 @@ describe('DockerRunner', function () {
const options =
this.DockerRunner._runAndWaitForContainer.lastCall.args[0]
return expect(options.HostConfig).to.deep.include({
Binds: ['/local/compile/directory:/compile:rw'],
Binds: ['/some/host/dir/compiles/directory:/compile:rw'],
LogConfig: { Type: 'none', Config: {} },
CapDrop: 'ALL',
SecurityOpt: ['no-new-privileges'],
@ -562,82 +657,6 @@ describe('DockerRunner', function () {
})
})
describe('when a volume does not exist', function () {
beforeEach(function () {
this.fs.stat = sinon.stub().yields(new Error('no such path'))
return this.DockerRunner.startContainer(
this.options,
this.volumes,
this.attachStreamHandler,
this.callback
)
})
it('should not try to create the container', function () {
return this.createContainer.called.should.equal(false)
})
it('should call the callback with an error', function () {
this.callback.calledWith(sinon.match(Error)).should.equal(true)
})
})
describe('when a volume exists but is not a directory', function () {
beforeEach(function () {
this.fs.stat = sinon.stub().yields(null, {
isDirectory() {
return false
},
})
return this.DockerRunner.startContainer(
this.options,
this.volumes,
this.attachStreamHandler,
this.callback
)
})
it('should not try to create the container', function () {
return this.createContainer.called.should.equal(false)
})
it('should call the callback with an error', function () {
this.callback.calledWith(sinon.match(Error)).should.equal(true)
})
})
describe('when a volume does not exist, but sibling-containers are used', function () {
beforeEach(function () {
this.fs.stat = sinon.stub().yields(new Error('no such path'))
this.Settings.path.sandboxedCompilesHostDir = '/some/path'
this.container.start = sinon.stub().yields()
return this.DockerRunner.startContainer(
this.options,
this.volumes,
() => {},
this.callback
)
})
afterEach(function () {
return delete this.Settings.path.sandboxedCompilesHostDir
})
it('should start the container with the given name', function () {
this.getContainer.calledWith(this.options.name).should.equal(true)
return this.container.start.called.should.equal(true)
})
it('should not try to create the container', function () {
return this.createContainer.called.should.equal(false)
})
return it('should call the callback', function () {
this.callback.called.should.equal(true)
return this.callback.calledWith(new Error()).should.equal(false)
})
})
return describe('when the container tries to be created, but already has been (race condition)', function () {})
})

View file

@ -21,12 +21,16 @@ const tk = require('timekeeper')
describe('ProjectPersistenceManager', function () {
beforeEach(function () {
this.fsPromises = {
statfs: sinon.stub(),
}
this.ProjectPersistenceManager = SandboxedModule.require(modulePath, {
requires: {
'@overleaf/metrics': (this.Metrics = { gauge: sinon.stub() }),
'./UrlCache': (this.UrlCache = {}),
'./CompileManager': (this.CompileManager = {}),
diskusage: (this.diskusage = { check: sinon.stub() }),
fs: { promises: this.fsPromises },
'@overleaf/settings': (this.settings = {
project_cache_length_ms: 1000,
path: {
@ -44,9 +48,10 @@ describe('ProjectPersistenceManager', function () {
describe('refreshExpiryTimeout', function () {
it('should leave expiry alone if plenty of disk', function (done) {
this.diskusage.check.resolves({
available: 40,
total: 100,
this.fsPromises.statfs.resolves({
blocks: 100,
bsize: 1,
bavail: 40,
})
this.ProjectPersistenceManager.refreshExpiryTimeout(() => {
@ -62,9 +67,10 @@ describe('ProjectPersistenceManager', function () {
})
it('should drop EXPIRY_TIMEOUT 10% if low disk usage', function (done) {
this.diskusage.check.resolves({
available: 5,
total: 100,
this.fsPromises.statfs.resolves({
blocks: 100,
bsize: 1,
bavail: 5,
})
this.ProjectPersistenceManager.refreshExpiryTimeout(() => {
@ -78,9 +84,10 @@ describe('ProjectPersistenceManager', function () {
})
it('should not drop EXPIRY_TIMEOUT to below 50% of project_cache_length_ms', function (done) {
this.diskusage.check.resolves({
available: 5,
total: 100,
this.fsPromises.statfs.resolves({
blocks: 100,
bsize: 1,
bavail: 5,
})
this.ProjectPersistenceManager.EXPIRY_TIMEOUT = 500
this.ProjectPersistenceManager.refreshExpiryTimeout(() => {
@ -94,7 +101,7 @@ describe('ProjectPersistenceManager', function () {
})
it('should not modify EXPIRY_TIMEOUT if there is an error getting disk values', function (done) {
this.diskusage.check.throws(new Error())
this.fsPromises.statfs.rejects(new Error())
this.ProjectPersistenceManager.refreshExpiryTimeout(() => {
this.ProjectPersistenceManager.EXPIRY_TIMEOUT.should.equal(1000)
done()
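The tests above swap the diskusage dependency for Node's built-in fs.promises.statfs. As a minimal sketch (the helper name is made up and the exact way ProjectPersistenceManager combines the fields is not shown in this diff), the same available/total figures can be derived from the statfs result like this:
// Sketch only: deriving disk figures from fs.statfs, matching the fixtures above
// (blocks: 100, bsize: 1, bavail: 40 -> total: 100, available: 40).
const { statfs } = require('node:fs/promises')

async function getDiskSpace(path) {
  const stats = await statfs(path)
  return {
    total: stats.blocks * stats.bsize, // size of the filesystem in bytes
    available: stats.bavail * stats.bsize, // bytes available to unprivileged users
  }
}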

View file

@ -6,4 +6,4 @@ contacts
--esmock-loader=True
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0

View file

@ -6,4 +6,4 @@ docstore
--esmock-loader=False
--node-version=20.18.2
--public-repo=True
--script-version=4.5.0
--script-version=4.7.0

View file

@ -160,14 +160,6 @@ const DocumentManager = {
alreadyLoaded,
} = await DocumentManager.getDoc(projectId, docId)
if (oldLines != null && oldLines.length > 0 && oldLines[0].text != null) {
logger.debug(
{ docId, projectId, oldLines, newLines },
'document is JSON so not updating'
)
return
}
logger.debug(
{ docId, projectId, oldLines, newLines },
'setting a document via http'

View file

@ -6,4 +6,4 @@ document-updater
--esmock-loader=False
--node-version=20.18.2
--public-repo=True
--script-version=4.5.0
--script-version=4.7.0

View file

@ -21,6 +21,7 @@ services:
ELASTIC_SEARCH_DSN: es:9200
REDIS_HOST: redis
QUEUES_REDIS_HOST: redis
HISTORY_REDIS_HOST: redis
ANALYTICS_QUEUES_REDIS_HOST: redis
MONGO_HOST: mongo
POSTGRES_HOST: postgres

View file

@ -30,6 +30,7 @@ services:
environment:
ELASTIC_SEARCH_DSN: es:9200
REDIS_HOST: redis
HISTORY_REDIS_HOST: redis
QUEUES_REDIS_HOST: redis
ANALYTICS_QUEUES_REDIS_HOST: redis
MONGO_HOST: mongo

View file

@ -0,0 +1,211 @@
// @ts-check
const Settings = require('@overleaf/settings')
const logger = require('@overleaf/logger')
const RedisManager = require('../app/js/RedisManager')
const minimist = require('minimist')
const { db, ObjectId } = require('../app/js/mongodb')
const ProjectManager = require('../app/js/ProjectManager')
const OError = require('@overleaf/o-error')
const docUpdaterKeys = Settings.redis.documentupdater.key_schema
const rclient = RedisManager.rclient
const { verbose, commit, ...args } = minimist(process.argv.slice(2), {
boolean: ['verbose', 'commit'],
string: ['batchSize'],
default: {
batchSize: '1000',
},
})
logger.logger.level(verbose ? 'debug' : 'warn')
const batchSize = parseInt(args.batchSize, 10)
/**
* @typedef {import('ioredis').Redis} Redis
*/
/**
*
* @param {string} key
* @return {string|void}
*/
function extractDocId(key) {
const matches = key.match(/ProjectHistoryId:\{(.*?)\}/)
if (matches) {
return matches[1]
}
}
/**
*
* @param {string} docId
* @return {Promise<{projectId: string, historyId: string}>}
*/
async function getHistoryId(docId) {
const doc = await db.docs.findOne(
{ _id: new ObjectId(docId) },
{ projection: { project_id: 1 }, readPreference: 'secondaryPreferred' }
)
if (!doc) {
throw new OError('Doc not present in mongo', { docId })
}
const project = await db.projects.findOne(
{ _id: doc.project_id },
{
projection: { 'overleaf.history': 1 },
readPreference: 'secondaryPreferred',
}
)
if (!project?.overleaf?.history?.id) {
throw new OError('Project not present in mongo (or has no history id)', {
docId,
project,
doc,
})
}
return {
historyId: project?.overleaf?.history?.id,
projectId: doc.project_id.toString(),
}
}
/**
* @typedef {Object} UpdateableDoc
* @property {string} docId
* @property {string} projectId
* @property {string} historyId
*/
/**
*
* @param {Redis} node
* @param {Array<string>} docIds
* @return {Promise<Array<UpdateableDoc>>}
*/
async function findDocsWithMissingHistoryIds(node, docIds) {
const historyIds = await node.mget(
docIds.map(docId => docUpdaterKeys.projectHistoryId({ doc_id: docId }))
)
const results = []
for (const index in docIds) {
const historyId = historyIds[index]
const docId = docIds[index]
if (!historyId) {
try {
const { projectId, historyId } = await getHistoryId(docId)
results.push({ projectId, historyId, docId })
} catch (error) {
logger.warn(
{ error },
'Error gathering data for doc with missing history id'
)
}
}
}
return results
}
/**
*
* @param {Array<UpdateableDoc>} updates
* @return {Promise<void>}
*/
async function fixAndFlushProjects(updates) {
for (const update of updates) {
if (commit) {
try {
await rclient.set(
docUpdaterKeys.projectHistoryId({ doc_id: update.docId }),
update.historyId
)
logger.debug({ ...update }, 'Set history id in redis')
await ProjectManager.promises.flushAndDeleteProjectWithLocks(
update.projectId,
{}
)
logger.debug({ ...update }, 'Flushed project')
} catch (err) {
logger.error({ err, ...update }, 'Error fixing and flushing project')
}
} else {
logger.debug(
{ ...update },
'Would have set history id in redis and flushed'
)
}
}
}
/**
*
* @param {Array<Redis>} nodes
* @param {number} batchSize
* @return {Promise<void>}
*/
async function scanNodes(nodes, batchSize = 1000) {
let scanned = 0
for (const node of nodes) {
const stream = node.scanStream({
match: docUpdaterKeys.projectHistoryId({ doc_id: '*' }),
count: batchSize,
})
for await (const docKeys of stream) {
if (docKeys.length === 0) {
continue
}
stream.pause()
scanned += docKeys.length
const docIds = docKeys
.map((/** @type {string} */ docKey) => extractDocId(docKey))
.filter(Boolean)
try {
const updates = await findDocsWithMissingHistoryIds(node, docIds)
if (updates.length > 0) {
logger.info({ updates }, 'Found doc(s) with missing history ids')
await fixAndFlushProjects(updates)
}
} catch (error) {
logger.error({ error, docKeys }, 'Error processing batch')
} finally {
stream.resume()
}
}
logger.info({ scanned, server: node.serverInfo.role }, 'Scanned node')
}
}
async function main({ batchSize }) {
const nodes = (typeof rclient.nodes === 'function'
? rclient.nodes('master')
: undefined) || [rclient]
await scanNodes(nodes, batchSize)
}
let code = 0
main({ batchSize })
.then(() => {
logger.info({}, 'done')
})
.catch(error => {
logger.error({ error }, 'error')
code = 1
})
.finally(() => {
rclient.quit().then(() => process.exit(code))
})
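For illustration, the key parsing used by the script above can be exercised on a sample key (the doc id below is made up; the key layout comes from the regular expression in extractDocId):
// Example only: extracting the doc id from a `ProjectHistoryId:{<docId>}` key.
function extractDocId(key) {
  const matches = key.match(/ProjectHistoryId:\{(.*?)\}/)
  if (matches) {
    return matches[1]
  }
}

console.log(extractDocId('ProjectHistoryId:{64b8f0c2a1b2c3d4e5f60718}'))
// => '64b8f0c2a1b2c3d4e5f60718'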

View file

@ -16,13 +16,16 @@ const DocUpdaterClient = require('./helpers/DocUpdaterClient')
const DocUpdaterApp = require('./helpers/DocUpdaterApp')
describe('Applying updates to a doc', function () {
before(function (done) {
beforeEach(function (done) {
sinon.spy(MockWebApi, 'getDocument')
this.lines = ['one', 'two', 'three']
this.version = 42
this.op = {
i: 'one and a half\n',
p: 4,
}
this.project_id = DocUpdaterClient.randomId()
this.doc_id = DocUpdaterClient.randomId()
this.update = {
doc: this.doc_id,
op: [this.op],
@ -31,12 +34,12 @@ describe('Applying updates to a doc', function () {
this.result = ['one', 'one and a half', 'two', 'three']
DocUpdaterApp.ensureRunning(done)
})
afterEach(function () {
sinon.restore()
})
describe('when the document is not loaded', function () {
before(function (done) {
this.project_id = DocUpdaterClient.randomId()
this.doc_id = DocUpdaterClient.randomId()
sinon.spy(MockWebApi, 'getDocument')
beforeEach(function (done) {
this.startTime = Date.now()
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
@ -50,15 +53,25 @@ describe('Applying updates to a doc', function () {
if (error != null) {
throw error
}
setTimeout(done, 200)
setTimeout(() => {
rclientProjectHistory.get(
ProjectHistoryKeys.projectHistoryFirstOpTimestamp({
project_id: this.project_id,
}),
(error, result) => {
if (error != null) {
throw error
}
result = parseInt(result, 10)
this.firstOpTimestamp = result
done()
}
)
}, 200)
}
)
})
after(function () {
MockWebApi.getDocument.restore()
})
it('should load the document from the web API', function () {
MockWebApi.getDocument
.calledWith(this.project_id, this.doc_id)
@ -92,21 +105,8 @@ describe('Applying updates to a doc', function () {
)
})
it('should set the first op timestamp', function (done) {
rclientProjectHistory.get(
ProjectHistoryKeys.projectHistoryFirstOpTimestamp({
project_id: this.project_id,
}),
(error, result) => {
if (error != null) {
throw error
}
result = parseInt(result, 10)
result.should.be.within(this.startTime, Date.now())
this.firstOpTimestamp = result
done()
}
)
it('should set the first op timestamp', function () {
this.firstOpTimestamp.should.be.within(this.startTime, Date.now())
})
it('should yield last updated time', function (done) {
@ -138,7 +138,7 @@ describe('Applying updates to a doc', function () {
})
describe('when sending another update', function () {
before(function (done) {
beforeEach(function (done) {
this.timeout(10000)
this.second_update = Object.assign({}, this.update)
this.second_update.v = this.version + 1
@ -207,13 +207,85 @@ describe('Applying updates to a doc', function () {
)
})
})
describe('when another client is sending a concurrent update', function () {
beforeEach(function (done) {
this.timeout(10000)
this.otherUpdate = {
doc: this.doc_id,
op: [{ p: 8, i: 'two and a half\n' }],
v: this.version,
meta: { source: 'other-random-publicId' },
}
this.secondStartTime = Date.now()
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
this.otherUpdate,
error => {
if (error != null) {
throw error
}
setTimeout(done, 200)
}
)
})
it('should update the doc', function (done) {
DocUpdaterClient.getDoc(
this.project_id,
this.doc_id,
(error, res, doc) => {
if (error) return done(error)
doc.lines.should.deep.equal([
'one',
'one and a half',
'two',
'two and a half',
'three',
])
done()
}
)
})
it('should not change the first op timestamp', function (done) {
rclientProjectHistory.get(
ProjectHistoryKeys.projectHistoryFirstOpTimestamp({
project_id: this.project_id,
}),
(error, result) => {
if (error != null) {
throw error
}
result = parseInt(result, 10)
result.should.equal(this.firstOpTimestamp)
done()
}
)
})
it('should yield last updated time', function (done) {
DocUpdaterClient.getProjectLastUpdatedAt(
this.project_id,
(error, res, body) => {
if (error != null) {
throw error
}
res.statusCode.should.equal(200)
body.lastUpdatedAt.should.be.within(
this.secondStartTime,
Date.now()
)
done()
}
)
})
})
})
describe('when the document is loaded', function () {
before(function (done) {
this.project_id = DocUpdaterClient.randomId()
this.doc_id = DocUpdaterClient.randomId()
beforeEach(function (done) {
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version,
@ -222,7 +294,7 @@ describe('Applying updates to a doc', function () {
if (error != null) {
throw error
}
sinon.spy(MockWebApi, 'getDocument')
sinon.resetHistory()
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
@ -237,10 +309,6 @@ describe('Applying updates to a doc', function () {
})
})
after(function () {
MockWebApi.getDocument.restore()
})
it('should not need to call the web api', function () {
MockWebApi.getDocument.called.should.equal(false)
})
@ -272,10 +340,7 @@ describe('Applying updates to a doc', function () {
})
describe('when the document is loaded and is using project-history only', function () {
before(function (done) {
this.project_id = DocUpdaterClient.randomId()
this.doc_id = DocUpdaterClient.randomId()
beforeEach(function (done) {
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version,
@ -284,7 +349,7 @@ describe('Applying updates to a doc', function () {
if (error != null) {
throw error
}
sinon.spy(MockWebApi, 'getDocument')
sinon.resetHistory()
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
@ -299,10 +364,6 @@ describe('Applying updates to a doc', function () {
})
})
after(function () {
MockWebApi.getDocument.restore()
})
it('should update the doc', function (done) {
DocUpdaterClient.getDoc(
this.project_id,
@ -331,9 +392,7 @@ describe('Applying updates to a doc', function () {
describe('when the document has been deleted', function () {
describe('when the ops come in a single linear order', function () {
before(function (done) {
this.project_id = DocUpdaterClient.randomId()
this.doc_id = DocUpdaterClient.randomId()
beforeEach(function (done) {
const lines = ['', '', '']
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines,
@ -353,54 +412,49 @@ describe('Applying updates to a doc', function () {
{ doc_id: this.doc_id, v: 10, op: [{ i: 'd', p: 10 }] },
]
this.my_result = ['hello world', '', '']
done()
})
it('should be able to continue applying updates when the project has been deleted', function (done) {
let update
const actions = []
for (update of this.updates.slice(0, 6)) {
;(update => {
actions.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
update,
callback
)
for (const update of this.updates.slice(0, 6)) {
actions.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
update,
callback
)
})(update)
)
}
actions.push(callback =>
DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, callback)
)
for (update of this.updates.slice(6)) {
;(update => {
actions.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
update,
callback
)
for (const update of this.updates.slice(6)) {
actions.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
update,
callback
)
})(update)
)
}
async.series(actions, error => {
if (error != null) {
throw error
// process updates
actions.push(cb =>
DocUpdaterClient.getDoc(this.project_id, this.doc_id, cb)
)
async.series(actions, done)
})
it('should be able to continue applying updates when the project has been deleted', function (done) {
DocUpdaterClient.getDoc(
this.project_id,
this.doc_id,
(error, res, doc) => {
if (error) return done(error)
doc.lines.should.deep.equal(this.my_result)
done()
}
DocUpdaterClient.getDoc(
this.project_id,
this.doc_id,
(error, res, doc) => {
if (error) return done(error)
doc.lines.should.deep.equal(this.my_result)
done()
}
)
})
)
})
it('should store the doc ops in the correct order', function (done) {
@ -422,9 +476,7 @@ describe('Applying updates to a doc', function () {
})
describe('when older ops come in after the delete', function () {
before(function (done) {
this.project_id = DocUpdaterClient.randomId()
this.doc_id = DocUpdaterClient.randomId()
beforeEach(function (done) {
const lines = ['', '', '']
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines,
@ -492,11 +544,9 @@ describe('Applying updates to a doc', function () {
})
describe('with a broken update', function () {
before(function (done) {
this.project_id = DocUpdaterClient.randomId()
this.doc_id = DocUpdaterClient.randomId()
beforeEach(function (done) {
this.broken_update = {
doc_id: this.doc_id,
doc: this.doc_id,
v: this.version,
op: [{ d: 'not the correct content', p: 0 }],
}
@ -547,9 +597,7 @@ describe('Applying updates to a doc', function () {
})
describe('when there is no version in Mongo', function () {
before(function (done) {
this.project_id = DocUpdaterClient.randomId()
this.doc_id = DocUpdaterClient.randomId()
beforeEach(function (done) {
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
})
@ -586,9 +634,7 @@ describe('Applying updates to a doc', function () {
})
describe('when the sending duplicate ops', function () {
before(function (done) {
this.project_id = DocUpdaterClient.randomId()
this.doc_id = DocUpdaterClient.randomId()
beforeEach(function (done) {
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version,
@ -671,11 +717,9 @@ describe('Applying updates to a doc', function () {
})
describe('when sending updates for a non-existing doc id', function () {
before(function (done) {
this.project_id = DocUpdaterClient.randomId()
this.doc_id = DocUpdaterClient.randomId()
beforeEach(function (done) {
this.non_existing = {
doc_id: this.doc_id,
doc: this.doc_id,
v: this.version,
op: [{ d: 'content', p: 0 }],
}

View file

@ -50,64 +50,68 @@ app.use((req, res, next) => {
Metrics.injectMetricsRoute(app)
app.head(
'/project/:project_id/file/:file_id',
keyBuilder.userFileKeyMiddleware,
fileController.getFileHead
)
app.get(
'/project/:project_id/file/:file_id',
keyBuilder.userFileKeyMiddleware,
fileController.getFile
)
app.post(
'/project/:project_id/file/:file_id',
keyBuilder.userFileKeyMiddleware,
fileController.insertFile
)
app.put(
'/project/:project_id/file/:file_id',
keyBuilder.userFileKeyMiddleware,
bodyParser.json(),
fileController.copyFile
)
app.delete(
'/project/:project_id/file/:file_id',
keyBuilder.userFileKeyMiddleware,
fileController.deleteFile
)
app.delete(
'/project/:project_id',
keyBuilder.userProjectKeyMiddleware,
fileController.deleteProject
)
if (settings.filestore.stores.user_files) {
app.head(
'/project/:project_id/file/:file_id',
keyBuilder.userFileKeyMiddleware,
fileController.getFileHead
)
app.get(
'/project/:project_id/file/:file_id',
keyBuilder.userFileKeyMiddleware,
fileController.getFile
)
app.post(
'/project/:project_id/file/:file_id',
keyBuilder.userFileKeyMiddleware,
fileController.insertFile
)
app.put(
'/project/:project_id/file/:file_id',
keyBuilder.userFileKeyMiddleware,
bodyParser.json(),
fileController.copyFile
)
app.delete(
'/project/:project_id/file/:file_id',
keyBuilder.userFileKeyMiddleware,
fileController.deleteFile
)
app.delete(
'/project/:project_id',
keyBuilder.userProjectKeyMiddleware,
fileController.deleteProject
)
app.get(
'/project/:project_id/size',
keyBuilder.userProjectKeyMiddleware,
fileController.directorySize
)
app.get(
'/project/:project_id/size',
keyBuilder.userProjectKeyMiddleware,
fileController.directorySize
)
}
app.head(
'/template/:template_id/v/:version/:format',
keyBuilder.templateFileKeyMiddleware,
fileController.getFileHead
)
app.get(
'/template/:template_id/v/:version/:format',
keyBuilder.templateFileKeyMiddleware,
fileController.getFile
)
app.get(
'/template/:template_id/v/:version/:format/:sub_type',
keyBuilder.templateFileKeyMiddleware,
fileController.getFile
)
app.post(
'/template/:template_id/v/:version/:format',
keyBuilder.templateFileKeyMiddleware,
fileController.insertFile
)
if (settings.filestore.stores.template_files) {
app.head(
'/template/:template_id/v/:version/:format',
keyBuilder.templateFileKeyMiddleware,
fileController.getFileHead
)
app.get(
'/template/:template_id/v/:version/:format',
keyBuilder.templateFileKeyMiddleware,
fileController.getFile
)
app.get(
'/template/:template_id/v/:version/:format/:sub_type',
keyBuilder.templateFileKeyMiddleware,
fileController.getFile
)
app.post(
'/template/:template_id/v/:version/:format',
keyBuilder.templateFileKeyMiddleware,
fileController.insertFile
)
}
app.get(
'/bucket/:bucket/key/*',

View file

@ -7,6 +7,6 @@ filestore
--esmock-loader=False
--node-version=20.18.2
--public-repo=True
--script-version=4.5.0
--script-version=4.7.0
--test-acceptance-shards=SHARD_01_,SHARD_02_,SHARD_03_
--use-large-ci-runner=True

View file

@ -138,6 +138,45 @@ async function getHistoryBefore(req, res, next) {
}
}
/**
* Get all changes since the beginning of history or since a given version
*/
async function getChanges(req, res, next) {
const projectId = req.swagger.params.project_id.value
const since = req.swagger.params.since.value ?? 0
if (since < 0) {
// Negative values would cause an infinite loop
return res.status(400).json({
error: `Version out of bounds: ${since}`,
})
}
const changes = []
let chunk = await chunkStore.loadLatest(projectId)
if (since > chunk.getEndVersion()) {
return res.status(400).json({
error: `Version out of bounds: ${since}`,
})
}
// Fetch all chunks that come after the chunk that contains the start version
while (chunk.getStartVersion() > since) {
const changesInChunk = chunk.getChanges()
changes.unshift(...changesInChunk)
chunk = await chunkStore.loadAtVersion(projectId, chunk.getStartVersion())
}
// Extract the relevant changes from the chunk that contains the start version
const changesInChunk = chunk
.getChanges()
.slice(since - chunk.getStartVersion())
changes.unshift(...changesInChunk)
res.json(changes.map(change => change.toRaw()))
}
async function getZip(req, res, next) {
const projectId = req.swagger.params.project_id.value
const version = req.swagger.params.version.value
@ -337,6 +376,7 @@ module.exports = {
getLatestHistoryRaw: expressify(getLatestHistoryRaw),
getHistory: expressify(getHistory),
getHistoryBefore: expressify(getHistoryBefore),
getChanges: expressify(getChanges),
getZip: expressify(getZip),
createZip: expressify(createZip),
deleteProject: expressify(deleteProject),
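As a worked illustration of the slicing in the new getChanges handler above (all numbers are made up): if a chunk starts at version 140 and `since` is 150, the first ten changes in that chunk are skipped and the remainder are returned ahead of any later chunks.
// Worked example of `changes.slice(since - chunk.getStartVersion())` (values made up):
const chunkStartVersion = 140
const since = 150
// a chunk starting at version 140 holding 20 changes covers versions 140..159
const changesInChunk = Array.from(
  { length: 20 },
  (_, i) => `change@${chunkStartVersion + i}`
)
const relevant = changesInChunk.slice(since - chunkStartVersion)
console.log(relevant[0], relevant.length) // => 'change@150' 10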

View file

@ -100,9 +100,48 @@ const importChanges = {
],
}
const getChanges = {
'x-swagger-router-controller': 'projects',
operationId: 'getChanges',
tags: ['Project'],
description: 'Get changes applied to a project',
parameters: [
{
name: 'project_id',
in: 'path',
description: 'project id',
required: true,
type: 'string',
},
{
name: 'since',
in: 'query',
description: 'start version',
required: false,
type: 'number',
},
],
responses: {
200: {
description: 'Success',
schema: {
type: 'array',
items: {
$ref: '#/definitions/Change',
},
},
},
},
security: [
{
basic: [],
},
],
}
exports.paths = {
'/projects/{project_id}/import': { post: importSnapshot },
'/projects/{project_id}/legacy_import': { post: importSnapshot },
'/projects/{project_id}/changes': { post: importChanges },
'/projects/{project_id}/changes': { get: getChanges, post: importChanges },
'/projects/{project_id}/legacy_changes': { post: importChanges },
}
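A hedged sketch of how a client could call the endpoint described by this spec; the base URL, path prefix and basic-auth credentials are assumptions for the example, not part of this change:
// Illustration only: fetch all changes applied to a project since a given version.
async function getChangesSince(projectId, since = 0) {
  const auth = Buffer.from('staging:password').toString('base64') // placeholder credentials
  const res = await fetch(
    `http://localhost:3100/api/projects/${projectId}/changes?since=${since}`,
    { headers: { Authorization: `Basic ${auth}` } }
  )
  if (!res.ok) {
    throw new Error(`unexpected status ${res.status}`)
  }
  return await res.json() // array of raw Change objects
}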

View file

@ -90,15 +90,16 @@ process.on('SIGINT', () => {
/**
* @param {number} port
* @param {boolean} enableVerificationLoop
* @return {Promise<http.Server>}
*/
export async function startApp(port) {
export async function startApp(port, enableVerificationLoop = true) {
await mongodb.client.connect()
await loadGlobalBlobs()
await healthCheck()
const server = http.createServer(app)
await promisify(server.listen.bind(server, port))()
loopRandomProjects(shutdownEmitter)
enableVerificationLoop && loopRandomProjects(shutdownEmitter)
return server
}
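A small usage sketch for the new second argument (the import path is an assumption): acceptance tests can now start the app without kicking off the background verification loop.
// Sketch: start the app on an ephemeral port with the verification loop disabled.
import { startApp } from '../../../backup-verifier-app.mjs' // path is an assumption

const server = await startApp(0, false)
// ... exercise the HTTP API under test ...
server.close()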

View file

@ -6,5 +6,5 @@ history-v1
--esmock-loader=False
--node-version=20.18.2
--public-repo=False
--script-version=4.5.0
--script-version=4.7.0
--tsconfig-extra-includes=backup-deletion-app.mjs,backup-verifier-app.mjs,backup-worker-app.mjs,api/**/*,migrations/**/*,storage/**/*

View file

@ -89,6 +89,16 @@
"host": "QUEUES_REDIS_HOST",
"password": "QUEUES_REDIS_PASSWORD",
"port": "QUEUES_REDIS_PORT"
},
"history": {
"host": "HISTORY_REDIS_HOST",
"password": "HISTORY_REDIS_PASSWORD",
"port": "HISTORY_REDIS_PORT"
},
"lock": {
"host": "REDIS_HOST",
"password": "REDIS_PASSWORD",
"port": "REDIS_PORT"
}
}
}

View file

@ -21,6 +21,7 @@ services:
ELASTIC_SEARCH_DSN: es:9200
REDIS_HOST: redis
QUEUES_REDIS_HOST: redis
HISTORY_REDIS_HOST: redis
ANALYTICS_QUEUES_REDIS_HOST: redis
MONGO_HOST: mongo
POSTGRES_HOST: postgres

View file

@ -37,6 +37,7 @@ services:
environment:
ELASTIC_SEARCH_DSN: es:9200
REDIS_HOST: redis
HISTORY_REDIS_HOST: redis
QUEUES_REDIS_HOST: redis
ANALYTICS_QUEUES_REDIS_HOST: redis
MONGO_HOST: mongo

View file

@ -0,0 +1,27 @@
// @ts-check
/**
* @import { Knex } from "knex"
*/
/**
* @param { Knex } knex
* @returns { Promise<void> }
*/
exports.up = async function (knex) {
await knex.raw(`
ALTER TABLE chunks
ADD COLUMN closed BOOLEAN NOT NULL DEFAULT FALSE
`)
}
/**
* @param { Knex } knex
* @returns { Promise<void> }
*/
exports.down = async function (knex) {
await knex.raw(`
ALTER TABLE chunks
DROP COLUMN closed
`)
}

View file

@ -1,10 +1,12 @@
exports.BatchBlobStore = require('./lib/batch_blob_store')
exports.blobHash = require('./lib/blob_hash')
exports.HashCheckBlobStore = require('./lib/hash_check_blob_store')
exports.chunkBuffer = require('./lib/chunk_buffer')
exports.chunkStore = require('./lib/chunk_store')
exports.historyStore = require('./lib/history_store').historyStore
exports.knex = require('./lib/knex')
exports.mongodb = require('./lib/mongodb')
exports.redis = require('./lib/redis')
exports.persistChanges = require('./lib/persist_changes')
exports.persistor = require('./lib/persistor')
exports.ProjectArchive = require('./lib/project_archive')
@ -18,3 +20,6 @@ exports.loadGlobalBlobs = loadGlobalBlobs
const { InvalidChangeError } = require('./lib/errors')
exports.InvalidChangeError = InvalidChangeError
const { ChunkVersionConflictError } = require('./lib/chunk_store/errors')
exports.ChunkVersionConflictError = ChunkVersionConflictError

View file

@ -1,5 +1,7 @@
'use strict'
const OError = require('@overleaf/o-error')
const check = require('check-types')
const { Blob } = require('overleaf-editor-core')
@ -7,41 +9,58 @@ const assert = check.assert
const MONGO_ID_REGEXP = /^[0-9a-f]{24}$/
const POSTGRES_ID_REGEXP = /^[1-9][0-9]{0,9}$/
const PROJECT_ID_REGEXP = /^([0-9a-f]{24}|[1-9][0-9]{0,9})$/
const MONGO_OR_POSTGRES_ID_REGEXP = /^([0-9a-f]{24}|[1-9][0-9]{0,9})$/
function transaction(transaction, message) {
assert.function(transaction, message)
}
function blobHash(arg, message) {
assert.match(arg, Blob.HEX_HASH_RX, message)
try {
assert.match(arg, Blob.HEX_HASH_RX, message)
} catch (error) {
throw OError.tag(error, message, { arg })
}
}
/**
* A project id is a string that contains either an integer (for projects stored in Postgres) or 24
* hex digits (for projects stored in Mongo)
*/
function projectId(arg, message) {
try {
assert.match(arg, MONGO_OR_POSTGRES_ID_REGEXP, message)
} catch (error) {
throw OError.tag(error, message, { arg })
}
}
/**
* A chunk id is a string that contains either an integer (for projects stored in Postgres) or 24
* hex digits (for projects stored in Mongo)
*/
function projectId(arg, message) {
assert.match(arg, PROJECT_ID_REGEXP, message)
}
/**
* A chunk id is either a number (for projects stored in Postgres) or a 24
* character string (for projects stored in Mongo)
*/
function chunkId(arg, message) {
const valid = check.integer(arg) || check.match(arg, MONGO_ID_REGEXP)
if (!valid) {
throw new TypeError(message)
try {
assert.match(arg, MONGO_OR_POSTGRES_ID_REGEXP, message)
} catch (error) {
throw OError.tag(error, message, { arg })
}
}
function mongoId(arg, message) {
assert.match(arg, MONGO_ID_REGEXP)
try {
assert.match(arg, MONGO_ID_REGEXP, message)
} catch (error) {
throw OError.tag(error, message, { arg })
}
}
function postgresId(arg, message) {
assert.match(arg, POSTGRES_ID_REGEXP, message)
try {
assert.match(arg, POSTGRES_ID_REGEXP, message)
} catch (error) {
throw OError.tag(error, message, { arg })
}
}
module.exports = {

View file

@ -2,11 +2,7 @@
* Provides a generator function to back up project chunks and blobs.
*/
import {
getProjectChunksFromVersion,
loadAtVersion,
loadByChunkRecord,
} from './chunk_store/index.js'
import chunkStore from './chunk_store/index.js'
import {
GLOBAL_BLOBS, // NOTE: must call loadGlobalBlobs() before using this
@ -33,7 +29,10 @@ async function lookBehindForSeenBlobs(
) {
// the snapshot in this chunk has not been backed up
// so we find the set of backed up blobs from the previous chunk
const previousChunk = await loadAtVersion(projectId, lastBackedUpVersion)
const previousChunk = await chunkStore.loadAtVersion(
projectId,
lastBackedUpVersion
)
const previousChunkHistory = previousChunk.getHistory()
previousChunkHistory.findBlobHashes(seenBlobs)
}
@ -115,13 +114,13 @@ export async function* backupGenerator(projectId, lastBackedUpVersion) {
lastBackedUpVersion >= 0 ? lastBackedUpVersion + 1 : 0
let isStartingChunk = true
let currentBackedUpVersion = lastBackedUpVersion
const chunkRecordIterator = getProjectChunksFromVersion(
const chunkRecordIterator = chunkStore.getProjectChunksFromVersion(
projectId,
firstPendingVersion
)
for await (const chunkRecord of chunkRecordIterator) {
const { chunk, chunkBuffer } = await loadByChunkRecord(
const { chunk, chunkBuffer } = await chunkStore.loadByChunkRecord(
projectId,
chunkRecord
)

View file

@ -13,7 +13,7 @@ async function initialize(projectId) {
* Return blob metadata for the given project and hash
*/
async function findBlob(projectId, hash) {
assert.postgresId(projectId, `bad projectId ${projectId}`)
assert.postgresId(projectId, 'bad projectId')
projectId = parseInt(projectId, 10)
assert.blobHash(hash, 'bad hash')
@ -35,7 +35,7 @@ async function findBlob(projectId, hash) {
* @return {Promise.<Array.<Blob?>>} no guarantee on order
*/
async function findBlobs(projectId, hashes) {
assert.postgresId(projectId, `bad projectId ${projectId}`)
assert.postgresId(projectId, 'bad projectId')
projectId = parseInt(projectId, 10)
assert.array(hashes, 'bad hashes: not array')
hashes.forEach(function (hash) {
@ -57,7 +57,7 @@ async function findBlobs(projectId, hashes) {
* Return metadata for all blobs in the given project
*/
async function getProjectBlobs(projectId) {
assert.postgresId(projectId, `bad projectId ${projectId}`)
assert.postgresId(projectId, 'bad projectId')
projectId = parseInt(projectId, 10)
const records = await knex('project_blobs')
@ -103,7 +103,7 @@ async function getProjectBlobsBatch(projectIds) {
* Add a blob's metadata to the blobs table after it has been uploaded.
*/
async function insertBlob(projectId, blob) {
assert.postgresId(projectId, `bad projectId ${projectId}`)
assert.postgresId(projectId, 'bad projectId')
projectId = parseInt(projectId, 10)
await knex('project_blobs')
@ -116,7 +116,7 @@ async function insertBlob(projectId, blob) {
* Deletes all blobs for a given project
*/
async function deleteBlobs(projectId) {
assert.postgresId(projectId, `bad projectId ${projectId}`)
assert.postgresId(projectId, 'bad projectId')
projectId = parseInt(projectId, 10)
await knex('project_blobs').where('project_id', projectId).delete()

View file

@ -0,0 +1,39 @@
'use strict'
/**
* @module storage/lib/chunk_buffer
*/
const chunkStore = require('../chunk_store')
const redisBackend = require('../chunk_store/redis')
const metrics = require('@overleaf/metrics')
/**
* Load the latest Chunk stored for a project, including blob metadata.
*
* @param {string} projectId
* @return {Promise.<Chunk>}
*/
async function loadLatest(projectId) {
const chunkRecord = await chunkStore.loadLatestRaw(projectId)
const cachedChunk = await redisBackend.getCurrentChunkIfValid(
projectId,
chunkRecord
)
if (cachedChunk) {
metrics.inc('chunk_buffer.loadLatest', 1, {
status: 'cache-hit',
})
return cachedChunk
} else {
metrics.inc('chunk_buffer.loadLatest', 1, {
status: 'cache-miss',
})
const chunk = await chunkStore.loadLatest(projectId)
await redisBackend.setCurrentChunk(projectId, chunk)
return chunk
}
}
module.exports = {
loadLatest,
}
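A minimal usage sketch for the new chunk buffer (the require path and project id are placeholders): callers read the latest chunk through the buffer and only hit the chunk store on a cache miss.
// Illustration only: load the latest chunk via the Redis-backed buffer.
const chunkBuffer = require('../lib/chunk_buffer') // path is an assumption

async function printLatestVersions(projectId) {
  const chunk = await chunkBuffer.loadLatest(projectId)
  // On a cache miss this also primes the Redis cache for the next call.
  console.log(chunk.getStartVersion(), chunk.getEndVersion())
}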

View file

@ -1,3 +1,5 @@
// @ts-check
'use strict'
/**
@ -156,7 +158,6 @@ async function loadAtTimestamp(projectId, timestamp) {
* @param {string} projectId
* @param {Chunk} chunk
* @param {Date} [earliestChangeTimestamp]
* @return {Promise.<number>} for the chunkId of the inserted chunk
*/
async function create(projectId, chunk, earliestChangeTimestamp) {
assert.projectId(projectId, 'bad projectId')
@ -164,13 +165,18 @@ async function create(projectId, chunk, earliestChangeTimestamp) {
assert.maybe.date(earliestChangeTimestamp, 'bad timestamp')
const backend = getBackend(projectId)
const chunkStart = chunk.getStartVersion()
const chunkId = await uploadChunk(projectId, chunk)
await backend.confirmCreate(
projectId,
chunk,
chunkId,
earliestChangeTimestamp
)
const opts = {}
if (chunkStart > 0) {
opts.oldChunkId = await getChunkIdForVersion(projectId, chunkStart - 1)
}
if (earliestChangeTimestamp != null) {
opts.earliestChangeTimestamp = earliestChangeTimestamp
}
await backend.confirmCreate(projectId, chunk, chunkId, opts)
}
/**
@ -220,13 +226,12 @@ async function update(
const oldChunkId = await getChunkIdForVersion(projectId, oldEndVersion)
const newChunkId = await uploadChunk(projectId, newChunk)
await backend.confirmUpdate(
projectId,
oldChunkId,
newChunk,
newChunkId,
earliestChangeTimestamp
)
const opts = {}
if (earliestChangeTimestamp != null) {
opts.earliestChangeTimestamp = earliestChangeTimestamp
}
await backend.confirmUpdate(projectId, oldChunkId, newChunk, newChunkId, opts)
}
/**
@ -234,7 +239,7 @@ async function update(
*
* @param {string} projectId
* @param {number} version
* @return {Promise.<number>}
* @return {Promise.<string>}
*/
async function getChunkIdForVersion(projectId, version) {
const backend = getBackend(projectId)
@ -343,10 +348,14 @@ async function deleteProjectChunks(projectId) {
* Delete a given number of old chunks from both the database
* and from object storage.
*
* @param {number} count - number of chunks to delete
* @param {number} minAgeSecs - how many seconds ago must chunks have been
* deleted
* @return {Promise}
* @param {object} options
* @param {number} [options.batchSize] - number of chunks to delete in each
* batch
* @param {number} [options.maxBatches] - maximum number of batches to process
* @param {number} [options.minAgeSecs] - minimum age of chunks to delete
* @param {number} [options.timeout] - maximum time to spend deleting chunks
*
* @return {Promise<number>} number of chunks deleted
*/
async function deleteOldChunks(options = {}) {
const batchSize = options.batchSize ?? DEFAULT_DELETE_BATCH_SIZE

View file

@ -1,4 +1,6 @@
const { ObjectId, ReadPreference } = require('mongodb')
// @ts-check
const { ObjectId, ReadPreference, MongoError } = require('mongodb')
const { Chunk } = require('overleaf-editor-core')
const OError = require('@overleaf/o-error')
const assert = require('../assert')
@ -7,6 +9,10 @@ const { ChunkVersionConflictError } = require('./errors')
const DUPLICATE_KEY_ERROR_CODE = 11000
/**
* @import { ClientSession } from 'mongodb'
*/
/**
* Get the latest chunk's metadata from the database
* @param {string} projectId
@ -18,7 +24,10 @@ async function getLatestChunk(projectId, opts = {}) {
const { readOnly = false } = opts
const record = await mongodb.chunks.findOne(
{ projectId: new ObjectId(projectId), state: 'active' },
{
projectId: new ObjectId(projectId),
state: { $in: ['active', 'closed'] },
},
{
sort: { startVersion: -1 },
readPreference: readOnly
@ -42,7 +51,7 @@ async function getChunkForVersion(projectId, version) {
const record = await mongodb.chunks.findOne(
{
projectId: new ObjectId(projectId),
state: 'active',
state: { $in: ['active', 'closed'] },
startVersion: { $lte: version },
endVersion: { $gte: version },
},
@ -94,7 +103,7 @@ async function getChunkForTimestamp(projectId, timestamp) {
const record = await mongodb.chunks.findOne(
{
projectId: new ObjectId(projectId),
state: 'active',
state: { $in: ['active', 'closed'] },
endTimestamp: { $gte: timestamp },
},
// We use the index on the startVersion for sorting records. This assumes
@ -126,7 +135,7 @@ async function getLastActiveChunkBeforeTimestamp(projectId, timestamp) {
const record = await mongodb.chunks.findOne(
{
projectId: new ObjectId(projectId),
state: 'active',
state: { $in: ['active', 'closed'] },
$or: [
{
endTimestamp: {
@ -155,7 +164,10 @@ async function getProjectChunkIds(projectId) {
assert.mongoId(projectId, 'bad projectId')
const cursor = mongodb.chunks.find(
{ projectId: new ObjectId(projectId), state: 'active' },
{
projectId: new ObjectId(projectId),
state: { $in: ['active', 'closed'] },
},
{ projection: { _id: 1 } }
)
return await cursor.map(record => record._id).toArray()
@ -169,7 +181,10 @@ async function getProjectChunks(projectId) {
const cursor = mongodb.chunks
.find(
{ projectId: new ObjectId(projectId), state: 'active' },
{
projectId: new ObjectId(projectId),
state: { $in: ['active', 'closed'] },
},
{ projection: { state: 0 } }
)
.sort({ startVersion: 1 })
@ -198,48 +213,35 @@ async function insertPendingChunk(projectId, chunk) {
/**
* Record that a new chunk was created.
*
* @param {string} projectId
* @param {Chunk} chunk
* @param {string} chunkId
* @param {object} opts
* @param {Date} [opts.earliestChangeTimestamp]
* @param {string} [opts.oldChunkId]
*/
async function confirmCreate(
projectId,
chunk,
chunkId,
earliestChangeTimestamp,
mongoOpts = {}
) {
async function confirmCreate(projectId, chunk, chunkId, opts = {}) {
assert.mongoId(projectId, 'bad projectId')
assert.instance(chunk, Chunk, 'bad chunk')
assert.mongoId(chunkId, 'bad chunkId')
assert.instance(chunk, Chunk, 'bad newChunk')
assert.mongoId(chunkId, 'bad newChunkId')
let result
try {
result = await mongodb.chunks.updateOne(
{
_id: new ObjectId(chunkId),
projectId: new ObjectId(projectId),
state: 'pending',
},
{ $set: { state: 'active', updatedAt: new Date() } },
mongoOpts
)
} catch (err) {
if (err.code === DUPLICATE_KEY_ERROR_CODE) {
throw new ChunkVersionConflictError('chunk start version is not unique', {
await mongodb.client.withSession(async session => {
await session.withTransaction(async () => {
if (opts.oldChunkId != null) {
await closeChunk(projectId, opts.oldChunkId, { session })
}
await activateChunk(projectId, chunkId, { session })
await updateProjectRecord(
projectId,
chunkId,
})
} else {
throw err
}
}
if (result.matchedCount === 0) {
throw new OError('pending chunk not found', { projectId, chunkId })
}
await updateProjectRecord(
projectId,
chunk,
earliestChangeTimestamp,
mongoOpts
)
chunk,
opts.earliestChangeTimestamp,
{ session }
)
})
})
}
/**
@ -276,41 +278,145 @@ async function updateProjectRecord(
/**
* Record that a chunk was replaced by a new one.
*
* @param {string} projectId
* @param {string} oldChunkId
* @param {Chunk} newChunk
* @param {string} newChunkId
* @param {object} [opts]
* @param {Date} [opts.earliestChangeTimestamp]
*/
async function confirmUpdate(
projectId,
oldChunkId,
newChunk,
newChunkId,
earliestChangeTimestamp
opts = {}
) {
assert.mongoId(projectId, 'bad projectId')
assert.mongoId(oldChunkId, 'bad oldChunkId')
assert.instance(newChunk, Chunk, 'bad newChunk')
assert.mongoId(newChunkId, 'bad newChunkId')
const session = mongodb.client.startSession()
try {
await mongodb.client.withSession(async session => {
await session.withTransaction(async () => {
await deleteChunk(projectId, oldChunkId, { session })
await confirmCreate(
await deleteActiveChunk(projectId, oldChunkId, { session })
await activateChunk(projectId, newChunkId, { session })
await updateProjectRecord(
projectId,
newChunk,
newChunkId,
earliestChangeTimestamp,
opts.earliestChangeTimestamp,
{ session }
)
})
} finally {
await session.endSession()
})
}
/**
* Activate a pending chunk
*
* @param {string} projectId
* @param {string} chunkId
* @param {object} [opts]
* @param {ClientSession} [opts.session]
*/
async function activateChunk(projectId, chunkId, opts = {}) {
assert.mongoId(projectId, 'bad projectId')
assert.mongoId(chunkId, 'bad chunkId')
let result
try {
result = await mongodb.chunks.updateOne(
{
_id: new ObjectId(chunkId),
projectId: new ObjectId(projectId),
state: 'pending',
},
{ $set: { state: 'active', updatedAt: new Date() } },
opts
)
} catch (err) {
if (err instanceof MongoError && err.code === DUPLICATE_KEY_ERROR_CODE) {
throw new ChunkVersionConflictError('chunk start version is not unique', {
projectId,
chunkId,
})
} else {
throw err
}
}
if (result.matchedCount === 0) {
throw new OError('pending chunk not found', { projectId, chunkId })
}
}
/**
* Close a chunk
*
* A closed chunk is one that can't be extended anymore.
*
* @param {string} projectId
* @param {string} chunkId
* @param {object} [opts]
* @param {ClientSession} [opts.session]
*/
async function closeChunk(projectId, chunkId, opts = {}) {
const result = await mongodb.chunks.updateOne(
{
_id: new ObjectId(chunkId),
projectId: new ObjectId(projectId),
state: 'active',
},
{ $set: { state: 'closed' } },
opts
)
if (result.matchedCount === 0) {
throw new ChunkVersionConflictError('unable to close chunk', {
projectId,
chunkId,
})
}
}
/**
* Delete an active chunk
*
* This is used to delete chunks that are in the process of being extended. It
* will refuse to delete chunks that are already closed and can therefore not be
* extended.
*
* @param {string} projectId
* @param {string} chunkId
* @param {object} [opts]
* @param {ClientSession} [opts.session]
*/
async function deleteActiveChunk(projectId, chunkId, opts = {}) {
const updateResult = await mongodb.chunks.updateOne(
{
_id: new ObjectId(chunkId),
projectId: new ObjectId(projectId),
state: 'active',
},
{ $set: { state: 'deleted', updatedAt: new Date() } },
opts
)
if (updateResult.matchedCount === 0) {
throw new ChunkVersionConflictError('unable to delete active chunk', {
projectId,
chunkId,
})
}
}
/**
* Delete a chunk.
*
* @param {number} projectId
* @param {number} chunkId
* @param {string} projectId
* @param {string} chunkId
* @return {Promise}
*/
async function deleteChunk(projectId, chunkId, mongoOpts = {}) {
@ -331,7 +437,10 @@ async function deleteProjectChunks(projectId) {
assert.mongoId(projectId, 'bad projectId')
await mongodb.chunks.updateMany(
{ projectId: new ObjectId(projectId), state: 'active' },
{
projectId: new ObjectId(projectId),
state: { $in: ['active', 'closed'] },
},
{ $set: { state: 'deleted', updatedAt: new Date() } }
)
}

View file

@ -1,3 +1,5 @@
// @ts-check
const { Chunk } = require('overleaf-editor-core')
const assert = require('../assert')
const knex = require('../knex')
@ -7,6 +9,10 @@ const { updateProjectRecord } = require('./mongo')
const DUPLICATE_KEY_ERROR_CODE = '23505'
/**
* @import { Knex } from 'knex'
*/
/**
* Get the latest chunk's metadata from the database
* @param {string} projectId
@ -14,12 +20,11 @@ const DUPLICATE_KEY_ERROR_CODE = '23505'
* @param {boolean} [opts.readOnly]
*/
async function getLatestChunk(projectId, opts = {}) {
assert.postgresId(projectId, `bad projectId ${projectId}`)
projectId = parseInt(projectId, 10)
assert.postgresId(projectId, 'bad projectId')
const { readOnly = false } = opts
const record = await (readOnly ? knexReadOnly : knex)('chunks')
.where('doc_id', projectId)
.where('doc_id', parseInt(projectId, 10))
.orderBy('end_version', 'desc')
.first()
if (record == null) {
@ -30,13 +35,15 @@ async function getLatestChunk(projectId, opts = {}) {
/**
* Get the metadata for the chunk that contains the given version.
*
* @param {string} projectId
* @param {number} version
*/
async function getChunkForVersion(projectId, version) {
assert.postgresId(projectId, `bad projectId ${projectId}`)
projectId = parseInt(projectId, 10)
assert.postgresId(projectId, 'bad projectId')
const record = await knex('chunks')
.where('doc_id', projectId)
.where('doc_id', parseInt(projectId, 10))
.where('end_version', '>=', version)
.orderBy('end_version')
.first()
@ -48,20 +55,23 @@ async function getChunkForVersion(projectId, version) {
/**
* Get the metadata for the first chunk created before the given timestamp.
*
* @param {string} projectId
* @param {Date} timestamp
*/
async function getFirstChunkBeforeTimestamp(projectId, timestamp) {
assert.date(timestamp, 'bad timestamp')
const recordActive = await getChunkForVersion(projectId, 0)
// projectId must be valid if getChunkForVersion did not throw
projectId = parseInt(projectId, 10)
if (recordActive && recordActive.endTimestamp <= timestamp) {
return recordActive
}
// fallback to deleted chunk
const recordDeleted = await knex('old_chunks')
.where('doc_id', projectId)
.where('doc_id', parseInt(projectId, 10))
.where('start_version', '=', 0)
.where('end_timestamp', '<=', timestamp)
.orderBy('end_version', 'desc')
@ -75,14 +85,16 @@ async function getFirstChunkBeforeTimestamp(projectId, timestamp) {
/**
* Get the metadata for the chunk that contains the version that was current at
* the given timestamp.
*
* @param {string} projectId
* @param {Date} timestamp
*/
async function getLastActiveChunkBeforeTimestamp(projectId, timestamp) {
assert.date(timestamp, 'bad timestamp')
assert.postgresId(projectId, 'bad projectId')
projectId = parseInt(projectId, 10)
const query = knex('chunks')
.where('doc_id', projectId)
.where('doc_id', parseInt(projectId, 10))
.where(function () {
this.where('end_timestamp', '<=', timestamp).orWhere(
'end_timestamp',
@ -102,10 +114,12 @@ async function getLastActiveChunkBeforeTimestamp(projectId, timestamp) {
/**
* Get the metadata for the chunk that contains the version that was current at
* the given timestamp.
*
* @param {string} projectId
* @param {Date} timestamp
*/
async function getChunkForTimestamp(projectId, timestamp) {
assert.postgresId(projectId, `bad projectId ${projectId}`)
projectId = parseInt(projectId, 10)
assert.postgresId(projectId, 'bad projectId')
// This query will find the latest chunk after the timestamp (query orders
// in reverse chronological order), OR the latest chunk
@ -118,11 +132,11 @@ async function getChunkForTimestamp(projectId, timestamp) {
'WHERE doc_id = ? ' +
'ORDER BY end_version desc LIMIT 1' +
')',
[timestamp, projectId]
[timestamp, parseInt(projectId, 10)]
)
const record = await knex('chunks')
.where('doc_id', projectId)
.where('doc_id', parseInt(projectId, 10))
.where(whereAfterEndTimestampOrLatestChunk)
.orderBy('end_version')
.first()
@ -137,7 +151,7 @@ async function getChunkForTimestamp(projectId, timestamp) {
*/
function chunkFromRecord(record) {
return {
id: record.id,
id: record.id.toString(),
startVersion: record.start_version,
endVersion: record.end_version,
endTimestamp: record.end_timestamp,
@ -146,35 +160,41 @@ function chunkFromRecord(record) {
/**
* Get all of a project's chunk ids
*
* @param {string} projectId
*/
async function getProjectChunkIds(projectId) {
assert.postgresId(projectId, `bad projectId ${projectId}`)
projectId = parseInt(projectId, 10)
assert.postgresId(projectId, 'bad projectId')
const records = await knex('chunks').select('id').where('doc_id', projectId)
const records = await knex('chunks')
.select('id')
.where('doc_id', parseInt(projectId, 10))
return records.map(record => record.id)
}
/**
* Get all of a projects chunks directly
*
* @param {string} projectId
*/
async function getProjectChunks(projectId) {
assert.postgresId(projectId, `bad projectId ${projectId}`)
projectId = parseInt(projectId, 10)
assert.postgresId(projectId, 'bad projectId')
const records = await knex('chunks')
.select()
.where('doc_id', projectId)
.where('doc_id', parseInt(projectId, 10))
.orderBy('end_version')
return records.map(chunkFromRecord)
}
/**
* Insert a pending chunk before sending it to object storage.
*
* @param {string} projectId
* @param {Chunk} chunk
*/
async function insertPendingChunk(projectId, chunk) {
assert.postgresId(projectId, `bad projectId ${projectId}`)
projectId = parseInt(projectId, 10)
assert.postgresId(projectId, 'bad projectId')
const result = await knex.first(
knex.raw("nextval('chunks_id_seq'::regclass)::integer as chunkid")
@ -182,80 +202,119 @@ async function insertPendingChunk(projectId, chunk) {
const chunkId = result.chunkid
await knex('pending_chunks').insert({
id: chunkId,
doc_id: projectId,
doc_id: parseInt(projectId, 10),
end_version: chunk.getEndVersion(),
start_version: chunk.getStartVersion(),
end_timestamp: chunk.getEndTimestamp(),
})
return chunkId
return chunkId.toString()
}
/**
* Record that a new chunk was created.
*
* @param {string} projectId
* @param {Chunk} chunk
* @param {string} chunkId
* @param {object} opts
* @param {Date} [opts.earliestChangeTimestamp]
* @param {string} [opts.oldChunkId]
*/
async function confirmCreate(
projectId,
chunk,
chunkId,
earliestChangeTimestamp
) {
assert.postgresId(projectId, `bad projectId ${projectId}`)
projectId = parseInt(projectId, 10)
async function confirmCreate(projectId, chunk, chunkId, opts = {}) {
assert.postgresId(projectId, 'bad projectId')
await knex.transaction(async tx => {
if (opts.oldChunkId != null) {
await _assertChunkIsNotClosed(tx, projectId, opts.oldChunkId)
await _closeChunk(tx, projectId, opts.oldChunkId)
}
await Promise.all([
_deletePendingChunk(tx, projectId, chunkId),
_insertChunk(tx, projectId, chunk, chunkId),
])
await updateProjectRecord(projectId, chunk, earliestChangeTimestamp)
await updateProjectRecord(
// The history id in Mongo is an integer for Postgres projects
parseInt(projectId, 10),
chunk,
opts.earliestChangeTimestamp
)
})
}
/**
* Record that a chunk was replaced by a new one.
*
* @param {string} projectId
* @param {string} oldChunkId
* @param {Chunk} newChunk
* @param {string} newChunkId
*/
async function confirmUpdate(
projectId,
oldChunkId,
newChunk,
newChunkId,
earliestChangeTimestamp
opts = {}
) {
assert.postgresId(projectId, `bad projectId ${projectId}`)
projectId = parseInt(projectId, 10)
assert.postgresId(projectId, 'bad projectId')
await knex.transaction(async tx => {
await _assertChunkIsNotClosed(tx, projectId, oldChunkId)
await _deleteChunks(tx, { doc_id: projectId, id: oldChunkId })
await Promise.all([
_deletePendingChunk(tx, projectId, newChunkId),
_insertChunk(tx, projectId, newChunk, newChunkId),
])
await updateProjectRecord(projectId, newChunk, earliestChangeTimestamp)
await updateProjectRecord(
// The history id in Mongo is an integer for Postgres projects
parseInt(projectId, 10),
newChunk,
opts.earliestChangeTimestamp
)
})
}
/**
* Delete a pending chunk
*
* @param {Knex} tx
* @param {string} projectId
* @param {string} chunkId
*/
async function _deletePendingChunk(tx, projectId, chunkId) {
await tx('pending_chunks')
.where({
doc_id: projectId,
id: chunkId,
doc_id: parseInt(projectId, 10),
id: parseInt(chunkId, 10),
})
.del()
}
/**
* Adds an active chunk
*
* @param {Knex} tx
* @param {string} projectId
* @param {Chunk} chunk
* @param {string} chunkId
*/
async function _insertChunk(tx, projectId, chunk, chunkId) {
const startVersion = chunk.getStartVersion()
const endVersion = chunk.getEndVersion()
try {
await tx('chunks').insert({
id: chunkId,
doc_id: projectId,
id: parseInt(chunkId, 10),
doc_id: parseInt(projectId, 10),
start_version: startVersion,
end_version: endVersion,
end_timestamp: chunk.getEndTimestamp(),
})
} catch (err) {
if (err.code === DUPLICATE_KEY_ERROR_CODE) {
if (
err instanceof Error &&
'code' in err &&
err.code === DUPLICATE_KEY_ERROR_CODE
) {
throw new ChunkVersionConflictError(
'chunk start or end version is not unique',
{ projectId, chunkId, startVersion, endVersion }
@ -265,35 +324,92 @@ async function _insertChunk(tx, projectId, chunk, chunkId) {
}
}
/**
* Check that a chunk is not closed
*
* This is used to synchronize chunk creations and extensions.
*
* @param {Knex} tx
* @param {string} projectId
* @param {string} chunkId
*/
async function _assertChunkIsNotClosed(tx, projectId, chunkId) {
const record = await tx('chunks')
.forUpdate()
.select('closed')
.where('doc_id', parseInt(projectId, 10))
.where('id', parseInt(chunkId, 10))
.first()
if (!record) {
throw new ChunkVersionConflictError('unable to close chunk: not found', {
projectId,
chunkId,
})
}
if (record.closed) {
throw new ChunkVersionConflictError(
'unable to close chunk: already closed',
{
projectId,
chunkId,
}
)
}
}
/**
* Close a chunk
*
* A closed chunk can no longer be extended.
*
* @param {Knex} tx
* @param {string} projectId
* @param {string} chunkId
*/
async function _closeChunk(tx, projectId, chunkId) {
await tx('chunks')
.update({ closed: true })
.where('doc_id', parseInt(projectId, 10))
.where('id', parseInt(chunkId, 10))
}
/**
* Delete a chunk.
*
* @param {number} projectId
* @param {number} chunkId
* @return {Promise}
* @param {string} projectId
* @param {string} chunkId
*/
async function deleteChunk(projectId, chunkId) {
assert.postgresId(projectId, `bad projectId ${projectId}`)
projectId = parseInt(projectId, 10)
assert.postgresId(projectId, 'bad projectId')
assert.integer(chunkId, 'bad chunkId')
await _deleteChunks(knex, { doc_id: projectId, id: chunkId })
await _deleteChunks(knex, {
doc_id: parseInt(projectId, 10),
id: parseInt(chunkId, 10),
})
}
/**
* Delete all of a project's chunks
*
* @param {string} projectId
*/
async function deleteProjectChunks(projectId) {
assert.postgresId(projectId, `bad projectId ${projectId}`)
projectId = parseInt(projectId, 10)
assert.postgresId(projectId, 'bad projectId')
await knex.transaction(async tx => {
await _deleteChunks(knex, { doc_id: projectId })
await _deleteChunks(knex, { doc_id: parseInt(projectId, 10) })
})
}
/**
* Delete many chunks
*
* @param {Knex} tx
* @param {any} whereClause
*/
async function _deleteChunks(tx, whereClause) {
const rows = await tx('chunks').returning('*').where(whereClause).del()
const rows = await tx('chunks').where(whereClause).del().returning('*')
if (rows.length === 0) {
return
}
@ -311,6 +427,9 @@ async function _deleteChunks(tx, whereClause) {
/**
* Get a batch of old chunks for deletion
*
* @param {number} count
* @param {number} minAgeSecs
*/
async function getOldChunksBatch(count, minAgeSecs) {
const maxDeletedAt = new Date(Date.now() - minAgeSecs * 1000)
@ -321,15 +440,22 @@ async function getOldChunksBatch(count, minAgeSecs) {
.limit(count)
return records.map(oldChunk => ({
projectId: oldChunk.doc_id.toString(),
chunkId: oldChunk.chunk_id,
chunkId: oldChunk.chunk_id.toString(),
}))
}
/**
* Delete a batch of old chunks from the database
*
* @param {string[]} chunkIds
*/
async function deleteOldChunks(chunkIds) {
await knex('old_chunks').whereIn('chunk_id', chunkIds).del()
await knex('old_chunks')
.whereIn(
'chunk_id',
chunkIds.map(id => parseInt(id, 10))
)
.del()
}
/**


@ -0,0 +1,478 @@
const metrics = require('@overleaf/metrics')
const logger = require('@overleaf/logger')
const redis = require('../redis')
const rclient = redis.rclientHistory //
const { Snapshot, Change, History, Chunk } = require('overleaf-editor-core')
const TEMPORARY_CACHE_LIFETIME = 300 // 5 minutes
const keySchema = {
snapshot({ projectId }) {
return `snapshot:{${projectId}}`
},
startVersion({ projectId }) {
return `snapshot-version:{${projectId}}`
},
changes({ projectId }) {
return `changes:{${projectId}}`
},
expireTime({ projectId }) {
return `expire-time:{${projectId}}`
},
persistTime({ projectId }) {
return `persist-time:{${projectId}}`
},
}
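For reference: the curly braces around the project id make it a Redis Cluster hash tag, so all of a project's keys map to the same hash slot and the multi-key Lua scripts below can operate on them atomically. A minimal sketch of what the schema produces (the project id is made up for illustration):

const projectId = '1234567' // hypothetical id, for illustration only
keySchema.snapshot({ projectId }) // 'snapshot:{1234567}'
keySchema.changes({ projectId }) // 'changes:{1234567}'
keySchema.persistTime({ projectId }) // 'persist-time:{1234567}'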
rclient.defineCommand('get_current_chunk', {
numberOfKeys: 3,
lua: `
local startVersionValue = redis.call('GET', KEYS[2])
if not startVersionValue then
return nil -- this is a cache-miss
end
local snapshotValue = redis.call('GET', KEYS[1])
local changesValues = redis.call('LRANGE', KEYS[3], 0, -1)
return {snapshotValue, startVersionValue, changesValues}
`,
})
/**
* Retrieves the current chunk of project history from Redis storage
* @param {string} projectId - The unique identifier of the project
* @returns {Promise<Chunk|null>} A Promise that resolves to a Chunk object containing project history,
* or null if retrieval fails
* @throws {Error} If Redis operations fail
*/
async function getCurrentChunk(projectId) {
try {
const result = await rclient.get_current_chunk(
keySchema.snapshot({ projectId }),
keySchema.startVersion({ projectId }),
keySchema.changes({ projectId })
)
if (!result) {
return null // cache-miss
}
const snapshot = Snapshot.fromRaw(JSON.parse(result[0]))
const startVersion = JSON.parse(result[1])
const changes = result[2].map(c => Change.fromRaw(JSON.parse(c)))
const history = new History(snapshot, changes)
const chunk = new Chunk(history, startVersion)
metrics.inc('chunk_store.redis.get_current_chunk', 1, { status: 'success' })
return chunk
} catch (err) {
logger.error({ err, projectId }, 'error getting current chunk from redis')
metrics.inc('chunk_store.redis.get_current_chunk', 1, { status: 'error' })
return null
}
}
rclient.defineCommand('get_current_chunk_if_valid', {
numberOfKeys: 3,
lua: `
local expectedStartVersion = ARGV[1]
local expectedChangesCount = tonumber(ARGV[2])
local startVersionValue = redis.call('GET', KEYS[2])
if not startVersionValue then
return nil -- this is a cache-miss
end
if startVersionValue ~= expectedStartVersion then
return nil -- this is a cache-miss
end
local changesCount = redis.call('LLEN', KEYS[3])
if changesCount ~= expectedChangesCount then
return nil -- this is a cache-miss
end
local snapshotValue = redis.call('GET', KEYS[1])
local changesValues = redis.call('LRANGE', KEYS[3], 0, -1)
return {snapshotValue, startVersionValue, changesValues}
`,
})
async function getCurrentChunkIfValid(projectId, chunkRecord) {
try {
const changesCount = chunkRecord.endVersion - chunkRecord.startVersion
const result = await rclient.get_current_chunk_if_valid(
keySchema.snapshot({ projectId }),
keySchema.startVersion({ projectId }),
keySchema.changes({ projectId }),
chunkRecord.startVersion,
changesCount
)
if (!result) {
return null // cache-miss
}
const snapshot = Snapshot.fromRaw(JSON.parse(result[0]))
const startVersion = parseInt(result[1], 10)
const changes = result[2].map(c => Change.fromRaw(JSON.parse(c)))
const history = new History(snapshot, changes)
const chunk = new Chunk(history, startVersion)
metrics.inc('chunk_store.redis.get_current_chunk_if_valid', 1, {
status: 'success',
})
return chunk
} catch (err) {
logger.error(
{ err, projectId, chunkRecord },
'error getting current chunk from redis'
)
metrics.inc('chunk_store.redis.get_current_chunk_if_valid', 1, {
status: 'error',
})
return null
}
}
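A sketch of how a caller might use the validity-checked read, assuming a chunk record (start and end versions) loaded from the chunk metadata store; the wrapper name is hypothetical:

// Hypothetical wrapper, for illustration: return the cached chunk only if it
// still matches the metadata record, otherwise signal a miss to the caller.
async function readChunkThroughCache(projectId, chunkRecord) {
  const cached = await getCurrentChunkIfValid(projectId, chunkRecord)
  if (cached !== null) {
    return cached // cache agrees with { startVersion, endVersion } in the record
  }
  return null // cache miss or stale entry: rebuild from the primary chunk store
}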
rclient.defineCommand('get_current_chunk_metadata', {
numberOfKeys: 2,
lua: `
local startVersionValue = redis.call('GET', KEYS[1])
if not startVersionValue then
return nil -- this is a cache-miss
end
local changesCount = redis.call('LLEN', KEYS[2])
return {startVersionValue, changesCount}
`,
})
/**
* Retrieves the current chunk metadata for a given project from Redis
* @param {string} projectId - The ID of the project to get metadata for
* @returns {Promise<Object|null>} Object containing startVersion and changesCount if found, null on error or cache miss
* @property {number} startVersion - The starting version information
* @property {number} changesCount - The number of changes in the chunk
*/
async function getCurrentChunkMetadata(projectId) {
try {
const result = await rclient.get_current_chunk_metadata(
keySchema.startVersion({ projectId }),
keySchema.changes({ projectId })
)
if (!result) {
return null // cache-miss
}
const startVersion = JSON.parse(result[0])
const changesCount = parseInt(result[1], 10)
return { startVersion, changesCount }
} catch (err) {
return null
}
}
rclient.defineCommand('set_current_chunk', {
numberOfKeys: 4,
lua: `
local snapshotValue = ARGV[1]
local startVersionValue = ARGV[2]
local expireTime = ARGV[3]
redis.call('SET', KEYS[1], snapshotValue)
redis.call('SET', KEYS[2], startVersionValue)
redis.call('SET', KEYS[3], expireTime)
redis.call('DEL', KEYS[4]) -- clear the old changes list
if #ARGV >= 4 then
redis.call('RPUSH', KEYS[4], unpack(ARGV, 4))
end
`,
})
/**
* Stores the current chunk of project history in Redis
* @param {string} projectId - The ID of the project
* @param {Chunk} chunk - The chunk object containing history data
* @returns {Promise<*>} Returns the result of the Redis operation, or null if an error occurs
* @throws {Error} May throw Redis-related errors which are caught internally
*/
async function setCurrentChunk(projectId, chunk) {
try {
const snapshotKey = keySchema.snapshot({ projectId })
const startVersionKey = keySchema.startVersion({ projectId })
const changesKey = keySchema.changes({ projectId })
const expireTimeKey = keySchema.expireTime({ projectId })
const snapshot = chunk.history.snapshot
const startVersion = chunk.startVersion
const changes = chunk.history.changes
const expireTime = Date.now() + TEMPORARY_CACHE_LIFETIME * 1000
await rclient.set_current_chunk(
snapshotKey, // KEYS[1]
startVersionKey, // KEYS[2]
expireTimeKey, // KEYS[3]
changesKey, // KEYS[4]
JSON.stringify(snapshot.toRaw()), // ARGV[1]
startVersion, // ARGV[2]
expireTime, // ARGV[3]
...changes.map(c => JSON.stringify(c.toRaw())) // ARGV[4..]
)
metrics.inc('chunk_store.redis.set_current_chunk', 1, { status: 'success' })
} catch (err) {
logger.error(
{ err, projectId, chunk },
'error setting current chunk in redis'
)
metrics.inc('chunk_store.redis.set_current_chunk', 1, { status: 'error' })
return null // while testing we will suppress any errors
}
}
/**
* Checks whether a cached chunk's version metadata matches the current chunk's metadata
* @param {Chunk} cachedChunk - The chunk retrieved from cache
* @param {Chunk} currentChunk - The current chunk to compare against
* @returns {boolean} - Returns true if the chunks have matching start and end versions, false otherwise
*/
function checkCacheValidity(cachedChunk, currentChunk) {
return Boolean(
cachedChunk &&
cachedChunk.getStartVersion() === currentChunk.getStartVersion() &&
cachedChunk.getEndVersion() === currentChunk.getEndVersion()
)
}
/**
* Validates if a cached chunk matches the current chunk metadata by comparing versions
* @param {Object} cachedChunk - The cached chunk object to validate
* @param {Object} currentChunkMetadata - The current chunk metadata to compare against
* @param {number} currentChunkMetadata.startVersion - The starting version number
* @param {number} currentChunkMetadata.endVersion - The ending version number
* @returns {boolean} - True if the cached chunk is valid, false otherwise
*/
function checkCacheValidityWithMetadata(cachedChunk, currentChunkMetadata) {
return Boolean(
cachedChunk &&
cachedChunk.getStartVersion() === currentChunkMetadata.startVersion &&
cachedChunk.getEndVersion() === currentChunkMetadata.endVersion
)
}
/**
* Compares two chunks for equality using stringified JSON comparison
* @param {string} projectId - The ID of the project
* @param {Chunk} cachedChunk - The cached chunk to compare
* @param {Chunk} currentChunk - The current chunk to compare against
* @returns {boolean} - Returns false if either chunk is null/undefined, otherwise returns the comparison result
*/
function compareChunks(projectId, cachedChunk, currentChunk) {
if (!cachedChunk || !currentChunk) {
return false
}
const identical = JSON.stringify(cachedChunk) === JSON.stringify(currentChunk)
if (!identical) {
try {
logger.error(
{
projectId,
cachedChunkStartVersion: cachedChunk.getStartVersion(),
cachedChunkEndVersion: cachedChunk.getEndVersion(),
currentChunkStartVersion: currentChunk.getStartVersion(),
currentChunkEndVersion: currentChunk.getEndVersion(),
},
'chunk cache mismatch'
)
} catch (err) {
// ignore errors while logging
}
}
metrics.inc('chunk_store.redis.compare_chunks', 1, {
status: identical ? 'success' : 'fail',
})
return identical
}
// Define Lua script for atomic cache clearing
rclient.defineCommand('expire_chunk_cache', {
numberOfKeys: 5,
lua: `
local persistTimeExists = redis.call('EXISTS', KEYS[5])
if persistTimeExists == 1 then
return nil -- chunk has changes pending, do not expire
end
local currentTime = tonumber(ARGV[1])
local expireTimeValue = redis.call('GET', KEYS[4])
if not expireTimeValue then
return nil -- this is a cache-miss
end
local expireTime = tonumber(expireTimeValue)
if currentTime < expireTime then
return nil -- cache is still valid
end
-- Cache is expired and all changes are persisted, proceed to delete the keys atomically
redis.call('DEL', KEYS[1]) -- snapshot key
redis.call('DEL', KEYS[2]) -- startVersion key
redis.call('DEL', KEYS[3]) -- changes key
redis.call('DEL', KEYS[4]) -- expireTime key
return 1
`,
})
/**
* Expire cache entries for a project's chunk data if needed
* @param {string} projectId - The ID of the project whose cache should be cleared
* @returns {Promise<boolean>} A promise that resolves to true if successful, false on error
*/
async function expireCurrentChunk(projectId, currentTime) {
try {
const snapshotKey = keySchema.snapshot({ projectId })
const startVersionKey = keySchema.startVersion({ projectId })
const changesKey = keySchema.changes({ projectId })
const expireTimeKey = keySchema.expireTime({ projectId })
const persistTimeKey = keySchema.persistTime({ projectId })
const result = await rclient.expire_chunk_cache(
snapshotKey,
startVersionKey,
changesKey,
expireTimeKey,
persistTimeKey,
currentTime || Date.now()
)
if (!result) {
logger.debug(
{ projectId },
'chunk cache not expired due to pending changes'
)
metrics.inc('chunk_store.redis.expire_cache', 1, {
status: 'skip-due-to-pending-changes',
})
return false // not expired
}
metrics.inc('chunk_store.redis.expire_cache', 1, { status: 'success' })
return true
} catch (err) {
logger.error({ err, projectId }, 'error clearing chunk cache from redis')
metrics.inc('chunk_store.redis.expire_cache', 1, { status: 'error' })
return false
}
}
// Define Lua script for atomic cache clearing
rclient.defineCommand('clear_chunk_cache', {
numberOfKeys: 5,
lua: `
local persistTimeExists = redis.call('EXISTS', KEYS[5])
if persistTimeExists == 1 then
return nil -- chunk has changes pending, do not clear
end
-- Delete all keys related to a project's chunk cache atomically
redis.call('DEL', KEYS[1]) -- snapshot key
redis.call('DEL', KEYS[2]) -- startVersion key
redis.call('DEL', KEYS[3]) -- changes key
redis.call('DEL', KEYS[4]) -- expireTime key
return 1
`,
})
/**
* Clears all cache entries for a project's chunk data
* @param {string} projectId - The ID of the project whose cache should be cleared
* @returns {Promise<boolean>} A promise that resolves to true if successful, false on error
*/
async function clearCache(projectId) {
try {
const snapshotKey = keySchema.snapshot({ projectId })
const startVersionKey = keySchema.startVersion({ projectId })
const changesKey = keySchema.changes({ projectId })
const expireTimeKey = keySchema.expireTime({ projectId })
const persistTimeKey = keySchema.persistTime({ projectId }) // Add persistTimeKey
const result = await rclient.clear_chunk_cache(
snapshotKey,
startVersionKey,
changesKey,
expireTimeKey,
persistTimeKey
)
if (result === null) {
logger.debug(
{ projectId },
'chunk cache not cleared due to pending changes'
)
metrics.inc('chunk_store.redis.clear_cache', 1, {
status: 'skip-due-to-pending-changes',
})
return false
}
metrics.inc('chunk_store.redis.clear_cache', 1, { status: 'success' })
return true
} catch (err) {
logger.error({ err, projectId }, 'error clearing chunk cache from redis')
metrics.inc('chunk_store.redis.clear_cache', 1, { status: 'error' })
return false
}
}
// Define Lua script for getting chunk status
rclient.defineCommand('get_chunk_status', {
numberOfKeys: 2, // expireTimeKey, persistTimeKey
lua: `
local expireTimeValue = redis.call('GET', KEYS[1])
local persistTimeValue = redis.call('GET', KEYS[2])
return {expireTimeValue, persistTimeValue}
`,
})
/**
* Retrieves the current chunk status for a given project from Redis
* @param {string} projectId - The ID of the project to get status for
* @returns {Promise<Object>} Object containing expireTime and persistTime, or nulls on error
* @property {number|null} expireTime - The expiration time of the chunk
* @property {number|null} persistTime - The persistence time of the chunk
*/
async function getCurrentChunkStatus(projectId) {
try {
const expireTimeKey = keySchema.expireTime({ projectId })
const persistTimeKey = keySchema.persistTime({ projectId })
const result = await rclient.get_chunk_status(expireTimeKey, persistTimeKey)
// Lua script returns an array [expireTimeValue, persistTimeValue]
// Redis nil replies are converted to null by ioredis
const [expireTime, persistTime] = result
return {
expireTime: expireTime ? parseInt(expireTime, 10) : null, // Parse to number or null
persistTime: persistTime ? parseInt(persistTime, 10) : null, // Parse to number or null
}
} catch (err) {
logger.warn({ err, projectId }, 'error getting chunk status from redis')
return { expireTime: null, persistTime: null } // Return nulls on error
}
}
/**
* Sets the persist time for a project's chunk cache.
* This is primarily intended for testing purposes.
* @param {string} projectId - The ID of the project.
* @param {number} timestamp - The timestamp to set as the persist time.
* @returns {Promise<void>}
*/
async function setPersistTime(projectId, timestamp) {
try {
const persistTimeKey = keySchema.persistTime({ projectId })
await rclient.set(persistTimeKey, timestamp)
metrics.inc('chunk_store.redis.set_persist_time', 1, { status: 'success' })
} catch (err) {
logger.error(
{ err, projectId, timestamp },
'error setting persist time in redis'
)
metrics.inc('chunk_store.redis.set_persist_time', 1, { status: 'error' })
// Re-throw the error so the test fails if setting fails
throw err
}
}
module.exports = {
getCurrentChunk,
getCurrentChunkIfValid,
setCurrentChunk,
getCurrentChunkMetadata,
checkCacheValidity,
checkCacheValidityWithMetadata,
compareChunks,
expireCurrentChunk,
clearCache,
getCurrentChunkStatus,
setPersistTime, // Export the new function
}
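Taken together, these exports support a simple read-through pattern. A minimal sketch, assuming a hypothetical loadChunkFromStore() fallback that queries the primary chunk store (not part of this module):

const chunkCache = require('../lib/chunk_store/redis') // path as used by the scripts below

async function getChunkWithCache(projectId, loadChunkFromStore) {
  const cached = await chunkCache.getCurrentChunk(projectId)
  if (cached) {
    return cached // snapshot, start version and changes rebuilt into a Chunk
  }
  const chunk = await loadChunkFromStore(projectId) // cache miss: read the primary store
  await chunkCache.setCurrentChunk(projectId, chunk) // repopulate Redis for the next reader
  return chunk
}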


@ -25,8 +25,8 @@ const gunzip = promisify(zlib.gunzip)
class LoadError extends OError {
/**
* @param {number|string} projectId
* @param {number|string} chunkId
* @param {string} projectId
* @param {string} chunkId
* @param {any} cause
*/
constructor(projectId, chunkId, cause) {
@ -42,8 +42,8 @@ class LoadError extends OError {
class StoreError extends OError {
/**
* @param {number|string} projectId
* @param {number|string} chunkId
* @param {string} projectId
* @param {string} chunkId
* @param {any} cause
*/
constructor(projectId, chunkId, cause) {
@ -58,8 +58,8 @@ class StoreError extends OError {
}
/**
* @param {number|string} projectId
* @param {number|string} chunkId
* @param {string} projectId
* @param {string} chunkId
* @return {string}
*/
function getKey(projectId, chunkId) {
@ -89,8 +89,8 @@ class HistoryStore {
/**
* Load the raw object for a History.
*
* @param {number|string} projectId
* @param {number|string} chunkId
* @param {string} projectId
* @param {string} chunkId
* @return {Promise<import('overleaf-editor-core/lib/types').RawHistory>}
*/
async loadRaw(projectId, chunkId) {
@ -144,8 +144,8 @@ class HistoryStore {
/**
* Compress and store a {@link History}.
*
* @param {number|string} projectId
* @param {number|string} chunkId
* @param {string} projectId
* @param {string} chunkId
* @param {import('overleaf-editor-core/lib/types').RawHistory} rawHistory
*/
async storeRaw(projectId, chunkId, rawHistory) {


@ -1,6 +1,8 @@
// @ts-check
'use strict'
const env = process.env.NODE_ENV || 'development'
const knexfile = require('../../knexfile')
module.exports = require('knex')(knexfile[env])
module.exports = require('knex').default(knexfile[env])


@ -0,0 +1,19 @@
const config = require('config')
const redis = require('@overleaf/redis-wrapper')
const historyRedisOptions = config.get('redis.history')
const rclientHistory = redis.createClient(historyRedisOptions)
const lockRedisOptions = config.get('redis.history')
const rclientLock = redis.createClient(lockRedisOptions)
async function disconnect() {
await Promise.all([rclientHistory.disconnect(), rclientLock.disconnect()])
}
module.exports = {
rclientHistory,
rclientLock,
redis,
disconnect,
}


@ -0,0 +1,52 @@
const BATCH_SIZE = 1000 // Default batch size for SCAN
/**
* Asynchronously scans a Redis instance or cluster for keys matching a pattern.
*
* This function handles both standalone Redis instances and Redis clusters.
* For clusters, it iterates over all master nodes. It yields keys in batches
* as they are found by the SCAN command.
*
* @param {object} redisClient - The Redis client instance (from @overleaf/redis-wrapper).
* @param {string} pattern - The pattern to match keys against (e.g., 'user:*').
* @param {number} [count=BATCH_SIZE] - Optional hint for Redis SCAN count per iteration.
* @yields {string[]} A batch of matching keys.
*/
async function* scanRedisCluster(redisClient, pattern, count = BATCH_SIZE) {
const nodes = redisClient.nodes ? redisClient.nodes('master') : [redisClient]
for (const node of nodes) {
let cursor = '0'
do {
// redisClient from @overleaf/redis-wrapper uses ioredis style commands
const [nextCursor, keys] = await node.scan(
cursor,
'MATCH',
pattern,
'COUNT',
count
)
cursor = nextCursor
if (keys.length > 0) {
yield keys
}
} while (cursor !== '0')
}
}
/**
* Extracts the content within the first pair of curly braces {} from a string.
* This is used to extract a user ID or project ID from a Redis key.
*
* @param {string} key - The input string containing content within curly braces.
* @returns {string | null} The extracted content (the key ID) if found, otherwise null.
*/
function extractKeyId(key) {
const match = key.match(/\{(.*?)\}/)
if (match && match[1]) {
return match[1]
}
return null
}
module.exports = { scanRedisCluster, extractKeyId }
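For reference, a small sketch of how these helpers might be combined to enumerate cached projects; the 'snapshot:{*}' pattern is assumed to match the keySchema shown earlier:

const { rclientHistory } = require('../lib/redis') // client module shown above
const { scanRedisCluster, extractKeyId } = require('../lib/scan')

async function listCachedProjectIds() {
  const projectIds = []
  for await (const keys of scanRedisCluster(rclientHistory, 'snapshot:{*}')) {
    for (const key of keys) {
      const projectId = extractKeyId(key) // pulls the id out of the {...} hash tag
      if (projectId) {
        projectIds.push(projectId)
      }
    }
  }
  return projectIds
}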


@ -9,6 +9,7 @@ import {
create,
} from '../lib/chunk_store/index.js'
import { client } from '../lib/mongodb.js'
import redis from '../lib/redis.js'
import knex from '../lib/knex.js'
import { historyStore } from '../lib/history_store.js'
import pLimit from 'p-limit'
@ -1091,5 +1092,13 @@ if (import.meta.url === `file://${process.argv[1]}`) {
.catch(err => {
console.error('Error closing MongoDB connection:', err)
})
redis
.disconnect()
.then(() => {
console.log('Redis connection closed')
})
.catch(err => {
console.error('Error closing Redis connection:', err)
})
})
}


@ -10,6 +10,7 @@ import {
import assert from '../lib/assert.js'
import knex from '../lib/knex.js'
import { client } from '../lib/mongodb.js'
import redis from '../lib/redis.js'
import { setTimeout } from 'node:timers/promises'
import fs from 'node:fs'
@ -23,6 +24,7 @@ async function gracefulShutdown() {
console.log('Gracefully shutting down')
await knex.destroy()
await client.close()
await redis.disconnect()
await setTimeout(100)
process.exit()
}


@ -240,17 +240,25 @@ async function processPendingProjects(
changeTimes.push(pendingAt)
const pendingAge = Math.floor((Date.now() - pendingAt.getTime()) / 1000)
if (pendingAge > WARN_THRESHOLD) {
const backupStatus = await getBackupStatus(projectId)
logger.warn(
{
projectId,
pendingAt,
pendingAge,
backupStatus,
warnThreshold: WARN_THRESHOLD,
},
`pending change exceeds rpo warning threshold`
)
try {
const backupStatus = await getBackupStatus(projectId)
logger.warn(
{
projectId,
pendingAt,
pendingAge,
backupStatus,
warnThreshold: WARN_THRESHOLD,
},
`pending change exceeds rpo warning threshold`
)
} catch (err) {
logger.error(
{ projectId, pendingAt, pendingAge },
'Error getting backup status'
)
throw err
}
}
}
if (showOnly && verbose) {
@ -290,10 +298,11 @@ async function processPendingProjects(
)
}
}
const oldestChange = changeTimes.reduce((min, time) =>
time < min ? time : min
)
// Set oldestChange to undefined if there are no changes
const oldestChange =
changeTimes.length > 0
? changeTimes.reduce((min, time) => (time < min ? time : min))
: undefined
if (showOnly) {
console.log(
@ -303,7 +312,9 @@ async function processPendingProjects(
console.log(`Found ${count} projects with pending changes:`)
console.log(` ${addedCount} jobs added to queue`)
console.log(` ${existingCount} jobs already existed in queue`)
console.log(` Oldest pending change: ${formatPendingTime(oldestChange)}`)
if (oldestChange) {
console.log(` Oldest pending change: ${formatPendingTime(oldestChange)}`)
}
}
}
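The guard on oldestChange above matters because Array.prototype.reduce throws when called on an empty array without an initial value, which is exactly the case when no pending changes are found. A two-line illustration:

const changeTimes = []
changeTimes.reduce((min, time) => (time < min ? time : min))
// TypeError: Reduce of empty array with no initial value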


@ -0,0 +1,98 @@
const logger = require('@overleaf/logger')
const commandLineArgs = require('command-line-args') // Add this line
const redis = require('../lib/redis')
const { scanRedisCluster, extractKeyId } = require('../lib/scan')
const { expireCurrentChunk } = require('../lib/chunk_store/redis')
const rclient = redis.rclientHistory
const EXPIRE_TIME_KEY_PATTERN = `expire-time:{*}`
const optionDefinitions = [{ name: 'dry-run', alias: 'd', type: Boolean }]
const options = commandLineArgs(optionDefinitions)
const DRY_RUN = options['dry-run'] || false
logger.initialize('expire-redis-chunks')
function isExpiredKey(expireTimestamp, currentTime) {
const expireTime = parseInt(expireTimestamp, 10)
if (isNaN(expireTime)) {
return false
}
logger.debug(
{
expireTime,
currentTime,
expireIn: expireTime - currentTime,
expired: currentTime > expireTime,
},
'Checking if key is expired'
)
return currentTime > expireTime
}
async function processKeysBatch(keysBatch, rclient) {
let clearedKeyCount = 0
if (keysBatch.length === 0) {
return 0
}
// For efficiency, we use MGET to fetch all the timestamps in a single request
const expireTimestamps = await rclient.mget(keysBatch)
const currentTime = Date.now()
for (let i = 0; i < keysBatch.length; i++) {
const key = keysBatch[i]
// For each key, do a quick check to see if the key is expired before calling
// the LUA script to expire the chunk atomically.
if (isExpiredKey(expireTimestamps[i], currentTime)) {
const projectId = extractKeyId(key)
if (DRY_RUN) {
logger.info({ projectId }, '[Dry Run] Would expire chunk for project')
} else {
await expireCurrentChunk(projectId)
}
clearedKeyCount++
}
}
return clearedKeyCount
}
async function expireRedisChunks() {
let scannedKeyCount = 0
let clearedKeyCount = 0
const START_TIME = Date.now()
if (DRY_RUN) {
// Use global DRY_RUN
logger.info({}, 'starting expireRedisChunks scan in DRY RUN mode')
} else {
logger.info({}, 'starting expireRedisChunks scan')
}
for await (const keysBatch of scanRedisCluster(
rclient,
EXPIRE_TIME_KEY_PATTERN
)) {
scannedKeyCount += keysBatch.length
clearedKeyCount += await processKeysBatch(keysBatch, rclient)
if (scannedKeyCount % 1000 === 0) {
logger.info(
{ scannedKeyCount, clearedKeyCount },
'expireRedisChunks scan progress'
)
}
}
logger.info(
{
scannedKeyCount,
clearedKeyCount,
elapsedTimeInSeconds: Math.floor((Date.now() - START_TIME) / 1000),
dryRun: DRY_RUN,
},
'expireRedisChunks scan complete'
)
await redis.disconnect()
}
expireRedisChunks().catch(err => {
logger.fatal({ err }, 'unhandled error in expireRedisChunks')
process.exit(1)
})


@ -0,0 +1,145 @@
const { rclientHistory, disconnect } = require('../lib/redis')
const { scanRedisCluster } = require('../lib/scan')
// Lua script to get snapshot length, change lengths, and change timestamps
// Assumes snapshot key is a string and changes key is a list.
const LUA_SCRIPT = `
-- local cjson = require('cjson')
local snapshotKey = KEYS[1]
local changesKey = KEYS[2]
-- Get snapshot length (returns 0 if key does not exist)
local snapshotLen = redis.call('STRLEN', snapshotKey)
-- Return nil if snapshot is empty
if snapshotLen == 0 then
return nil
end
local changeLengths = {}
local changeTimestamps = {}
-- Get all changes (returns empty list if key does not exist)
local changes = redis.call('LRANGE', changesKey, 0, -1)
-- FIXME: it would be better to send all the changes back and do the processing
-- in JS to avoid blocking redis, if we need to run this script regularly
for i, change in ipairs(changes) do
-- Calculate length
table.insert(changeLengths, string.len(change))
-- Attempt to decode JSON and extract timestamp
local ok, decoded = pcall(cjson.decode, change)
if ok and type(decoded) == 'table' and decoded.timestamp then
table.insert(changeTimestamps, decoded.timestamp)
else
-- Handle cases where decoding fails or timestamp is missing
-- Log or insert a placeholder like nil if needed, otherwise skip
table.insert(changeTimestamps, nil) -- Keep placeholder for consistency
end
end
-- Return snapshot length, list of change lengths, and list of change timestamps
return {snapshotLen, changeLengths, changeTimestamps}
`
// Define the command if it doesn't exist
if (!rclientHistory.getProjectBufferStats) {
rclientHistory.defineCommand('getProjectBufferStats', {
numberOfKeys: 2,
lua: LUA_SCRIPT,
})
}
/**
* Processes a single project ID: fetches its buffer stats from Redis
* and writes the results to the output stream in CSV format.
*
* @param {string} projectId The project ID to process.
* @param {WritableStream} outputStream The stream to write CSV output to.
*/
async function processProject(projectId, outputStream) {
try {
// Get current time in milliseconds *before* fetching data
const nowMs = Date.now()
// Execute the Lua script
const result = await rclientHistory.getProjectBufferStats(
`snapshot:${projectId}`,
`changes:${projectId}`
)
// Check if the result is null (e.g., snapshot is empty)
if (result === null) {
console.log(
`Skipping project ${projectId}: Snapshot is empty or does not exist.`
)
return
}
const [snapshotSize, changeSizes, changeTimestamps] = result
// Output snapshot size
outputStream.write(`${projectId},snapshotSize,${snapshotSize}\n`)
outputStream.write(`${projectId},changeCount,${changeSizes.length}\n`)
const changes = changeSizes.map((size, index) => [
size,
changeTimestamps[index],
])
let totalChangeSize = 0
// Output change sizes
for (const [changeSize, changeTimestamp] of changes) {
totalChangeSize += parseInt(changeSize, 10)
const age = nowMs - new Date(changeTimestamp)
const ageInSeconds = Math.floor(age / 1000)
outputStream.write(`${projectId},change,${changeSize},${ageInSeconds}\n`)
}
outputStream.write(`${projectId},totalChangeSize,${totalChangeSize}\n`)
} catch (err) {
// Log error for this specific project but continue with others
console.error(`Error processing project ${projectId}:`, err)
}
}
async function main() {
const outputStream = process.stdout
// Write CSV header
outputStream.write('projectId,type,size,age\n')
try {
const scanPattern = 'snapshot:*'
console.log(`Scanning Redis for keys matching "${scanPattern}"...`)
for await (const keysBatch of scanRedisCluster(
rclientHistory,
scanPattern
)) {
for (const key of keysBatch) {
const parts = key.split(':')
if (parts.length !== 2 || parts[0] !== 'snapshot') {
console.warn(`Skipping malformed key: ${key}`)
continue
}
const projectId = parts[1]
// Call processProject directly and await it sequentially
await processProject(projectId, outputStream)
}
}
console.log('Finished processing keys.')
} catch (error) {
console.error('Error during Redis scan:', error)
} finally {
await disconnect()
console.log('Redis connections closed.')
}
}
main().catch(err => {
console.error('Unhandled error in main:', err)
process.exit(1)
})


@ -1,11 +1,28 @@
import redis from '@overleaf/redis-wrapper'
import config from 'config'
const redisOptions = config.get('redis.queue')
// Get allowed Redis dbs from config
const redisConfig = config.get('redis')
const allowedDbs = Object.keys(redisConfig)
// Get the Redis db from command line argument or use the first available db as default
const db = process.argv[2]
// Validate redis db
if (!allowedDbs.includes(db)) {
if (db) {
console.error('Invalid redis db:', db)
}
console.error(`Usage: node redis.mjs [${allowedDbs.join('|')}]`)
process.exit(1)
}
// Get redis options based on command line argument
const redisOptions = config.get(`redis.${db}`)
console.log('Using redis db:', db)
console.log('REDIS CONFIG', {
...redisOptions,
password: '*'.repeat(redisOptions.password.length),
password: '*'.repeat(redisOptions.password?.length),
})
const rclient = redis.createClient(redisOptions)


@ -6,6 +6,7 @@ import {
} from '../lib/chunk_store/index.js'
import { client } from '../lib/mongodb.js'
import knex from '../lib/knex.js'
import redis from '../lib/redis.js'
import {
loadGlobalBlobs,
BlobStore,
@ -247,4 +248,7 @@ main()
.finally(() => {
knex.destroy().catch(err => console.error('Error closing Postgres:', err))
client.close().catch(err => console.error('Error closing MongoDB:', err))
redis
.disconnect()
.catch(err => console.error('Error disconnecting Redis:', err))
})


@ -16,6 +16,7 @@ import {
db,
client,
} from '../lib/mongodb.js'
import redis from '../lib/redis.js'
import commandLineArgs from 'command-line-args'
import fs from 'node:fs'
@ -146,4 +147,7 @@ main()
console.error('Error closing Postgres connection:', err)
})
client.close().catch(err => console.error('Error closing MongoDB:', err))
redis.disconnect().catch(err => {
console.error('Error disconnecting Redis:', err)
})
})


@ -2,6 +2,7 @@ import commandLineArgs from 'command-line-args'
import { verifyProjectWithErrorContext } from '../lib/backupVerifier.mjs'
import knex from '../lib/knex.js'
import { client } from '../lib/mongodb.js'
import redis from '../lib/redis.js'
import { setTimeout } from 'node:timers/promises'
import { loadGlobalBlobs } from '../lib/blob_store/index.js'
@ -10,6 +11,7 @@ const { historyId } = commandLineArgs([{ name: 'historyId', type: String }])
async function gracefulShutdown(code = process.exitCode) {
await knex.destroy()
await client.close()
await redis.disconnect()
await setTimeout(1_000)
process.exit(code)
}


@ -14,6 +14,7 @@ import { loadGlobalBlobs } from '../lib/blob_store/index.js'
import { getDatesBeforeRPO } from '../../backupVerifier/utils.mjs'
import { EventEmitter } from 'node:events'
import { mongodb } from '../index.js'
import redis from '../lib/redis.js'
logger.logger.level('fatal')
@ -30,6 +31,7 @@ const usageMessage = [
async function gracefulShutdown(code = process.exitCode) {
await knex.destroy()
await client.close()
await redis.disconnect()
await setTimeout(1_000)
process.exit(code)
}


@ -30,14 +30,17 @@ import { historyStore } from '../../../../storage/lib/history_store.js'
* @typedef {import("overleaf-editor-core").Blob} Blob
*/
async function verifyProjectScript(historyId) {
// Timeout for script execution, increased to avoid flaky tests
const SCRIPT_TIMEOUT = 15_000
async function verifyProjectScript(historyId, expectFail = true) {
try {
const result = await promisify(execFile)(
process.argv0,
['storage/scripts/verify_project.mjs', `--historyId=${historyId}`],
{
encoding: 'utf-8',
timeout: 5_000,
timeout: SCRIPT_TIMEOUT,
env: {
...process.env,
LOG_LEVEL: 'warn',
@ -53,6 +56,9 @@ async function verifyProjectScript(historyId) {
'code' in err &&
'stderr' in err
) {
if (!expectFail) {
console.log(err)
}
return {
stdout: typeof err.stdout === 'string' ? err.stdout : '',
status: typeof err.code === 'number' ? err.code : -1,
@ -68,7 +74,7 @@ async function verifyProjectScript(historyId) {
* @param {string} hash
* @return {Promise<{stdout: string, status:number }>}
*/
async function verifyBlobScript(historyId, hash) {
async function verifyBlobScript(historyId, hash, expectFail = true) {
try {
const result = await promisify(execFile)(
process.argv0,
@ -79,7 +85,7 @@ async function verifyBlobScript(historyId, hash) {
],
{
encoding: 'utf-8',
timeout: 5_000,
timeout: SCRIPT_TIMEOUT,
env: {
...process.env,
LOG_LEVEL: 'warn',
@ -89,6 +95,9 @@ async function verifyBlobScript(historyId, hash) {
return { status: 0, stdout: result.stdout }
} catch (err) {
if (err && typeof err === 'object' && 'stdout' in err && 'code' in err) {
if (!expectFail) {
console.log(err)
}
return {
stdout: typeof err.stdout === 'string' ? err.stdout : '',
status: typeof err.code === 'number' ? err.code : -1,
@ -202,6 +211,7 @@ async function checkDEKExists(historyId) {
}
describe('backupVerifier', function () {
this.timeout(5_000 + SCRIPT_TIMEOUT) // allow time for external scripts to run
const historyIdPostgres = '42'
const historyIdMongo = '000000000000000000000042'
let blobHashPG, blobHashMongo, blobPathPG
@ -228,7 +238,7 @@ describe('backupVerifier', function () {
describe('storage/scripts/verify_project.mjs', function () {
describe('when the project is appropriately backed up', function () {
it('should return 0', async function () {
const response = await verifyProjectScript(historyIdPostgres)
const response = await verifyProjectScript(historyIdPostgres, false)
expect(response.status).to.equal(0)
})
})
@ -306,12 +316,20 @@ describe('backupVerifier', function () {
expect(result.stdout).to.include('hash mismatch for backed up blob')
})
it('should successfully verify from postgres', async function () {
const result = await verifyBlobScript(historyIdPostgres, blobHashPG)
const result = await verifyBlobScript(
historyIdPostgres,
blobHashPG,
false
)
expect(result.status).to.equal(0)
expect(result.stdout.split('\n')).to.include('OK')
})
it('should successfully verify from mongo', async function () {
const result = await verifyBlobScript(historyIdMongo, blobHashMongo)
const result = await verifyBlobScript(
historyIdMongo,
blobHashMongo,
false
)
expect(result.status).to.equal(0)
expect(result.stdout.split('\n')).to.include('OK')
})


@ -22,6 +22,7 @@ const TextOperation = core.TextOperation
const V2DocVersions = core.V2DocVersions
const knex = require('../../../../storage').knex
const redis = require('../../../../storage/lib/chunk_store/redis')
describe('history import', function () {
beforeEach(cleanup.everything)
@ -580,7 +581,7 @@ describe('history import', function () {
.catch(expectResponse.unprocessableEntity)
.then(getLatestContent)
.then(response => {
// Check that no chaes were made
// Check that no changes were made
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(1)
expect(snapshot.getFile(mainFilePathname).getHash()).to.equal(
@ -594,6 +595,10 @@ describe('history import', function () {
testFiles.NULL_CHARACTERS_TXT_BYTE_LENGTH
)
})
.then(() => {
// Now clear the cache because we have changed the string length in the database
return redis.clearCache(testProjectId)
})
.then(importChanges)
.then(getLatestContent)
.then(response => {


@ -21,6 +21,8 @@ const {
Snapshot,
Change,
AddFileOperation,
EditFileOperation,
TextOperation,
} = require('overleaf-editor-core')
const testProjects = require('./support/test_projects')
@ -103,56 +105,176 @@ describe('project controller', function () {
// https://github.com/overleaf/write_latex/pull/5120#discussion_r244291862
})
describe('getLatestHashedContent', function () {
let limitsToPersistImmediately
describe('project with changes', function () {
let projectId
before(function () {
beforeEach(async function () {
// used to provide a limit which forces us to persist all of the changes.
const farFuture = new Date()
farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000)
limitsToPersistImmediately = {
const limits = {
minChangeTimestamp: farFuture,
maxChangeTimestamp: farFuture,
}
})
it('returns a snaphot', async function () {
const changes = [
new Change(
[new AddFileOperation('test.tex', File.fromString('ab'))],
new Date(),
[]
),
new Change(
[new AddFileOperation('other.tex', File.fromString('hello'))],
new Date(),
[]
),
]
const projectId = await createEmptyProject()
await persistChanges(projectId, changes, limitsToPersistImmediately, 0)
const response =
await testServer.basicAuthClient.apis.Project.getLatestHashedContent({
project_id: projectId,
})
expect(response.status).to.equal(HTTPStatus.OK)
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(1)
expect(snapshot.getFile('test.tex').getHash()).to.equal(
testFiles.STRING_AB_HASH
)
projectId = await createEmptyProject()
await persistChanges(projectId, changes, limits, 0)
})
describe('getLatestHistoryRaw', function () {
it('should handles read', async function () {
const projectId = fixtures.docs.initializedProject.id
describe('getLatestHashedContent', function () {
it('returns a snapshot', async function () {
const response =
await testServer.pseudoJwtBasicAuthClient.apis.Project.getLatestHistoryRaw(
{
project_id: projectId,
readOnly: 'true',
}
await testServer.basicAuthClient.apis.Project.getLatestHashedContent({
project_id: projectId,
})
expect(response.status).to.equal(HTTPStatus.OK)
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(2)
expect(snapshot.getFile('test.tex').getHash()).to.equal(
testFiles.STRING_AB_HASH
)
})
})
describe('getChanges', function () {
it('returns all changes when not given a limit', async function () {
const response =
await testServer.basicAuthClient.apis.Project.getChanges({
project_id: projectId,
})
expect(response.status).to.equal(HTTPStatus.OK)
const changes = response.obj
expect(changes.length).to.equal(2)
const filenames = changes
.flatMap(change => change.operations)
.map(operation => operation.pathname)
expect(filenames).to.deep.equal(['test.tex', 'other.tex'])
})
it('returns only requested changes', async function () {
const response =
await testServer.basicAuthClient.apis.Project.getChanges({
project_id: projectId,
since: 1,
})
expect(response.status).to.equal(HTTPStatus.OK)
const changes = response.obj
expect(changes.length).to.equal(1)
const filenames = changes
.flatMap(change => change.operations)
.map(operation => operation.pathname)
expect(filenames).to.deep.equal(['other.tex'])
})
it('rejects negative versions', async function () {
await expect(
testServer.basicAuthClient.apis.Project.getChanges({
project_id: projectId,
since: -1,
})
).to.be.rejectedWith('Bad Request')
})
it('rejects out of bounds versions', async function () {
await expect(
testServer.basicAuthClient.apis.Project.getChanges({
project_id: projectId,
since: 20,
})
).to.be.rejectedWith('Bad Request')
})
})
})
describe('project with many chunks', function () {
let projectId
beforeEach(async function () {
// used to provide a limit which forces us to persist all of the changes.
const farFuture = new Date()
farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000)
const limits = {
minChangeTimestamp: farFuture,
maxChangeTimestamp: farFuture,
maxChunkChanges: 5,
}
const changes = [
new Change(
[new AddFileOperation('test.tex', File.fromString(''))],
new Date(),
[]
),
]
for (let i = 0; i < 20; i++) {
const textOperation = new TextOperation()
textOperation.retain(i)
textOperation.insert('x')
changes.push(
new Change(
[new EditFileOperation('test.tex', textOperation)],
new Date(),
[]
)
expect(response.body).to.deep.equal({
startVersion: 0,
endVersion: 1,
endTimestamp: '2032-01-01T00:00:00.000Z',
})
)
}
projectId = await createEmptyProject()
await persistChanges(projectId, changes, limits, 0)
})
it('returns all changes when not given a limit', async function () {
const response = await testServer.basicAuthClient.apis.Project.getChanges(
{
project_id: projectId,
}
)
expect(response.status).to.equal(HTTPStatus.OK)
const changes = response.obj
expect(changes.length).to.equal(21)
expect(changes[10].operations[0].textOperation).to.deep.equal([9, 'x'])
})
it('returns only requested changes', async function () {
const response = await testServer.basicAuthClient.apis.Project.getChanges(
{
project_id: projectId,
since: 10,
}
)
expect(response.status).to.equal(HTTPStatus.OK)
const changes = response.obj
expect(changes.length).to.equal(11)
expect(changes[2].operations[0].textOperation).to.deep.equal([11, 'x'])
})
})
describe('getLatestHistoryRaw', function () {
it('should handles read', async function () {
const projectId = fixtures.docs.initializedProject.id
const response =
await testServer.pseudoJwtBasicAuthClient.apis.Project.getLatestHistoryRaw(
{
project_id: projectId,
readOnly: 'true',
}
)
expect(response.body).to.deep.equal({
startVersion: 0,
endVersion: 1,
endTimestamp: '2032-01-01T00:00:00.000Z',
})
})
})


@ -26,7 +26,7 @@ async function listenOnRandomPort() {
return
} catch {}
}
server = await startApp(0)
server = await startApp(0, false)
}
after('close server', function (done) {


@ -0,0 +1,248 @@
'use strict'
const OError = require('@overleaf/o-error')
const { expect } = require('chai')
const assert = require('../../../../storage/lib/assert')
describe('assert', function () {
describe('blobHash', function () {
it('should not throw for valid blob hashes', function () {
expect(() =>
assert.blobHash(
'aad321caf77ca6c5ab09e6c638c237705f93b001',
'should be a blob hash'
)
).to.not.throw()
})
it('should throw for invalid blob hashes', function () {
try {
assert.blobHash('invalid-hash', 'should be a blob hash')
expect.fail()
} catch (error) {
expect(error).to.be.instanceOf(TypeError)
expect(error.message).to.equal('should be a blob hash')
expect(OError.getFullInfo(error)).to.deep.equal({ arg: 'invalid-hash' })
}
})
it('should throw for string integer blob hashes', function () {
try {
assert.blobHash('123', 'should be a blob hash')
expect.fail()
} catch (error) {
expect(error).to.be.instanceOf(TypeError)
expect(error.message).to.equal('should be a blob hash')
expect(OError.getFullInfo(error)).to.deep.equal({ arg: '123' })
}
})
})
describe('projectId', function () {
it('should not throw for valid mongo project ids', function () {
expect(() =>
assert.projectId('507f1f77bcf86cd799439011', 'should be a project id')
).to.not.throw()
})
it('should not throw for valid postgres project ids', function () {
expect(() =>
assert.projectId('123456789', 'should be a project id')
).to.not.throw()
})
it('should throw for invalid project ids', function () {
try {
assert.projectId('invalid-id', 'should be a project id')
expect.fail()
} catch (error) {
expect(error).to.be.instanceOf(TypeError)
expect(error.message).to.equal('should be a project id')
expect(OError.getFullInfo(error)).to.deep.equal({ arg: 'invalid-id' })
}
})
it('should throw for non-numeric project ids', function () {
try {
assert.projectId('12345x', 'should be a project id')
expect.fail()
} catch (error) {
expect(error).to.be.instanceOf(TypeError)
expect(error.message).to.equal('should be a project id')
expect(OError.getFullInfo(error)).to.deep.equal({ arg: '12345x' })
}
})
it('should throw for postgres ids starting with 0', function () {
try {
assert.projectId('0123456', 'should be a project id')
expect.fail()
} catch (error) {
expect(error).to.be.instanceOf(TypeError)
expect(error.message).to.equal('should be a project id')
expect(OError.getFullInfo(error)).to.deep.equal({ arg: '0123456' })
}
})
})
describe('chunkId', function () {
it('should not throw for valid mongo chunk ids', function () {
expect(() =>
assert.chunkId('507f1f77bcf86cd799439011', 'should be a chunk id')
).to.not.throw()
})
it('should not throw for valid postgres chunk ids', function () {
expect(() =>
assert.chunkId('123456789', 'should be a chunk id')
).to.not.throw()
})
it('should throw for invalid chunk ids', function () {
try {
assert.chunkId('invalid-id', 'should be a chunk id')
expect.fail()
} catch (error) {
expect(error).to.be.instanceOf(TypeError)
expect(error.message).to.equal('should be a chunk id')
expect(OError.getFullInfo(error)).to.deep.equal({ arg: 'invalid-id' })
}
})
it('should throw for integer chunk ids', function () {
try {
assert.chunkId(12345, 'should be a chunk id')
expect.fail()
} catch (error) {
expect(error).to.be.instanceOf(TypeError)
expect(error.message).to.equal('should be a chunk id')
expect(OError.getFullInfo(error)).to.deep.equal({ arg: 12345 })
}
})
})
describe('mongoId', function () {
it('should not throw for valid mongo ids', function () {
expect(() =>
assert.mongoId('507f1f77bcf86cd799439011', 'should be a mongo id')
).to.not.throw()
})
it('should throw for invalid mongo ids', function () {
try {
assert.mongoId('invalid-id', 'should be a mongo id')
expect.fail()
} catch (error) {
expect(error).to.be.instanceOf(TypeError)
expect(error.message).to.equal('should be a mongo id')
expect(OError.getFullInfo(error)).to.deep.equal({ arg: 'invalid-id' })
}
})
it('should throw for numeric mongo ids', function () {
try {
assert.mongoId('12345', 'should be a mongo id')
expect.fail()
} catch (error) {
expect(error).to.be.instanceOf(TypeError)
expect(error.message).to.equal('should be a mongo id')
expect(OError.getFullInfo(error)).to.deep.equal({ arg: '12345' })
}
})
it('should throw for mongo ids that are too short', function () {
try {
assert.mongoId('507f1f77bcf86cd79943901', 'should be a mongo id')
expect.fail()
} catch (error) {
expect(error).to.be.instanceOf(TypeError)
expect(error.message).to.equal('should be a mongo id')
expect(OError.getFullInfo(error)).to.deep.equal({
arg: '507f1f77bcf86cd79943901',
})
}
})
it('should throw for mongo ids that are too long', function () {
try {
assert.mongoId('507f1f77bcf86cd7994390111', 'should be a mongo id')
expect.fail()
} catch (error) {
expect(error).to.be.instanceOf(TypeError)
expect(error.message).to.equal('should be a mongo id')
expect(OError.getFullInfo(error)).to.deep.equal({
arg: '507f1f77bcf86cd7994390111',
})
}
})
})
describe('postgresId', function () {
it('should not throw for valid postgres ids', function () {
expect(() =>
assert.postgresId('123456789', 'should be a postgres id')
).to.not.throw()
expect(() =>
assert.postgresId('1', 'should be a postgres id')
).to.not.throw()
})
it('should throw for invalid postgres ids', function () {
try {
assert.postgresId('invalid-id', 'should be a postgres id')
expect.fail()
} catch (error) {
expect(error).to.be.instanceOf(TypeError)
expect(error.message).to.equal('should be a postgres id')
expect(OError.getFullInfo(error)).to.deep.equal({ arg: 'invalid-id' })
}
})
it('should throw for postgres ids starting with 0', function () {
try {
assert.postgresId('0123456', 'should be a postgres id')
expect.fail()
} catch (error) {
expect(error).to.be.instanceOf(TypeError)
expect(error.message).to.equal('should be a postgres id')
expect(OError.getFullInfo(error)).to.deep.equal({ arg: '0123456' })
}
})
it('should throw for postgres ids that are too long', function () {
try {
assert.postgresId('12345678901', 'should be a postgres id')
expect.fail()
} catch (error) {
expect(error).to.be.instanceOf(TypeError)
expect(error.message).to.equal('should be a postgres id')
expect(OError.getFullInfo(error)).to.deep.equal({ arg: '12345678901' })
}
})
})
describe('regex constants', function () {
it('MONGO_ID_REGEXP should match valid mongo ids', function () {
expect('507f1f77bcf86cd799439011').to.match(assert.MONGO_ID_REGEXP)
expect('abcdef0123456789abcdef01').to.match(assert.MONGO_ID_REGEXP)
})
it('MONGO_ID_REGEXP should not match invalid mongo ids', function () {
expect('invalid-id').to.not.match(assert.MONGO_ID_REGEXP)
expect('507f1f77bcf86cd79943901').to.not.match(assert.MONGO_ID_REGEXP) // too short
expect('507f1f77bcf86cd7994390111').to.not.match(assert.MONGO_ID_REGEXP) // too long
expect('507F1F77BCF86CD799439011').to.not.match(assert.MONGO_ID_REGEXP) // uppercase
})
it('POSTGRES_ID_REGEXP should match valid postgres ids', function () {
expect('123456789').to.match(assert.POSTGRES_ID_REGEXP)
expect('1').to.match(assert.POSTGRES_ID_REGEXP)
})
it('POSTGRES_ID_REGEXP should not match invalid postgres ids', function () {
expect('invalid-id').to.not.match(assert.POSTGRES_ID_REGEXP)
expect('0123456').to.not.match(assert.POSTGRES_ID_REGEXP) // starts with 0
expect('12345678901').to.not.match(assert.POSTGRES_ID_REGEXP) // too long (> 10 digits)
})
})
})


@ -8,20 +8,20 @@ describe('BlobStore postgres backend', function () {
const projectId = new ObjectId().toString()
await expect(
postgresBackend.insertBlob(projectId, 'hash', 123, 99)
).to.be.rejectedWith(`bad projectId ${projectId}`)
).to.be.rejectedWith('bad projectId')
})
it('deleteBlobs rejects when called with bad projectId', async function () {
const projectId = new ObjectId().toString()
await expect(postgresBackend.deleteBlobs(projectId)).to.be.rejectedWith(
`bad projectId ${projectId}`
'bad projectId'
)
})
it('findBlobs rejects when called with bad projectId', async function () {
const projectId = new ObjectId().toString()
await expect(postgresBackend.findBlobs(projectId)).to.be.rejectedWith(
`bad projectId ${projectId}`
'bad projectId'
)
})
@ -29,14 +29,14 @@ describe('BlobStore postgres backend', function () {
const projectId = new ObjectId().toString()
await expect(
postgresBackend.findBlob(projectId, 'hash')
).to.be.rejectedWith(`bad projectId ${projectId}`)
).to.be.rejectedWith('bad projectId')
})
it('getProjectBlobs rejects when called with bad projectId', async function () {
const projectId = new ObjectId().toString()
await expect(
postgresBackend.getProjectBlobs(projectId)
).to.be.rejectedWith(`bad projectId ${projectId}`)
).to.be.rejectedWith('bad projectId')
})
})
})

Some files were not shown because too many files have changed in this diff.