+diff --git a/services/web/frontend/js/features/ui/components/bootstrap-5/navbar/default-navbar.tsx b/services/web/frontend/js/features/ui/components/bootstrap-5/navbar/default-navbar.tsx
+index 2480b7f061f..8e5429dbde6 100644
+--- a/services/web/frontend/js/features/ui/components/bootstrap-5/navbar/default-navbar.tsx
++++ b/services/web/frontend/js/features/ui/components/bootstrap-5/navbar/default-navbar.tsx
+@@ -1,4 +1,4 @@
+-import { useState } from 'react'
++import React, { useState } from 'react'
+ import { sendMB } from '@/infrastructure/event-tracking'
+ import { useTranslation } from 'react-i18next'
+ import { Button, Container, Nav, Navbar } from 'react-bootstrap'
+@@ -13,9 +13,15 @@ import MaterialIcon from '@/shared/components/material-icon'
+ import { useContactUsModal } from '@/shared/hooks/use-contact-us-modal'
+ import { UserProvider } from '@/shared/context/user-context'
+ import { X } from '@phosphor-icons/react'
++import overleafWhiteLogo from '@/shared/svgs/overleaf-white.svg'
++import overleafBlackLogo from '@/shared/svgs/overleaf-black.svg'
++import type { CSSPropertiesWithVariables } from '../../../../../../../types/css-properties-with-variables'
+
+-function DefaultNavbar(props: DefaultNavbarMetadata) {
++function DefaultNavbar(
++ props: DefaultNavbarMetadata & { overleafLogo?: string }
++) {
+ const {
++ overleafLogo,
+ customLogo,
+ title,
+ canDisplayAdminMenu,
+@@ -49,10 +55,20 @@ function DefaultNavbar(props: DefaultNavbarMetadata) {
+ className="navbar-default navbar-main"
+ expand="lg"
+ onToggle={expanded => setExpanded(expanded)}
++ style={
++ {
++ '--navbar-brand-image-default-url': `url("${overleafWhiteLogo}")`,
++ '--navbar-brand-image-redesign-url': `url("${overleafBlackLogo}")`,
++ } as CSSPropertiesWithVariables
++ }
+ >
+
+
+-
++
+ {enableUpgradeButton ? (
+
) {
++}: Pick & {
++ overleafLogo?: string
++}) {
+ const { appName } = getMeta('ol-ExposedSettings')
+-
+ if (customLogo) {
+ return (
+ // eslint-disable-next-line jsx-a11y/anchor-has-content
+@@ -24,9 +26,16 @@ export default function HeaderLogoOrTitle({
+
+ )
+ } else {
++ const style = overleafLogo
++ ? {
++ style: {
++ backgroundImage: `url("${overleafLogo}")`,
++ },
++ }
++ : null
+ return (
+ // eslint-disable-next-line jsx-a11y/anchor-has-content
+-
++
+ )
+ }
+ }
+diff --git a/services/web/frontend/js/shared/svgs/overleaf-black.svg b/services/web/frontend/js/shared/svgs/overleaf-black.svg
+new file mode 100644
+index 00000000000..ea0678438ba
+--- /dev/null
++++ b/services/web/frontend/js/shared/svgs/overleaf-black.svg
+@@ -0,0 +1,9 @@
++
++
++
++
++
++
++
++
++
+diff --git a/services/web/frontend/js/shared/svgs/overleaf-white.svg b/services/web/frontend/js/shared/svgs/overleaf-white.svg
+new file mode 100644
+index 00000000000..2ced81aa46d
+--- /dev/null
++++ b/services/web/frontend/js/shared/svgs/overleaf-white.svg
+@@ -0,0 +1 @@
++
+\ No newline at end of file
+diff --git a/services/web/frontend/stylesheets/bootstrap-5/components/nav.scss b/services/web/frontend/stylesheets/bootstrap-5/components/nav.scss
+index 5d28341cf53..dd0600ed15d 100644
+--- a/services/web/frontend/stylesheets/bootstrap-5/components/nav.scss
++++ b/services/web/frontend/stylesheets/bootstrap-5/components/nav.scss
+@@ -8,7 +8,10 @@
+ --navbar-padding-h: var(--spacing-05);
+ --navbar-padding: 0 var(--navbar-padding-h);
+ --navbar-brand-width: 130px;
+- --navbar-brand-image-url: url('../../../../public/img/ol-brand/overleaf-white.svg');
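++ // Use the brand image URL set inline by default-navbar.tsx when present,
++ // falling back to the bundled logo otherwise (second argument of var()).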
++ --navbar-brand-image-url: var(
++ --navbar-brand-image-default-url,
++ url('../../../../public/img/ol-brand/overleaf-white.svg')
++ );
+
+ // Title, when used instead of a logo
+ --navbar-title-font-size: var(--font-size-05);
+diff --git a/services/web/frontend/stylesheets/bootstrap-5/components/navbar.scss b/services/web/frontend/stylesheets/bootstrap-5/components/navbar.scss
+index 3b984bb6f36..a8855ea1ca3 100644
+--- a/services/web/frontend/stylesheets/bootstrap-5/components/navbar.scss
++++ b/services/web/frontend/stylesheets/bootstrap-5/components/navbar.scss
+@@ -216,7 +216,10 @@
+ .website-redesign .navbar-default {
+ --navbar-title-color: var(--content-primary);
+ --navbar-title-color-hover: var(--content-secondary);
+- --navbar-brand-image-url: url('../../../../public/img/ol-brand/overleaf-black.svg');
++ --navbar-brand-image-url: var(
++ --navbar-brand-image-redesign-url,
++ url('../../../../public/img/ol-brand/overleaf-black.svg')
++ );
+ --navbar-subdued-color: var(--content-primary);
+ --navbar-subdued-hover-bg: var(--bg-dark-primary);
+ --navbar-subdued-hover-color: var(--content-primary-dark);
+diff --git a/services/web/types/css-properties-with-variables.tsx b/services/web/types/css-properties-with-variables.tsx
+new file mode 100644
+index 00000000000..fe0e85902a6
+--- /dev/null
++++ b/services/web/types/css-properties-with-variables.tsx
+@@ -0,0 +1,4 @@
++import { CSSProperties } from 'react'
++
++export type CSSPropertiesWithVariables = CSSProperties &
++ Record<`--${string}`, number | string>
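++
++// Example usage (illustrative), mirroring the inline style in default-navbar.tsx:
++//   const style: CSSPropertiesWithVariables = {
++//     '--navbar-brand-image-default-url': `url("${overleafWhiteLogo}")`,
++//   }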
+--
+2.43.0
+
diff --git a/server-ce/hotfix/5.5.2/pr_26783.patch b/server-ce/hotfix/5.5.2/pr_26783.patch
new file mode 100644
index 0000000000..74db897a5f
--- /dev/null
+++ b/server-ce/hotfix/5.5.2/pr_26783.patch
@@ -0,0 +1,58 @@
+diff --git a/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs b/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
+index 29f5e7ffd26..46be91a1d9c 100644
+--- a/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
++++ b/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
+@@ -9,6 +9,34 @@ const { ObjectId } = mongodb
+ const MIN_MONGO_VERSION = [6, 0]
+ const MIN_MONGO_FEATURE_COMPATIBILITY_VERSION = [6, 0]
+
++// Allow ignoring admin check failures via an environment variable
++const OVERRIDE_ENV_VAR_NAME = 'ALLOW_MONGO_ADMIN_CHECK_FAILURES'
++
++function shouldSkipAdminChecks() {
++ return process.env[OVERRIDE_ENV_VAR_NAME] === 'true'
++}
++
++function handleUnauthorizedError(err, feature) {
++ if (
++ err instanceof mongodb.MongoServerError &&
++ err.codeName === 'Unauthorized'
++ ) {
++ console.warn(`Warning: failed to check ${feature} (not authorised)`)
++ if (!shouldSkipAdminChecks()) {
++ console.error(
++ `Please ensure the MongoDB user has the required admin permissions, or\n` +
++ `set the environment variable ${OVERRIDE_ENV_VAR_NAME}=true to ignore this check.`
++ )
++ process.exit(1)
++ }
++ console.warn(
++ `Ignoring ${feature} check failure (${OVERRIDE_ENV_VAR_NAME}=${process.env[OVERRIDE_ENV_VAR_NAME]})`
++ )
++ } else {
++ throw err
++ }
++}
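++
++// Example (illustrative): run the startup check with the override enabled:
++//   ALLOW_MONGO_ADMIN_CHECK_FAILURES=true node modules/server-ce-scripts/scripts/check-mongodb.mjs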
++
+ async function main() {
+ let mongoClient
+ try {
+@@ -18,8 +46,16 @@ async function main() {
+ throw err
+ }
+
+- await checkMongoVersion(mongoClient)
+- await checkFeatureCompatibilityVersion(mongoClient)
++ try {
++ await checkMongoVersion(mongoClient)
++ } catch (err) {
++ handleUnauthorizedError(err, 'MongoDB version')
++ }
++ try {
++ await checkFeatureCompatibilityVersion(mongoClient)
++ } catch (err) {
++ handleUnauthorizedError(err, 'MongoDB feature compatibility version')
++ }
+
+ try {
+ await testTransactions(mongoClient)
diff --git a/server-ce/hotfix/5.5.3/Dockerfile b/server-ce/hotfix/5.5.3/Dockerfile
new file mode 100644
index 0000000000..1a783bc0ba
--- /dev/null
+++ b/server-ce/hotfix/5.5.3/Dockerfile
@@ -0,0 +1,29 @@
+FROM sharelatex/sharelatex:5.5.2
+
+# ../../bin/import_pr_patch.sh 27147 27173 27230 27240 27249 27257 27273 27397
+# Remove CE tests
+# Remove tests
+# Remove cloudbuild changes
+# Remove SaaS changes
+# Fixup package.json and toolbar-items.tsx
+# Fix cron paths
+COPY *.patch .
+RUN --mount=type=cache,target=/root/.cache \
+ --mount=type=cache,target=/root/.npm \
+ --mount=type=cache,target=/overleaf/services/web/node_modules/.cache,id=server-ce-webpack-cache \
+ --mount=type=tmpfs,target=/tmp true \
+ && bash -ec 'for p in *.patch; do echo "=== Applying $p ==="; patch -p1 < "$p" && rm $p; done' \
+ && npm audit --audit-level=high \
+ && node genScript compile | bash \
+ && npm prune --omit=dev \
+ && apt remove -y linux-libc-dev
+
+# ../../bin/import_pr_patch.sh 27476
+# Remove tests
+# Remove SaaS changes
+COPY pr_27476.patch-stage-2 .
+RUN patch -p1 < pr_27476.patch-stage-2 && rm pr_27476.patch-stage-2
+
+# Extra tweaks to output
+COPY pr_27397.patch-stage-2 .
+RUN patch -p1 < pr_27397.patch-stage-2 && rm pr_27397.patch-stage-2
diff --git a/server-ce/hotfix/5.5.3/NOTES.md b/server-ce/hotfix/5.5.3/NOTES.md
new file mode 100644
index 0000000000..71f3f185a5
--- /dev/null
+++ b/server-ce/hotfix/5.5.3/NOTES.md
@@ -0,0 +1,54 @@
+# Get the base container running
+docker build -t base .
+
+CONTAINER_NAME=new
+
+# Start the container
+docker run -t -i --entrypoint /bin/bash --name $CONTAINER_NAME base
+
+# Clean any existing directories
+rm -rf /tmp/{a,b}
+
+# Take snapshot of initial container
+mkdir /tmp/a ; docker export $CONTAINER_NAME | tar --exclude node_modules -x -C /tmp/a --strip-components=1 overleaf
+
+# In the container, run the following commands
+docker exec -i $CONTAINER_NAME /bin/bash <<'EOF'
+npm install -g json
+json -I -f package.json -c 'this.overrides["swagger-tools"].multer="2.0.2"'
+json -I -f package.json -c 'this.overrides["request@2.88.2"]["form-data"]="2.5.5"'
+json -I -f package.json -c 'this.overrides["superagent@7.1.6"] ??= {}'
+json -I -f package.json -c 'this.overrides["superagent@7.1.6"]["form-data"]="4.0.4"'
+json -I -f package.json -c 'this.overrides["superagent@3.8.3"] ??= {}'
+json -I -f package.json -c 'this.overrides["superagent@3.8.3"]["form-data"]="2.5.5"'
+
+npm uninstall -w libraries/metrics @google-cloud/opentelemetry-cloud-trace-exporter @google-cloud/profiler
+npm uninstall -w libraries/logger @google-cloud/logging-bunyan
+npm uninstall -w services/web @slack/webhook contentful @contentful/rich-text-types @contentful/rich-text-html-renderer
+npm uninstall -w services/history-v1 @google-cloud/secret-manager
+
+npm uninstall -w services/web "@node-saml/passport-saml"
+npm install -w services/web "@node-saml/passport-saml@^5.1.0"
+
+npm uninstall -w services/web multer
+npm install -w services/web "multer@2.0.2"
+
+npm uninstall -w services/history-v1 swagger-tools
+npm install -w services/history-v1 swagger-tools@0.10.4
+
+npm uninstall -w services/clsi request
+npm install -w services/clsi request@2.88.2
+npm install
+
+npm audit --audit-level=high
+EOF
+
+# Take snapshot of final container
+mkdir /tmp/b ; docker export $CONTAINER_NAME | tar --exclude node_modules -x -C /tmp/b --strip-components=1 overleaf
+
+# Find the diff excluding node modules directories
+# The sec- prefix ensures it applies after the pr_* patches.
+(cd /tmp ; diff -u -x 'node_modules' -r a/ b/) > sec-npm.patch
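+
+# Optional sanity check (illustrative): confirm the generated patch applies
+# cleanly to the initial snapshot before baking it into the Dockerfile.
+patch -p1 --dry-run -d /tmp/a < sec-npm.patch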
+
+# In the Dockerfile we also need to remove linux-libc-dev
+apt remove -y linux-libc-dev
diff --git a/server-ce/hotfix/5.5.3/multer.patch b/server-ce/hotfix/5.5.3/multer.patch
new file mode 100644
index 0000000000..9e941204d4
--- /dev/null
+++ b/server-ce/hotfix/5.5.3/multer.patch
@@ -0,0 +1,27 @@
+commit 43d0476e489cdf8e2e7261eb419810140d252a6d
+Author: Andrew Rumble
+Date: Fri Jul 25 12:18:26 2025 +0100
+
+ Add patch for multer 2.0.2
+
+ Co-authored-by: Ersun Warncke
+
+diff --git a/patches/multer+2.0.2.patch b/patches/multer+2.0.2.patch
+new file mode 100644
+index 00000000000..f9959effe15
+--- /dev/null
++++ b/patches/multer+2.0.2.patch
+@@ -0,0 +1,13 @@
++diff --git a/node_modules/multer/lib/make-middleware.js b/node_modules/multer/lib/make-middleware.js
++index 260dcb4..895b4b2 100644
++--- a/node_modules/multer/lib/make-middleware.js
+++++ b/node_modules/multer/lib/make-middleware.js
++@@ -113,7 +113,7 @@ function makeMiddleware (setup) {
++ if (fieldname == null) return abortWithCode('MISSING_FIELD_NAME')
++
++ // don't attach to the files object, if there is no file
++- if (!filename) return fileStream.resume()
+++ if (!filename) filename = 'undefined'
++
++ // Work around bug in Busboy (https://github.com/mscdex/busboy/issues/6)
++ if (limits && Object.prototype.hasOwnProperty.call(limits, 'fieldNameSize')) {
diff --git a/server-ce/hotfix/5.5.3/pr_27147.patch b/server-ce/hotfix/5.5.3/pr_27147.patch
new file mode 100644
index 0000000000..c7ea9fb3f6
--- /dev/null
+++ b/server-ce/hotfix/5.5.3/pr_27147.patch
@@ -0,0 +1,351 @@
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index ba3e0d43598e..feb4612ddc23 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -33,7 +33,6 @@ import {
+ makeProjectKey,
+ } from '../lib/blob_store/index.js'
+ import { backedUpBlobs as backedUpBlobsCollection, db } from '../lib/mongodb.js'
+-import filestorePersistor from '../lib/persistor.js'
+ import commandLineArgs from 'command-line-args'
+ import readline from 'node:readline'
+
+@@ -179,6 +178,37 @@ const STREAM_HIGH_WATER_MARK = parseInt(
+ const LOGGING_INTERVAL = parseInt(process.env.LOGGING_INTERVAL || '60000', 10)
+ const SLEEP_BEFORE_EXIT = parseInt(process.env.SLEEP_BEFORE_EXIT || '1000', 10)
+
++// Filestore endpoint location, the port is always hardcoded
++const FILESTORE_HOST = process.env.FILESTORE_HOST || '127.0.0.1'
++const FILESTORE_PORT = process.env.FILESTORE_PORT || '3009'
++
++async function fetchFromFilestore(projectId, fileId) {
++ const url = `http://${FILESTORE_HOST}:${FILESTORE_PORT}/project/${projectId}/file/${fileId}`
++ const response = await fetch(url)
++ if (!response.ok) {
++ if (response.status === 404) {
++ throw new NotFoundError('file not found in filestore', {
++ status: response.status,
++ })
++ }
++ const body = await response.text()
++ throw new OError('fetchFromFilestore failed', {
++ projectId,
++ fileId,
++ status: response.status,
++ body,
++ })
++ }
++ if (!response.body) {
++ throw new OError('fetchFromFilestore response has no body', {
++ projectId,
++ fileId,
++ status: response.status,
++ })
++ }
++ return response.body
++}
++
+ const projectsCollection = db.collection('projects')
+ /** @type {ProjectsCollection} */
+ const typedProjectsCollection = db.collection('projects')
+@@ -348,8 +378,7 @@ async function processFile(entry, filePath) {
+ } catch (err) {
+ if (gracefulShutdownInitiated) throw err
+ if (err instanceof NotFoundError) {
+- const { bucketName } = OError.getFullInfo(err)
+- if (bucketName === USER_FILES_BUCKET_NAME && !RETRY_FILESTORE_404) {
++ if (!RETRY_FILESTORE_404) {
+ throw err // disable retries for not found in filestore bucket case
+ }
+ }
+@@ -416,10 +445,8 @@ async function processFileOnce(entry, filePath) {
+ }
+
+ STATS.readFromGCSCount++
+- const src = await filestorePersistor.getObjectStream(
+- USER_FILES_BUCKET_NAME,
+- `${projectId}/${fileId}`
+- )
++ // make a fetch request to filestore itself
++ const src = await fetchFromFilestore(projectId, fileId)
+ const dst = fs.createWriteStream(filePath, {
+ highWaterMark: STREAM_HIGH_WATER_MARK,
+ })
+@@ -1327,14 +1354,21 @@ async function processDeletedProjects() {
+ }
+
+ async function main() {
++ console.log('Starting project file backup...')
+ await loadGlobalBlobs()
++ console.log('Loaded global blobs:', GLOBAL_BLOBS.size)
+ if (PROJECT_IDS_FROM) {
++ console.log(
++ `Processing projects from file: ${PROJECT_IDS_FROM}, this may take a while...`
++ )
+ await processProjectsFromFile()
+ } else {
+ if (PROCESS_NON_DELETED_PROJECTS) {
++ console.log('Processing non-deleted projects...')
+ await processNonDeletedProjects()
+ }
+ if (PROCESS_DELETED_PROJECTS) {
++ console.log('Processing deleted projects...')
+ await processDeletedProjects()
+ }
+ }
+diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
+index fd39369a7189..4e697b8bec2c 100644
+--- a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
++++ b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
+@@ -15,7 +15,6 @@ import { execFile } from 'node:child_process'
+ import chai, { expect } from 'chai'
+ import chaiExclude from 'chai-exclude'
+ import config from 'config'
+-import ObjectPersistor from '@overleaf/object-persistor'
+ import { WritableBuffer } from '@overleaf/stream-utils'
+ import {
+ backupPersistor,
+@@ -27,6 +26,9 @@ import {
+ makeProjectKey,
+ } from '../../../../storage/lib/blob_store/index.js'
+
++import express from 'express'
++import bodyParser from 'body-parser'
++
+ chai.use(chaiExclude)
+ const TIMEOUT = 20 * 1_000
+
+@@ -36,15 +38,60 @@ const { tieringStorageClass } = config.get('backupPersistor')
+ const projectsCollection = db.collection('projects')
+ const deletedProjectsCollection = db.collection('deletedProjects')
+
+-const FILESTORE_PERSISTOR = ObjectPersistor({
+- backend: 'gcs',
+- gcs: {
+- endpoint: {
+- apiEndpoint: process.env.GCS_API_ENDPOINT,
+- projectId: process.env.GCS_PROJECT_ID,
+- },
+- },
+-})
++class MockFilestore {
++ constructor() {
++ this.host = process.env.FILESTORE_HOST || '127.0.0.1'
++ this.port = process.env.FILESTORE_PORT || 3009
++ // create a server listening on this.host and this.port
++ this.files = {}
++
++ this.app = express()
++ this.app.use(bodyParser.json())
++ this.app.use(bodyParser.urlencoded({ extended: true }))
++
++ this.app.get('/project/:projectId/file/:fileId', (req, res) => {
++ const { projectId, fileId } = req.params
++ const content = this.files[projectId]?.[fileId]
++ if (!content) return res.status(404).end()
++ res.status(200).end(content)
++ })
++ }
++
++ start() {
++ // reset stored files
++ this.files = {}
++ // start the server
++ if (this.serverPromise) {
++ return this.serverPromise
++ } else {
++ this.serverPromise = new Promise((resolve, reject) => {
++ this.server = this.app.listen(this.port, this.host, err => {
++ if (err) return reject(err)
++ resolve()
++ })
++ })
++ return this.serverPromise
++ }
++ }
++
++ addFile(projectId, fileId, fileContent) {
++ if (!this.files[projectId]) {
++ this.files[projectId] = {}
++ }
++ this.files[projectId][fileId] = fileContent
++ }
++
++ deleteObject(projectId, fileId) {
++ if (this.files[projectId]) {
++ delete this.files[projectId][fileId]
++ if (Object.keys(this.files[projectId]).length === 0) {
++ delete this.files[projectId]
++ }
++ }
++ }
++}
++
++const mockFilestore = new MockFilestore()
+
+ /**
+ * @param {ObjectId} objectId
+@@ -472,67 +519,36 @@ describe('back_fill_file_hash script', function () {
+ }
+
+ async function populateFilestore() {
+- await FILESTORE_PERSISTOR.sendStream(
+- USER_FILES_BUCKET_NAME,
+- `${projectId0}/${fileId0}`,
+- Stream.Readable.from([fileId0.toString()])
+- )
+- await FILESTORE_PERSISTOR.sendStream(
+- USER_FILES_BUCKET_NAME,
+- `${projectId0}/${fileId6}`,
+- Stream.Readable.from([fileId6.toString()])
+- )
+- await FILESTORE_PERSISTOR.sendStream(
+- USER_FILES_BUCKET_NAME,
+- `${projectId0}/${fileId7}`,
+- Stream.Readable.from([contentFile7])
+- )
+- await FILESTORE_PERSISTOR.sendStream(
+- USER_FILES_BUCKET_NAME,
+- `${projectId1}/${fileId1}`,
+- Stream.Readable.from([fileId1.toString()])
+- )
+- await FILESTORE_PERSISTOR.sendStream(
+- USER_FILES_BUCKET_NAME,
+- `${projectId2}/${fileId2}`,
+- Stream.Readable.from([fileId2.toString()])
+- )
+- await FILESTORE_PERSISTOR.sendStream(
+- USER_FILES_BUCKET_NAME,
+- `${projectId3}/${fileId3}`,
+- Stream.Readable.from([fileId3.toString()])
+- )
+- await FILESTORE_PERSISTOR.sendStream(
+- USER_FILES_BUCKET_NAME,
+- `${projectId3}/${fileId10}`,
++ await mockFilestore.addFile(projectId0, fileId0, fileId0.toString())
++ await mockFilestore.addFile(projectId0, fileId6, fileId6.toString())
++ await mockFilestore.addFile(projectId0, fileId7, contentFile7)
++ await mockFilestore.addFile(projectId1, fileId1, fileId1.toString())
++ await mockFilestore.addFile(projectId2, fileId2, fileId2.toString())
++ await mockFilestore.addFile(projectId3, fileId3, fileId3.toString())
++ await mockFilestore.addFile(
++ projectId3,
++ fileId10,
+ // fileId10 is dupe of fileId3
+- Stream.Readable.from([fileId3.toString()])
++ fileId3.toString()
+ )
+- await FILESTORE_PERSISTOR.sendStream(
+- USER_FILES_BUCKET_NAME,
+- `${projectId3}/${fileId11}`,
++ await mockFilestore.addFile(
++ projectId3,
++ fileId11,
+ // fileId11 is dupe of fileId3
+- Stream.Readable.from([fileId3.toString()])
+- )
+- await FILESTORE_PERSISTOR.sendStream(
+- USER_FILES_BUCKET_NAME,
+- `${projectIdDeleted0}/${fileId4}`,
+- Stream.Readable.from([fileId4.toString()])
++ fileId3.toString()
+ )
+- await FILESTORE_PERSISTOR.sendStream(
+- USER_FILES_BUCKET_NAME,
+- `${projectIdDeleted1}/${fileId5}`,
+- Stream.Readable.from([fileId5.toString()])
+- )
+- await FILESTORE_PERSISTOR.sendStream(
+- USER_FILES_BUCKET_NAME,
+- `${projectIdBadFileTree3}/${fileId9}`,
+- Stream.Readable.from([fileId9.toString()])
++ await mockFilestore.addFile(projectIdDeleted0, fileId4, fileId4.toString())
++ await mockFilestore.addFile(projectIdDeleted1, fileId5, fileId5.toString())
++ await mockFilestore.addFile(
++ projectIdBadFileTree3,
++ fileId9,
++ fileId9.toString()
+ )
+ }
+
+ async function prepareEnvironment() {
+ await cleanup.everything()
++ await mockFilestore.start()
+ await populateMongo()
+ await populateHistoryV1()
+ await populateFilestore()
+@@ -1117,10 +1133,7 @@ describe('back_fill_file_hash script', function () {
+ beforeEach('prepare environment', prepareEnvironment)
+
+ it('should gracefully handle fatal errors', async function () {
+- await FILESTORE_PERSISTOR.deleteObject(
+- USER_FILES_BUCKET_NAME,
+- `${projectId0}/${fileId0}`
+- )
++ mockFilestore.deleteObject(projectId0, fileId0)
+ const t0 = Date.now()
+ const { stats, result } = await tryRunScript([], {
+ RETRIES: '10',
+@@ -1148,17 +1161,10 @@ describe('back_fill_file_hash script', function () {
+ })
+
+ it('should retry on error', async function () {
+- await FILESTORE_PERSISTOR.deleteObject(
+- USER_FILES_BUCKET_NAME,
+- `${projectId0}/${fileId0}`
+- )
++ mockFilestore.deleteObject(projectId0, fileId0)
+ const restoreFileAfter5s = async () => {
+ await setTimeout(5_000)
+- await FILESTORE_PERSISTOR.sendStream(
+- USER_FILES_BUCKET_NAME,
+- `${projectId0}/${fileId0}`,
+- Stream.Readable.from([fileId0.toString()])
+- )
++ mockFilestore.addFile(projectId0, fileId0, fileId0.toString())
+ }
+ // use Promise.allSettled to ensure the above sendStream call finishes before this test completes
+ const [
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index feb4612ddc23..5a590e347a94 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -178,7 +178,7 @@ const STREAM_HIGH_WATER_MARK = parseInt(
+ const LOGGING_INTERVAL = parseInt(process.env.LOGGING_INTERVAL || '60000', 10)
+ const SLEEP_BEFORE_EXIT = parseInt(process.env.SLEEP_BEFORE_EXIT || '1000', 10)
+
+-// Filestore endpoint location, the port is always hardcoded
++// Filestore endpoint location
+ const FILESTORE_HOST = process.env.FILESTORE_HOST || '127.0.0.1'
+ const FILESTORE_PORT = process.env.FILESTORE_PORT || '3009'
+
+
+
+
+diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
+index 4e697b8bec2c..8f861d393451 100644
+--- a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
++++ b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
+@@ -27,7 +27,6 @@ import {
+ } from '../../../../storage/lib/blob_store/index.js'
+
+ import express from 'express'
+-import bodyParser from 'body-parser'
+
+ chai.use(chaiExclude)
+ const TIMEOUT = 20 * 1_000
+@@ -46,8 +45,6 @@ class MockFilestore {
+ this.files = {}
+
+ this.app = express()
+- this.app.use(bodyParser.json())
+- this.app.use(bodyParser.urlencoded({ extended: true }))
+
+ this.app.get('/project/:projectId/file/:fileId', (req, res) => {
+ const { projectId, fileId } = req.params
+
diff --git a/server-ce/hotfix/5.5.3/pr_27173.patch b/server-ce/hotfix/5.5.3/pr_27173.patch
new file mode 100644
index 0000000000..e1c0e08c64
--- /dev/null
+++ b/server-ce/hotfix/5.5.3/pr_27173.patch
@@ -0,0 +1,961 @@
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index 5a590e347a9..3be1c8a5407 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -1,28 +1,20 @@
+ // @ts-check
+-import Crypto from 'node:crypto'
+ import Events from 'node:events'
+ import fs from 'node:fs'
+ import Path from 'node:path'
+ import { performance } from 'node:perf_hooks'
+ import Stream from 'node:stream'
+-import zLib from 'node:zlib'
+ import { setTimeout } from 'node:timers/promises'
+-import { Binary, ObjectId } from 'mongodb'
++import { ObjectId } from 'mongodb'
+ import pLimit from 'p-limit'
+ import logger from '@overleaf/logger'
+ import {
+ batchedUpdate,
+ objectIdFromInput,
+ renderObjectId,
+- READ_PREFERENCE_SECONDARY,
+ } from '@overleaf/mongo-utils/batchedUpdate.js'
+ import OError from '@overleaf/o-error'
+-import {
+- AlreadyWrittenError,
+- NoKEKMatchedError,
+- NotFoundError,
+-} from '@overleaf/object-persistor/src/Errors.js'
+-import { backupPersistor, projectBlobsBucket } from '../lib/backupPersistor.mjs'
++import { NotFoundError } from '@overleaf/object-persistor/src/Errors.js'
+ import {
+ BlobStore,
+ GLOBAL_BLOBS,
+@@ -30,9 +22,8 @@ import {
+ getProjectBlobsBatch,
+ getStringLengthOfFile,
+ makeBlobForFile,
+- makeProjectKey,
+ } from '../lib/blob_store/index.js'
+-import { backedUpBlobs as backedUpBlobsCollection, db } from '../lib/mongodb.js'
++import { db } from '../lib/mongodb.js'
+ import commandLineArgs from 'command-line-args'
+ import readline from 'node:readline'
+
+@@ -88,7 +79,7 @@ ObjectId.cacheHexString = true
+ */
+
+ /**
+- * @return {{PROJECT_IDS_FROM: string, PROCESS_HASHED_FILES: boolean, LOGGING_IDENTIFIER: string, BATCH_RANGE_START: string, PROCESS_BLOBS: boolean, BATCH_RANGE_END: string, PROCESS_NON_DELETED_PROJECTS: boolean, PROCESS_DELETED_PROJECTS: boolean, COLLECT_BACKED_UP_BLOBS: boolean}}
++ * @return {{PROJECT_IDS_FROM: string, PROCESS_HASHED_FILES: boolean, LOGGING_IDENTIFIER: string, BATCH_RANGE_START: string, PROCESS_BLOBS: boolean, BATCH_RANGE_END: string, PROCESS_NON_DELETED_PROJECTS: boolean, PROCESS_DELETED_PROJECTS: boolean}}
+ */
+ function parseArgs() {
+ const PUBLIC_LAUNCH_DATE = new Date('2012-01-01T00:00:00Z')
+@@ -98,7 +89,6 @@ function parseArgs() {
+ { name: 'processHashedFiles', type: String, defaultValue: 'false' },
+ { name: 'processBlobs', type: String, defaultValue: 'true' },
+ { name: 'projectIdsFrom', type: String, defaultValue: '' },
+- { name: 'collectBackedUpBlobs', type: String, defaultValue: 'true' },
+ {
+ name: 'BATCH_RANGE_START',
+ type: String,
+@@ -130,7 +120,6 @@ function parseArgs() {
+ PROCESS_DELETED_PROJECTS: boolVal('processDeletedProjects'),
+ PROCESS_BLOBS: boolVal('processBlobs'),
+ PROCESS_HASHED_FILES: boolVal('processHashedFiles'),
+- COLLECT_BACKED_UP_BLOBS: boolVal('collectBackedUpBlobs'),
+ BATCH_RANGE_START,
+ BATCH_RANGE_END,
+ LOGGING_IDENTIFIER: args['LOGGING_IDENTIFIER'] || BATCH_RANGE_START,
+@@ -143,7 +132,6 @@ const {
+ PROCESS_DELETED_PROJECTS,
+ PROCESS_BLOBS,
+ PROCESS_HASHED_FILES,
+- COLLECT_BACKED_UP_BLOBS,
+ BATCH_RANGE_START,
+ BATCH_RANGE_END,
+ LOGGING_IDENTIFIER,
+@@ -232,7 +220,6 @@ async function processConcurrently(array, fn) {
+ const STATS = {
+ projects: 0,
+ blobs: 0,
+- backedUpBlobs: 0,
+ filesWithHash: 0,
+ filesWithoutHash: 0,
+ filesDuplicated: 0,
+@@ -246,14 +233,8 @@ const STATS = {
+ projectHardDeleted: 0,
+ fileHardDeleted: 0,
+ mongoUpdates: 0,
+- deduplicatedWriteToAWSLocalCount: 0,
+- deduplicatedWriteToAWSLocalEgress: 0,
+- deduplicatedWriteToAWSRemoteCount: 0,
+- deduplicatedWriteToAWSRemoteEgress: 0,
+ readFromGCSCount: 0,
+ readFromGCSIngress: 0,
+- writeToAWSCount: 0,
+- writeToAWSEgress: 0,
+ writeToGCSCount: 0,
+ writeToGCSEgress: 0,
+ }
+@@ -275,7 +256,7 @@ function toMiBPerSecond(v, ms) {
+ /**
+ * @param {any} stats
+ * @param {number} ms
+- * @return {{writeToAWSThroughputMiBPerSecond: number, readFromGCSThroughputMiBPerSecond: number}}
++ * @return {{readFromGCSThroughputMiBPerSecond: number}}
+ */
+ function bandwidthStats(stats, ms) {
+ return {
+@@ -283,10 +264,6 @@ function bandwidthStats(stats, ms) {
+ stats.readFromGCSIngress,
+ ms
+ ),
+- writeToAWSThroughputMiBPerSecond: toMiBPerSecond(
+- stats.writeToAWSEgress,
+- ms
+- ),
+ }
+ }
+
+@@ -382,9 +359,6 @@ async function processFile(entry, filePath) {
+ throw err // disable retries for not found in filestore bucket case
+ }
+ }
+- if (err instanceof NoKEKMatchedError) {
+- throw err // disable retries when upload to S3 will fail again
+- }
+ STATS.filesRetries++
+ const {
+ ctx: { projectId },
+@@ -417,32 +391,8 @@ async function processFileOnce(entry, filePath) {
+ if (entry.blob) {
+ const { blob } = entry
+ const hash = blob.getHash()
+- if (entry.ctx.hasBackedUpBlob(hash)) {
+- STATS.deduplicatedWriteToAWSLocalCount++
+- STATS.deduplicatedWriteToAWSLocalEgress += estimateBlobSize(blob)
+- return hash
+- }
+- entry.ctx.recordPendingBlob(hash)
+- STATS.readFromGCSCount++
+- const src = await blobStore.getStream(hash)
+- const dst = fs.createWriteStream(filePath, {
+- highWaterMark: STREAM_HIGH_WATER_MARK,
+- })
+- try {
+- await Stream.promises.pipeline(src, dst)
+- } finally {
+- STATS.readFromGCSIngress += dst.bytesWritten
+- }
+- await uploadBlobToAWS(entry, blob, filePath)
+ return hash
+ }
+- if (entry.hash && entry.ctx.hasBackedUpBlob(entry.hash)) {
+- STATS.deduplicatedWriteToAWSLocalCount++
+- const blob = entry.ctx.getCachedHistoryBlob(entry.hash)
+- // blob might not exist on re-run with --PROCESS_BLOBS=false
+- if (blob) STATS.deduplicatedWriteToAWSLocalEgress += estimateBlobSize(blob)
+- return entry.hash
+- }
+
+ STATS.readFromGCSCount++
+ // make a fetch request to filestore itself
+@@ -469,16 +419,14 @@ async function processFileOnce(entry, filePath) {
+ STATS.globalBlobsEgress += estimateBlobSize(blob)
+ return hash
+ }
+- if (entry.ctx.hasBackedUpBlob(hash)) {
+- STATS.deduplicatedWriteToAWSLocalCount++
+- STATS.deduplicatedWriteToAWSLocalEgress += estimateBlobSize(blob)
++ if (entry.ctx.hasCompletedBlob(hash)) {
+ return hash
+ }
+ entry.ctx.recordPendingBlob(hash)
+
+ try {
+ await uploadBlobToGCS(blobStore, entry, blob, hash, filePath)
+- await uploadBlobToAWS(entry, blob, filePath)
++ entry.ctx.recordCompletedBlob(hash) // mark upload as completed
+ } catch (err) {
+ entry.ctx.recordFailedBlob(hash)
+ throw err
+@@ -515,76 +463,6 @@ async function uploadBlobToGCS(blobStore, entry, blob, hash, filePath) {
+
+ const GZ_SUFFIX = '.gz'
+
+-/**
+- * @param {QueueEntry} entry
+- * @param {Blob} blob
+- * @param {string} filePath
+- * @return {Promise}
+- */
+-async function uploadBlobToAWS(entry, blob, filePath) {
+- const { historyId } = entry.ctx
+- let backupSource
+- let contentEncoding
+- const md5 = Crypto.createHash('md5')
+- let size
+- if (blob.getStringLength()) {
+- const filePathCompressed = filePath + GZ_SUFFIX
+- backupSource = filePathCompressed
+- contentEncoding = 'gzip'
+- size = 0
+- await Stream.promises.pipeline(
+- fs.createReadStream(filePath, { highWaterMark: STREAM_HIGH_WATER_MARK }),
+- zLib.createGzip(),
+- async function* (source) {
+- for await (const chunk of source) {
+- size += chunk.byteLength
+- md5.update(chunk)
+- yield chunk
+- }
+- },
+- fs.createWriteStream(filePathCompressed, {
+- highWaterMark: STREAM_HIGH_WATER_MARK,
+- })
+- )
+- } else {
+- backupSource = filePath
+- size = blob.getByteLength()
+- await Stream.promises.pipeline(
+- fs.createReadStream(filePath, { highWaterMark: STREAM_HIGH_WATER_MARK }),
+- md5
+- )
+- }
+- const backendKeyPath = makeProjectKey(historyId, blob.getHash())
+- const persistor = await entry.ctx.getCachedPersistor(backendKeyPath)
+- try {
+- STATS.writeToAWSCount++
+- await persistor.sendStream(
+- projectBlobsBucket,
+- backendKeyPath,
+- fs.createReadStream(backupSource, {
+- highWaterMark: STREAM_HIGH_WATER_MARK,
+- }),
+- {
+- contentEncoding,
+- contentType: 'application/octet-stream',
+- contentLength: size,
+- sourceMd5: md5.digest('hex'),
+- ifNoneMatch: '*', // de-duplicate write (we pay for the request, but avoid egress)
+- }
+- )
+- STATS.writeToAWSEgress += size
+- } catch (err) {
+- if (err instanceof AlreadyWrittenError) {
+- STATS.deduplicatedWriteToAWSRemoteCount++
+- STATS.deduplicatedWriteToAWSRemoteEgress += size
+- } else {
+- STATS.writeToAWSEgress += size
+- throw err
+- }
+- }
+- entry.ctx.recordBackedUpBlob(blob.getHash())
+-}
+-
+ /**
+ * @param {Array} files
+ * @return {Promise}
+@@ -670,23 +548,18 @@ async function queueNextBatch(batch, prefix = 'rootFolder.0') {
+ * @return {Promise}
+ */
+ async function processBatch(batch, prefix = 'rootFolder.0') {
+- const [{ nBlobs, blobs }, { nBackedUpBlobs, backedUpBlobs }] =
+- await Promise.all([collectProjectBlobs(batch), collectBackedUpBlobs(batch)])
+- const files = Array.from(findFileInBatch(batch, prefix, blobs, backedUpBlobs))
++ const { nBlobs, blobs } = await collectProjectBlobs(batch)
++ const files = Array.from(findFileInBatch(batch, prefix, blobs))
+ STATS.projects += batch.length
+ STATS.blobs += nBlobs
+- STATS.backedUpBlobs += nBackedUpBlobs
+
+ // GC
+ batch.length = 0
+ blobs.clear()
+- backedUpBlobs.clear()
+
+ // The files are currently ordered by project-id.
+ // Order them by file-id ASC then blobs ASC to
+ // - process files before blobs
+- // - avoid head-of-line blocking from many project-files waiting on the generation of the projects DEK (round trip to AWS)
+- // - bonus: increase chance of de-duplicating write to AWS
+ files.sort(
+ /**
+ * @param {QueueEntry} a
+@@ -903,23 +776,15 @@ function* findFiles(ctx, folder, path, isInputLoop = false) {
+ * @param {Array} projects
+ * @param {string} prefix
+ * @param {Map>} blobs
+- * @param {Map>} backedUpBlobs
+ * @return Generator
+ */
+-function* findFileInBatch(projects, prefix, blobs, backedUpBlobs) {
++function* findFileInBatch(projects, prefix, blobs) {
+ for (const project of projects) {
+ const projectIdS = project._id.toString()
+ const historyIdS = project.overleaf.history.id.toString()
+ const projectBlobs = blobs.get(historyIdS) || []
+- const projectBackedUpBlobs = new Set(backedUpBlobs.get(projectIdS) || [])
+- const ctx = new ProjectContext(
+- project._id,
+- historyIdS,
+- projectBlobs,
+- projectBackedUpBlobs
+- )
++ const ctx = new ProjectContext(project._id, historyIdS, projectBlobs)
+ for (const blob of projectBlobs) {
+- if (projectBackedUpBlobs.has(blob.getHash())) continue
+ ctx.remainingQueueEntries++
+ yield {
+ ctx,
+@@ -951,42 +816,11 @@ async function collectProjectBlobs(batch) {
+ return await getProjectBlobsBatch(batch.map(p => p.overleaf.history.id))
+ }
+
+-/**
+- * @param {Array} projects
+- * @return {Promise<{nBackedUpBlobs:number,backedUpBlobs:Map>}>}
+- */
+-async function collectBackedUpBlobs(projects) {
+- let nBackedUpBlobs = 0
+- const backedUpBlobs = new Map()
+- if (!COLLECT_BACKED_UP_BLOBS) return { nBackedUpBlobs, backedUpBlobs }
+-
+- const cursor = backedUpBlobsCollection.find(
+- { _id: { $in: projects.map(p => p._id) } },
+- {
+- readPreference: READ_PREFERENCE_SECONDARY,
+- sort: { _id: 1 },
+- }
+- )
+- for await (const record of cursor) {
+- const blobs = record.blobs.map(b => b.toString('hex'))
+- backedUpBlobs.set(record._id.toString(), blobs)
+- nBackedUpBlobs += blobs.length
+- }
+- return { nBackedUpBlobs, backedUpBlobs }
+-}
+-
+-const BATCH_HASH_WRITES = 1_000
+ const BATCH_FILE_UPDATES = 100
+
+ const MONGO_PATH_SKIP_WRITE_HASH_TO_FILE_TREE = 'skip-write-to-file-tree'
+
+ class ProjectContext {
+- /** @type {Promise | null} */
+- #cachedPersistorPromise = null
+-
+- /** @type {Set} */
+- #backedUpBlobs
+-
+ /** @type {Map} */
+ #historyBlobs
+
+@@ -1000,12 +834,10 @@ class ProjectContext {
+ * @param {ObjectId} projectId
+ * @param {string} historyId
+ * @param {Array} blobs
+- * @param {Set} backedUpBlobs
+ */
+- constructor(projectId, historyId, blobs, backedUpBlobs) {
++ constructor(projectId, historyId, blobs) {
+ this.projectId = projectId
+ this.historyId = historyId
+- this.#backedUpBlobs = backedUpBlobs
+ this.#historyBlobs = new Map(blobs.map(b => [b.getHash(), b]))
+ }
+
+@@ -1034,75 +866,17 @@ class ProjectContext {
+ return false
+ }
+
+- /**
+- * @param {string} key
+- * @return {Promise}
+- */
+- getCachedPersistor(key) {
+- if (!this.#cachedPersistorPromise) {
+- // Fetch DEK once, but only if needed -- upon the first use
+- this.#cachedPersistorPromise = this.#getCachedPersistorWithRetries(key)
+- }
+- return this.#cachedPersistorPromise
+- }
+-
+- /**
+- * @param {string} key
+- * @return {Promise}
+- */
+- async #getCachedPersistorWithRetries(key) {
+- // Optimization: Skip GET on DEK in case no blobs are marked as backed up yet.
+- let tryGenerateDEKFirst = this.#backedUpBlobs.size === 0
+- for (let attempt = 0; attempt < RETRIES; attempt++) {
+- try {
+- if (tryGenerateDEKFirst) {
+- try {
+- return await backupPersistor.generateDataEncryptionKey(
+- projectBlobsBucket,
+- key
+- )
+- } catch (err) {
+- if (err instanceof AlreadyWrittenError) {
+- tryGenerateDEKFirst = false
+- // fall back to GET below
+- } else {
+- throw err
+- }
+- }
+- }
+- return await backupPersistor.forProject(projectBlobsBucket, key)
+- } catch (err) {
+- if (gracefulShutdownInitiated) throw err
+- if (err instanceof NoKEKMatchedError) {
+- throw err
+- } else {
+- logger.warn(
+- { err, projectId: this.projectId, attempt },
+- 'failed to get DEK, trying again'
+- )
+- const jitter = Math.random() * RETRY_DELAY_MS
+- await setTimeout(RETRY_DELAY_MS + jitter)
+- }
+- }
+- }
+- return await backupPersistor.forProject(projectBlobsBucket, key)
+- }
+-
+ async flushMongoQueuesIfNeeded() {
+ if (this.remainingQueueEntries === 0) {
+ await this.flushMongoQueues()
+ }
+
+- if (this.#completedBlobs.size > BATCH_HASH_WRITES) {
+- await this.#storeBackedUpBlobs()
+- }
+ if (this.#pendingFileWrites.length > BATCH_FILE_UPDATES) {
+ await this.#storeFileHashes()
+ }
+ }
+
+ async flushMongoQueues() {
+- await this.#storeBackedUpBlobs()
+ await this.#storeFileHashes()
+ }
+
+@@ -1111,20 +885,6 @@ class ProjectContext {
+ /** @type {Set} */
+ #completedBlobs = new Set()
+
+- async #storeBackedUpBlobs() {
+- if (this.#completedBlobs.size === 0) return
+- const blobs = Array.from(this.#completedBlobs).map(
+- hash => new Binary(Buffer.from(hash, 'hex'))
+- )
+- this.#completedBlobs.clear()
+- STATS.mongoUpdates++
+- await backedUpBlobsCollection.updateOne(
+- { _id: this.projectId },
+- { $addToSet: { blobs: { $each: blobs } } },
+- { upsert: true }
+- )
+- }
+-
+ /**
+ * @param {string} hash
+ */
+@@ -1142,8 +902,7 @@ class ProjectContext {
+ /**
+ * @param {string} hash
+ */
+- recordBackedUpBlob(hash) {
+- this.#backedUpBlobs.add(hash)
++ recordCompletedBlob(hash) {
+ this.#completedBlobs.add(hash)
+ this.#pendingBlobs.delete(hash)
+ }
+@@ -1152,12 +911,8 @@ class ProjectContext {
+ * @param {string} hash
+ * @return {boolean}
+ */
+- hasBackedUpBlob(hash) {
+- return (
+- this.#pendingBlobs.has(hash) ||
+- this.#completedBlobs.has(hash) ||
+- this.#backedUpBlobs.has(hash)
+- )
++ hasCompletedBlob(hash) {
++ return this.#pendingBlobs.has(hash) || this.#completedBlobs.has(hash)
+ }
+
+ /** @type {Array} */
+diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
+index 8f861d39345..62b0b1de25f 100644
+--- a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
++++ b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
+@@ -4,23 +4,17 @@ import Stream from 'node:stream'
+ import { setTimeout } from 'node:timers/promises'
+ import { promisify } from 'node:util'
+ import { ObjectId, Binary } from 'mongodb'
+-import {
+- db,
+- backedUpBlobs,
+- globalBlobs,
+-} from '../../../../storage/lib/mongodb.js'
++import { db, globalBlobs } from '../../../../storage/lib/mongodb.js'
+ import cleanup from './support/cleanup.js'
+ import testProjects from '../api/support/test_projects.js'
+ import { execFile } from 'node:child_process'
+ import chai, { expect } from 'chai'
+ import chaiExclude from 'chai-exclude'
+-import config from 'config'
+ import { WritableBuffer } from '@overleaf/stream-utils'
+ import {
+ backupPersistor,
+ projectBlobsBucket,
+ } from '../../../../storage/lib/backupPersistor.mjs'
+-import projectKey from '../../../../storage/lib/project_key.js'
+ import {
+ BlobStore,
+ makeProjectKey,
+@@ -31,9 +25,6 @@ import express from 'express'
+ chai.use(chaiExclude)
+ const TIMEOUT = 20 * 1_000
+
+-const { deksBucket } = config.get('backupStore')
+-const { tieringStorageClass } = config.get('backupPersistor')
+-
+ const projectsCollection = db.collection('projects')
+ const deletedProjectsCollection = db.collection('deletedProjects')
+
+@@ -117,17 +108,6 @@ function binaryForGitBlobHash(gitBlobHash) {
+ return new Binary(Buffer.from(gitBlobHash, 'hex'))
+ }
+
+-async function listS3Bucket(bucket, wantStorageClass) {
+- const client = backupPersistor._getClientForBucket(bucket)
+- const response = await client.listObjectsV2({ Bucket: bucket }).promise()
+-
+- for (const object of response.Contents || []) {
+- expect(object).to.have.property('StorageClass', wantStorageClass)
+- }
+-
+- return (response.Contents || []).map(item => item.Key || '')
+-}
+-
+ function objectIdFromTime(timestamp) {
+ return ObjectId.createFromTime(new Date(timestamp).getTime() / 1000)
+ }
+@@ -591,11 +571,7 @@ describe('back_fill_file_hash script', function () {
+ expect((await fs.promises.readdir('/tmp')).join(';')).to.not.match(
+ /back_fill_file_hash/
+ )
+- const extraStatsKeys = [
+- 'eventLoop',
+- 'readFromGCSThroughputMiBPerSecond',
+- 'writeToAWSThroughputMiBPerSecond',
+- ]
++ const extraStatsKeys = ['eventLoop', 'readFromGCSThroughputMiBPerSecond']
+ const stats = JSON.parse(
+ result.stderr
+ .split('\n')
+@@ -610,7 +586,6 @@ describe('back_fill_file_hash script', function () {
+ delete stats.time
+ if (shouldHaveWritten) {
+ expect(stats.readFromGCSThroughputMiBPerSecond).to.be.greaterThan(0)
+- expect(stats.writeToAWSThroughputMiBPerSecond).to.be.greaterThan(0)
+ }
+ for (const key of extraStatsKeys) {
+ delete stats[key]
+@@ -856,109 +831,6 @@ describe('back_fill_file_hash script', function () {
+ },
+ },
+ ])
+- expect(
+- (await backedUpBlobs.find({}, { sort: { _id: 1 } }).toArray()).map(
+- entry => {
+- // blobs are pushed unordered into mongo. Sort the list for consistency.
+- entry.blobs.sort()
+- return entry
+- }
+- )
+- ).to.deep.equal([
+- {
+- _id: projectId0,
+- blobs: [
+- binaryForGitBlobHash(gitBlobHash(fileId0)),
+- binaryForGitBlobHash(hashFile7),
+- binaryForGitBlobHash(hashTextBlob0),
+- ].sort(),
+- },
+- {
+- _id: projectId1,
+- blobs: [
+- binaryForGitBlobHash(gitBlobHash(fileId1)),
+- binaryForGitBlobHash(hashTextBlob1),
+- ].sort(),
+- },
+- {
+- _id: projectId2,
+- blobs: [binaryForGitBlobHash(hashTextBlob2)]
+- .concat(
+- processHashedFiles
+- ? [binaryForGitBlobHash(gitBlobHash(fileId2))]
+- : []
+- )
+- .sort(),
+- },
+- {
+- _id: projectIdDeleted0,
+- blobs: [binaryForGitBlobHash(gitBlobHash(fileId4))].sort(),
+- },
+- {
+- _id: projectId3,
+- blobs: [binaryForGitBlobHash(gitBlobHash(fileId3))].sort(),
+- },
+- ...(processHashedFiles
+- ? [
+- {
+- _id: projectIdDeleted1,
+- blobs: [binaryForGitBlobHash(gitBlobHash(fileId5))].sort(),
+- },
+- ]
+- : []),
+- {
+- _id: projectIdBadFileTree0,
+- blobs: [binaryForGitBlobHash(hashTextBlob3)].sort(),
+- },
+- {
+- _id: projectIdBadFileTree3,
+- blobs: [binaryForGitBlobHash(gitBlobHash(fileId9))].sort(),
+- },
+- ])
+- })
+- it('should have backed up all the files', async function () {
+- expect(tieringStorageClass).to.exist
+- const blobs = await listS3Bucket(projectBlobsBucket, tieringStorageClass)
+- expect(blobs.sort()).to.deep.equal(
+- Array.from(
+- new Set(
+- writtenBlobs
+- .map(({ historyId, fileId, hash }) =>
+- makeProjectKey(historyId, hash || gitBlobHash(fileId))
+- )
+- .sort()
+- )
+- )
+- )
+- for (let { historyId, fileId, hash, content } of writtenBlobs) {
+- hash = hash || gitBlobHash(fileId.toString())
+- const s = await backupPersistor.getObjectStream(
+- projectBlobsBucket,
+- makeProjectKey(historyId, hash),
+- { autoGunzip: true }
+- )
+- const buf = new WritableBuffer()
+- await Stream.promises.pipeline(s, buf)
+- expect(gitBlobHashBuffer(buf.getContents())).to.equal(hash)
+- if (content) {
+- expect(buf.getContents()).to.deep.equal(content)
+- } else {
+- const id = buf.getContents().toString('utf-8')
+- expect(id).to.equal(fileId.toString())
+- // double check we are not comparing 'undefined' or '[object Object]' above
+- expect(id).to.match(/^[a-f0-9]{24}$/)
+- }
+- }
+- const deks = await listS3Bucket(deksBucket, 'STANDARD')
+- expect(deks.sort()).to.deep.equal(
+- Array.from(
+- new Set(
+- writtenBlobs.map(
+- ({ historyId }) => projectKey.format(historyId) + '/dek'
+- )
+- )
+- ).sort()
+- )
+ })
+ it('should have written the back filled files to history v1', async function () {
+ for (const { historyId, hash, fileId, content } of writtenBlobs) {
+@@ -991,14 +863,13 @@ describe('back_fill_file_hash script', function () {
+ // We still need to iterate over all the projects and blobs.
+ projects: 10,
+ blobs: 10,
+- backedUpBlobs: 10,
++
+ badFileTrees: 4,
+ }
+ if (processHashedFiles) {
+ stats = sumStats(stats, {
+ ...STATS_ALL_ZERO,
+ blobs: 2,
+- backedUpBlobs: 2,
+ })
+ }
+ expect(rerun.stats).deep.equal(stats)
+@@ -1024,7 +895,6 @@ describe('back_fill_file_hash script', function () {
+ const STATS_ALL_ZERO = {
+ projects: 0,
+ blobs: 0,
+- backedUpBlobs: 0,
+ filesWithHash: 0,
+ filesWithoutHash: 0,
+ filesDuplicated: 0,
+@@ -1038,21 +908,14 @@ describe('back_fill_file_hash script', function () {
+ fileHardDeleted: 0,
+ badFileTrees: 0,
+ mongoUpdates: 0,
+- deduplicatedWriteToAWSLocalCount: 0,
+- deduplicatedWriteToAWSLocalEgress: 0,
+- deduplicatedWriteToAWSRemoteCount: 0,
+- deduplicatedWriteToAWSRemoteEgress: 0,
+ readFromGCSCount: 0,
+ readFromGCSIngress: 0,
+- writeToAWSCount: 0,
+- writeToAWSEgress: 0,
+ writeToGCSCount: 0,
+ writeToGCSEgress: 0,
+ }
+ const STATS_UP_TO_PROJECT1 = {
+ projects: 2,
+ blobs: 2,
+- backedUpBlobs: 0,
+ filesWithHash: 0,
+ filesWithoutHash: 5,
+ filesDuplicated: 1,
+@@ -1065,22 +928,15 @@ describe('back_fill_file_hash script', function () {
+ projectHardDeleted: 0,
+ fileHardDeleted: 0,
+ badFileTrees: 0,
+- mongoUpdates: 4,
+- deduplicatedWriteToAWSLocalCount: 0,
+- deduplicatedWriteToAWSLocalEgress: 0,
+- deduplicatedWriteToAWSRemoteCount: 0,
+- deduplicatedWriteToAWSRemoteEgress: 0,
+- readFromGCSCount: 6,
+- readFromGCSIngress: 4000086,
+- writeToAWSCount: 5,
+- writeToAWSEgress: 4026,
++ mongoUpdates: 2, // 4-2 blobs written to backedUpBlobs collection
++ readFromGCSCount: 4,
++ readFromGCSIngress: 4000072,
+ writeToGCSCount: 3,
+ writeToGCSEgress: 4000048,
+ }
+ const STATS_UP_FROM_PROJECT1_ONWARD = {
+ projects: 8,
+ blobs: 2,
+- backedUpBlobs: 0,
+ filesWithHash: 0,
+ filesWithoutHash: 4,
+ filesDuplicated: 0,
+@@ -1093,26 +949,18 @@ describe('back_fill_file_hash script', function () {
+ projectHardDeleted: 0,
+ fileHardDeleted: 0,
+ badFileTrees: 4,
+- mongoUpdates: 8,
+- deduplicatedWriteToAWSLocalCount: 1,
+- deduplicatedWriteToAWSLocalEgress: 30,
+- deduplicatedWriteToAWSRemoteCount: 0,
+- deduplicatedWriteToAWSRemoteEgress: 0,
+- readFromGCSCount: 6,
+- readFromGCSIngress: 110,
+- writeToAWSCount: 5,
+- writeToAWSEgress: 143,
++ mongoUpdates: 3, // previously 5 blobs written to backedUpBlobs collection
++ readFromGCSCount: 4,
++ readFromGCSIngress: 96,
+ writeToGCSCount: 3,
+ writeToGCSEgress: 72,
+ }
+ const STATS_FILES_HASHED_EXTRA = {
+ ...STATS_ALL_ZERO,
+ filesWithHash: 2,
+- mongoUpdates: 2,
++ mongoUpdates: 0, // previously 2 blobs written to backedUpBlobs collection
+ readFromGCSCount: 2,
+ readFromGCSIngress: 48,
+- writeToAWSCount: 2,
+- writeToAWSEgress: 60,
+ writeToGCSCount: 2,
+ writeToGCSEgress: 48,
+ }
+@@ -1144,8 +992,6 @@ describe('back_fill_file_hash script', function () {
+ ...STATS_ALL_ZERO,
+ filesFailed: 1,
+ readFromGCSIngress: -24,
+- writeToAWSCount: -1,
+- writeToAWSEgress: -28,
+ writeToGCSCount: -1,
+ writeToGCSEgress: -24,
+ })
+@@ -1269,13 +1115,14 @@ describe('back_fill_file_hash script', function () {
+ before('run script with hashed files', async function () {
+ output2 = await runScript(['--processHashedFiles=true'], {})
+ })
+- it('should print stats', function () {
++ it('should print stats for the first run without hashed files', function () {
+ expect(output1.stats).deep.equal(STATS_ALL)
++ })
++ it('should print stats for the hashed files run', function () {
+ expect(output2.stats).deep.equal({
+ ...STATS_FILES_HASHED_EXTRA,
+ projects: 10,
+ blobs: 10,
+- backedUpBlobs: 10,
+ badFileTrees: 4,
+ })
+ })
+@@ -1322,9 +1169,7 @@ describe('back_fill_file_hash script', function () {
+ ...STATS_FILES_HASHED_EXTRA,
+ readFromGCSCount: 3,
+ readFromGCSIngress: 72,
+- deduplicatedWriteToAWSLocalCount: 1,
+- deduplicatedWriteToAWSLocalEgress: 30,
+- mongoUpdates: 1,
++ mongoUpdates: 0,
+ filesWithHash: 3,
+ })
+ )
+@@ -1354,48 +1199,6 @@ describe('back_fill_file_hash script', function () {
+ expect(output.stats).deep.equal(
+ sumStats(STATS_ALL, {
+ ...STATS_ALL_ZERO,
+- // one remote deduplicate
+- deduplicatedWriteToAWSRemoteCount: 1,
+- deduplicatedWriteToAWSRemoteEgress: 28,
+- writeToAWSEgress: -28, // subtract skipped egress
+- })
+- )
+- })
+- commonAssertions()
+- })
+-
+- describe('with something in the bucket and marked as processed', function () {
+- before('prepare environment', prepareEnvironment)
+- before('create a file in s3', async function () {
+- await backupPersistor.sendStream(
+- projectBlobsBucket,
+- makeProjectKey(historyId0, hashTextBlob0),
+- Stream.Readable.from([contentTextBlob0]),
+- { contentLength: contentTextBlob0.byteLength }
+- )
+- await backedUpBlobs.insertMany([
+- {
+- _id: projectId0,
+- blobs: [binaryForGitBlobHash(hashTextBlob0)],
+- },
+- ])
+- })
+- let output
+- before('run script', async function () {
+- output = await runScript([], {
+- CONCURRENCY: '1',
+- })
+- })
+-
+- it('should print stats', function () {
+- expect(output.stats).deep.equal(
+- sumStats(STATS_ALL, {
+- ...STATS_ALL_ZERO,
+- backedUpBlobs: 1,
+- writeToAWSCount: -1,
+- writeToAWSEgress: -27,
+- readFromGCSCount: -1,
+- readFromGCSIngress: -7,
+ })
+ )
+ })
+@@ -1418,8 +1221,10 @@ describe('back_fill_file_hash script', function () {
+ })
+ })
+
+- it('should print stats', function () {
++ it('should print stats for part 0', function () {
+ expect(outputPart0.stats).to.deep.equal(STATS_UP_TO_PROJECT1)
++ })
++ it('should print stats for part 1', function () {
+ expect(outputPart1.stats).to.deep.equal(STATS_UP_FROM_PROJECT1_ONWARD)
+ })
+ commonAssertions()
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index 3be1c8a5407..c9ed13c6cb4 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -388,12 +388,6 @@ async function processFileOnce(entry, filePath) {
+ fileId,
+ } = entry
+ const blobStore = new BlobStore(historyId)
+- if (entry.blob) {
+- const { blob } = entry
+- const hash = blob.getHash()
+- return hash
+- }
+-
+ STATS.readFromGCSCount++
+ // make a fetch request to filestore itself
+ const src = await fetchFromFilestore(projectId, fileId)
+@@ -784,16 +778,6 @@ function* findFileInBatch(projects, prefix, blobs) {
+ const historyIdS = project.overleaf.history.id.toString()
+ const projectBlobs = blobs.get(historyIdS) || []
+ const ctx = new ProjectContext(project._id, historyIdS, projectBlobs)
+- for (const blob of projectBlobs) {
+- ctx.remainingQueueEntries++
+- yield {
+- ctx,
+- cacheKey: blob.getHash(),
+- path: MONGO_PATH_SKIP_WRITE_HASH_TO_FILE_TREE,
+- blob,
+- hash: blob.getHash(),
+- }
+- }
+ try {
+ yield* findFiles(ctx, project.rootFolder?.[0], prefix, true)
+ } catch (err) {
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index c9ed13c6cb4..f24ce4a6605 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -387,6 +387,13 @@ async function processFileOnce(entry, filePath) {
+ ctx: { projectId, historyId },
+ fileId,
+ } = entry
++ if (entry.hash && entry.ctx.hasCompletedBlob(entry.hash)) {
++ // We can enter this case for two identical files in the same project,
++ // one with hash, the other without. When the one without hash gets
++ // processed first, we can skip downloading the other one we already
++ // know the hash of.
++ return entry.hash
++ }
+ const blobStore = new BlobStore(historyId)
+ STATS.readFromGCSCount++
+ // make a fetch request to filestore itself
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index f24ce4a6605..0ccadaf5a95 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -559,8 +559,9 @@ async function processBatch(batch, prefix = 'rootFolder.0') {
+ blobs.clear()
+
+ // The files are currently ordered by project-id.
+- // Order them by file-id ASC then blobs ASC to
+- // - process files before blobs
++ // Order them by file-id ASC then hash ASC to
++ // increase the hit rate on the "already processed
++ // hash for project" checks.
+ files.sort(
+ /**
+ * @param {QueueEntry} a
+
diff --git a/server-ce/hotfix/5.5.3/pr_27230.patch b/server-ce/hotfix/5.5.3/pr_27230.patch
new file mode 100644
index 0000000000..79d16f32f4
--- /dev/null
+++ b/server-ce/hotfix/5.5.3/pr_27230.patch
@@ -0,0 +1,191 @@
+
+
+diff --git a/services/web/app.mjs b/services/web/app.mjs
+index b7c723da3d77..3f54cc36a8c3 100644
+--- a/services/web/app.mjs
++++ b/services/web/app.mjs
+@@ -56,14 +56,8 @@ if (Settings.catchErrors) {
+ // Create ./data/dumpFolder if needed
+ FileWriter.ensureDumpFolderExists()
+
+-if (
+- !Features.hasFeature('project-history-blobs') &&
+- !Features.hasFeature('filestore')
+-) {
+- throw new Error(
+- 'invalid config: must enable either project-history-blobs (Settings.enableProjectHistoryBlobs=true) or enable filestore (Settings.disableFilestore=false)'
+- )
+-}
++// Validate combination of feature flags.
++Features.validateSettings()
+
+ // handle SIGTERM for graceful shutdown in kubernetes
+ process.on('SIGTERM', function (signal) {
+diff --git a/services/web/app/src/Features/History/HistoryURLHelper.js b/services/web/app/src/Features/History/HistoryURLHelper.js
+index 8b8d8cbdd730..acb43ced68e0 100644
+--- a/services/web/app/src/Features/History/HistoryURLHelper.js
++++ b/services/web/app/src/Features/History/HistoryURLHelper.js
+@@ -8,7 +8,7 @@ function projectHistoryURLWithFilestoreFallback(
+ ) {
+ const filestoreURL = `${Settings.apis.filestore.url}/project/${projectId}/file/${fileRef._id}?from=${origin}`
+ // TODO: When this file is converted to ES modules we will be able to use Features.hasFeature('project-history-blobs'). Currently we can't stub the feature return value in tests.
+- if (fileRef.hash && Settings.enableProjectHistoryBlobs) {
++ if (fileRef.hash && Settings.filestoreMigrationLevel >= 1) {
+ return {
+ url: `${Settings.apis.project_history.url}/project/${historyId}/blob/${fileRef.hash}`,
+ fallbackURL: filestoreURL,
+diff --git a/services/web/app/src/infrastructure/Features.js b/services/web/app/src/infrastructure/Features.js
+index aaf51103b9b8..89c8e6b841d0 100644
+--- a/services/web/app/src/infrastructure/Features.js
++++ b/services/web/app/src/infrastructure/Features.js
+@@ -19,8 +19,7 @@ const trackChangesModuleAvailable =
+ * @property {boolean | undefined} enableGithubSync
+ * @property {boolean | undefined} enableGitBridge
+ * @property {boolean | undefined} enableHomepage
+- * @property {boolean | undefined} enableProjectHistoryBlobs
+- * @property {boolean | undefined} disableFilestore
++ * @property {number} filestoreMigrationLevel
+ * @property {boolean | undefined} enableSaml
+ * @property {boolean | undefined} ldap
+ * @property {boolean | undefined} oauth
+@@ -29,7 +28,39 @@ const trackChangesModuleAvailable =
+ * @property {boolean | undefined} saml
+ */
+
++/**
++ * @return {{'project-history-blobs': boolean, filestore: boolean}}
++ */
++function getFilestoreMigrationOptions() {
++ switch (Settings.filestoreMigrationLevel) {
++ case 0:
++ return {
++ 'project-history-blobs': false,
++ filestore: true,
++ }
++ case 1:
++ return {
++ 'project-history-blobs': true,
++ filestore: true,
++ }
++
++ case 2:
++ return {
++ 'project-history-blobs': true,
++ filestore: false,
++ }
++ default:
++ throw new Error(
++ `invalid OVERLEAF_FILESTORE_MIGRATION_LEVEL=${Settings.filestoreMigrationLevel}, expected 0, 1 or 2`
++ )
++ }
++}
++
+ const Features = {
++ validateSettings() {
++ getFilestoreMigrationOptions() // throws for invalid settings
++ },
++
+ /**
+ * @returns {boolean}
+ */
+@@ -89,9 +120,9 @@ const Features = {
+ Settings.enabledLinkedFileTypes.includes('url')
+ )
+ case 'project-history-blobs':
+- return Boolean(Settings.enableProjectHistoryBlobs)
++ return getFilestoreMigrationOptions()['project-history-blobs']
+ case 'filestore':
+- return Boolean(Settings.disableFilestore) === false
++ return getFilestoreMigrationOptions().filestore
+ case 'support':
+ return supportModuleAvailable
+ case 'symbol-palette':
+diff --git a/services/web/config/settings.defaults.js b/services/web/config/settings.defaults.js
+index bd0730d5d00c..4df63ebd7c6c 100644
+--- a/services/web/config/settings.defaults.js
++++ b/services/web/config/settings.defaults.js
+@@ -440,6 +440,9 @@ module.exports = {
+ ','
+ ),
+
++ filestoreMigrationLevel:
++ parseInt(process.env.OVERLEAF_FILESTORE_MIGRATION_LEVEL, 10) || 0,
++
+ // i18n
+ // ------
+ //
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index 0ccadaf5a955..2e12328e5c49 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -150,10 +150,6 @@ const CONCURRENT_BATCHES = parseInt(process.env.CONCURRENT_BATCHES || '2', 10)
+ const RETRIES = parseInt(process.env.RETRIES || '10', 10)
+ const RETRY_DELAY_MS = parseInt(process.env.RETRY_DELAY_MS || '100', 10)
+
+-const USER_FILES_BUCKET_NAME = process.env.USER_FILES_BUCKET_NAME || ''
+-if (!USER_FILES_BUCKET_NAME) {
+- throw new Error('env var USER_FILES_BUCKET_NAME is missing')
+-}
+ const RETRY_FILESTORE_404 = process.env.RETRY_FILESTORE_404 === 'true'
+ const BUFFER_DIR = fs.mkdtempSync(
+ process.env.BUFFER_DIR_PREFIX || '/tmp/back_fill_file_hash-'
+
+diff --git a/services/web/app/src/infrastructure/Features.js b/services/web/app/src/infrastructure/Features.js
+index 89c8e6b841d0..6147e70e0faf 100644
+--- a/services/web/app/src/infrastructure/Features.js
++++ b/services/web/app/src/infrastructure/Features.js
+@@ -28,37 +28,13 @@ const trackChangesModuleAvailable =
+ * @property {boolean | undefined} saml
+ */
+
+-/**
+- * @return {{'project-history-blobs': boolean, filestore: boolean}}
+- */
+-function getFilestoreMigrationOptions() {
+- switch (Settings.filestoreMigrationLevel) {
+- case 0:
+- return {
+- 'project-history-blobs': false,
+- filestore: true,
+- }
+- case 1:
+- return {
+- 'project-history-blobs': true,
+- filestore: true,
+- }
+-
+- case 2:
+- return {
+- 'project-history-blobs': true,
+- filestore: false,
+- }
+- default:
++const Features = {
++ validateSettings() {
++ if (![0, 1, 2].includes(Settings.filestoreMigrationLevel)) {
+ throw new Error(
+ `invalid OVERLEAF_FILESTORE_MIGRATION_LEVEL=${Settings.filestoreMigrationLevel}, expected 0, 1 or 2`
+ )
+- }
+-}
+-
+-const Features = {
+- validateSettings() {
+- getFilestoreMigrationOptions() // throws for invalid settings
++ }
+ },
+
+ /**
+@@ -120,9 +96,9 @@ const Features = {
+ Settings.enabledLinkedFileTypes.includes('url')
+ )
+ case 'project-history-blobs':
+- return getFilestoreMigrationOptions()['project-history-blobs']
++ return Settings.filestoreMigrationLevel > 0
+ case 'filestore':
+- return getFilestoreMigrationOptions().filestore
++ return Settings.filestoreMigrationLevel < 2
+ case 'support':
+ return supportModuleAvailable
+ case 'symbol-palette':
diff --git a/server-ce/hotfix/5.5.3/pr_27240.patch b/server-ce/hotfix/5.5.3/pr_27240.patch
new file mode 100644
index 0000000000..f205bf2091
--- /dev/null
+++ b/server-ce/hotfix/5.5.3/pr_27240.patch
@@ -0,0 +1,84 @@
+diff --git a/cron/deactivate-projects.sh b/cron/deactivate-projects.sh
+index fab0fbfbf667..a391f99a5bd8 100755
+--- a/cron/deactivate-projects.sh
++++ b/cron/deactivate-projects.sh
+@@ -1,6 +1,6 @@
+ #!/usr/bin/env bash
+
+-set -eux
++set -eu
+
+ echo "-------------------------"
+ echo "Deactivating old projects"
+diff --git a/cron/delete-projects.sh b/cron/delete-projects.sh
+index e1ea5ac5e621..7cd45771716a 100755
+--- a/cron/delete-projects.sh
++++ b/cron/delete-projects.sh
+@@ -1,6 +1,6 @@
+ #!/usr/bin/env bash
+
+-set -eux
++set -eu
+
+ echo "-------------------------"
+ echo "Expiring deleted projects"
+diff --git a/cron/delete-users.sh b/cron/delete-users.sh
+index fe97bffeea0b..30872ac55657 100755
+--- a/cron/delete-users.sh
++++ b/cron/delete-users.sh
+@@ -1,6 +1,6 @@
+ #!/usr/bin/env bash
+
+-set -eux
++set -eu
+
+ echo "----------------------"
+ echo "Expiring deleted users"
+diff --git a/cron/project-history-flush-all.sh b/cron/project-history-flush-all.sh
+index d8bbb184aa37..8fe9eea5fc55 100755
+--- a/cron/project-history-flush-all.sh
++++ b/cron/project-history-flush-all.sh
+@@ -1,6 +1,6 @@
+ #!/usr/bin/env bash
+
+-set -eux
++set -eu
+
+ echo "---------------------------------"
+ echo "Flush all project-history changes"
+diff --git a/cron/project-history-periodic-flush.sh b/cron/project-history-periodic-flush.sh
+index 76feae410e26..1b8efff6cc7c 100755
+--- a/cron/project-history-periodic-flush.sh
++++ b/cron/project-history-periodic-flush.sh
+@@ -1,6 +1,6 @@
+ #!/usr/bin/env bash
+
+-set -eux
++set -eu
+
+ echo "--------------------------"
+ echo "Flush project-history queue"
+diff --git a/cron/project-history-retry-hard.sh b/cron/project-history-retry-hard.sh
+index 651a6615f22d..df9b4703a58e 100755
+--- a/cron/project-history-retry-hard.sh
++++ b/cron/project-history-retry-hard.sh
+@@ -1,6 +1,6 @@
+ #!/usr/bin/env bash
+
+-set -eux
++set -eu
+
+ echo "-----------------------------------"
+ echo "Retry project-history errors (hard)"
+diff --git a/cron/project-history-retry-soft.sh b/cron/project-history-retry-soft.sh
+index 70c597021b28..cbb6e714cae7 100755
+--- a/cron/project-history-retry-soft.sh
++++ b/cron/project-history-retry-soft.sh
+@@ -1,6 +1,6 @@
+ #!/usr/bin/env bash
+
+-set -eux
++set -eu
+
+ echo "-----------------------------------"
+ echo "Retry project-history errors (soft)"
diff --git a/server-ce/hotfix/5.5.3/pr_27249.patch b/server-ce/hotfix/5.5.3/pr_27249.patch
new file mode 100644
index 0000000000..60014f6b99
--- /dev/null
+++ b/server-ce/hotfix/5.5.3/pr_27249.patch
@@ -0,0 +1,76 @@
+
+
+diff --git a/package-lock.json b/package-lock.json
+index 2b3a5868a20..d9d8285618d 100644
+--- a/package-lock.json
++++ b/package-lock.json
+@@ -35581,6 +35581,7 @@
+ "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz",
+ "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==",
+ "deprecated": "request has been deprecated, see https://github.com/request/request/issues/3142",
++ "license": "Apache-2.0",
+ "dependencies": {
+ "aws-sign2": "~0.7.0",
+ "aws4": "^1.8.0",
+@@ -35638,15 +35639,15 @@
+ }
+ },
+ "node_modules/request/node_modules/tough-cookie": {
+- "version": "2.5.0",
+- "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz",
+- "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==",
++ "version": "5.1.2",
++ "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-5.1.2.tgz",
++ "integrity": "sha512-FVDYdxtnj0G6Qm/DhNPSb8Ju59ULcup3tuJxkFb5K8Bv2pUXILbf0xZWU8PX8Ov19OXljbUyveOFwRMwkXzO+A==",
++ "license": "BSD-3-Clause",
+ "dependencies": {
+- "psl": "^1.1.28",
+- "punycode": "^2.1.1"
++ "tldts": "^6.1.32"
+ },
+ "engines": {
+- "node": ">=0.8"
++ "node": ">=16"
+ }
+ },
+ "node_modules/requestretry": {
+@@ -39612,6 +39613,24 @@
+ "tlds": "bin.js"
+ }
+ },
++ "node_modules/tldts": {
++ "version": "6.1.86",
++ "resolved": "https://registry.npmjs.org/tldts/-/tldts-6.1.86.tgz",
++ "integrity": "sha512-WMi/OQ2axVTf/ykqCQgXiIct+mSQDFdH2fkwhPwgEwvJ1kSzZRiinb0zF2Xb8u4+OqPChmyI6MEu4EezNJz+FQ==",
++ "license": "MIT",
++ "dependencies": {
++ "tldts-core": "^6.1.86"
++ },
++ "bin": {
++ "tldts": "bin/cli.js"
++ }
++ },
++ "node_modules/tldts-core": {
++ "version": "6.1.86",
++ "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-6.1.86.tgz",
++ "integrity": "sha512-Je6p7pkk+KMzMv2XXKmAE3McmolOQFdxkKw0R8EYNr7sELW46JqnNeTX8ybPiQgvg1ymCoF8LXs5fzFaZvJPTA==",
++ "license": "MIT"
++ },
+ "node_modules/tmp": {
+ "version": "0.2.3",
+ "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.3.tgz",
+diff --git a/package.json b/package.json
+index 388b750c3d2..44fffc4664a 100644
+--- a/package.json
++++ b/package.json
+@@ -33,6 +33,9 @@
+ "multer": "2.0.1",
+ "path-to-regexp": "3.3.0",
+ "qs": "6.13.0"
++ },
++ "request@2.88.2": {
++ "tough-cookie": "5.1.2"
+ }
+ },
+ "scripts": {
+
diff --git a/server-ce/hotfix/5.5.3/pr_27257.patch b/server-ce/hotfix/5.5.3/pr_27257.patch
new file mode 100644
index 0000000000..ca7e47c84b
--- /dev/null
+++ b/server-ce/hotfix/5.5.3/pr_27257.patch
@@ -0,0 +1,1469 @@
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index 0ccadaf5a95..4111c42c4d1 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -111,10 +111,8 @@ function parseArgs() {
+ if (['true', 'false'].includes(v)) return v === 'true'
+ throw new Error(`expected "true" or "false" for boolean option ${name}`)
+ }
+- const BATCH_RANGE_START = objectIdFromInput(
+- args['BATCH_RANGE_START']
+- ).toString()
+- const BATCH_RANGE_END = objectIdFromInput(args['BATCH_RANGE_END']).toString()
++ const BATCH_RANGE_START = objectIdFromInput(args.BATCH_RANGE_START).toString()
++ const BATCH_RANGE_END = objectIdFromInput(args.BATCH_RANGE_END).toString()
+ return {
+ PROCESS_NON_DELETED_PROJECTS: boolVal('processNonDeletedProjects'),
+ PROCESS_DELETED_PROJECTS: boolVal('processDeletedProjects'),
+@@ -122,8 +120,8 @@ function parseArgs() {
+ PROCESS_HASHED_FILES: boolVal('processHashedFiles'),
+ BATCH_RANGE_START,
+ BATCH_RANGE_END,
+- LOGGING_IDENTIFIER: args['LOGGING_IDENTIFIER'] || BATCH_RANGE_START,
+- PROJECT_IDS_FROM: args['projectIdsFrom'],
++ LOGGING_IDENTIFIER: args.LOGGING_IDENTIFIER || BATCH_RANGE_START,
++ PROJECT_IDS_FROM: args.projectIdsFrom,
+ }
+ }
+
+@@ -249,8 +247,8 @@ let lastEventLoopStats = performance.eventLoopUtilization()
+ * @param {number} ms
+ */
+ function toMiBPerSecond(v, ms) {
+- const ONE_MiB = 1024 * 1024
+- return v / ONE_MiB / (ms / 1000)
++ const MiB = 1024 * 1024
++ return v / MiB / (ms / 1000)
+ }
+
+ /**
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index 4111c42c4d1..2d55b41b43e 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -84,11 +84,11 @@ ObjectId.cacheHexString = true
+ function parseArgs() {
+ const PUBLIC_LAUNCH_DATE = new Date('2012-01-01T00:00:00Z')
+ const args = commandLineArgs([
+- { name: 'processNonDeletedProjects', type: String, defaultValue: 'false' },
+- { name: 'processDeletedProjects', type: String, defaultValue: 'false' },
+- { name: 'processHashedFiles', type: String, defaultValue: 'false' },
+- { name: 'processBlobs', type: String, defaultValue: 'true' },
+- { name: 'projectIdsFrom', type: String, defaultValue: '' },
++ { name: 'projects', type: Boolean },
++ { name: 'deleted-projects', type: Boolean },
++ { name: 'include-hashed-files', type: Boolean },
++ { name: 'skip-existing-blobs', type: Boolean },
++ { name: 'from-file', type: String, defaultValue: '' },
+ {
+ name: 'BATCH_RANGE_START',
+ type: String,
+@@ -99,29 +99,20 @@ function parseArgs() {
+ type: String,
+ defaultValue: new Date().toISOString(),
+ },
+- { name: 'LOGGING_IDENTIFIER', type: String, defaultValue: '' },
++ { name: 'logging-id', type: String, defaultValue: '' },
+ ])
+- /**
+- * commandLineArgs cannot handle --foo=false, so go the long way
+- * @param {string} name
+- * @return {boolean}
+- */
+- function boolVal(name) {
+- const v = args[name]
+- if (['true', 'false'].includes(v)) return v === 'true'
+- throw new Error(`expected "true" or "false" for boolean option ${name}`)
+- }
++
+ const BATCH_RANGE_START = objectIdFromInput(args.BATCH_RANGE_START).toString()
+ const BATCH_RANGE_END = objectIdFromInput(args.BATCH_RANGE_END).toString()
+ return {
+- PROCESS_NON_DELETED_PROJECTS: boolVal('processNonDeletedProjects'),
+- PROCESS_DELETED_PROJECTS: boolVal('processDeletedProjects'),
+- PROCESS_BLOBS: boolVal('processBlobs'),
+- PROCESS_HASHED_FILES: boolVal('processHashedFiles'),
++ PROCESS_NON_DELETED_PROJECTS: args.projects,
++ PROCESS_DELETED_PROJECTS: args['deleted-projects'],
++ PROCESS_HASHED_FILES: args['include-hashed-files'],
++ PROCESS_BLOBS: !args['skip-existing-blobs'],
+ BATCH_RANGE_START,
+ BATCH_RANGE_END,
+- LOGGING_IDENTIFIER: args.LOGGING_IDENTIFIER || BATCH_RANGE_START,
+- PROJECT_IDS_FROM: args.projectIdsFrom,
++ LOGGING_IDENTIFIER: args['logging-id'] || BATCH_RANGE_START,
++ PROJECT_IDS_FROM: args['from-file'],
+ }
+ }
+
+diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
+index 62b0b1de25f..0f8bdbf3e1a 100644
+--- a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
++++ b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
+@@ -544,8 +544,8 @@ describe('back_fill_file_hash script', function () {
+ process.argv0,
+ [
+ 'storage/scripts/back_fill_file_hash.mjs',
+- '--processNonDeletedProjects=true',
+- '--processDeletedProjects=true',
++ '--projects',
++ '--deleted-projects',
+ ...args,
+ ],
+ {
+@@ -854,7 +854,7 @@ describe('back_fill_file_hash script', function () {
+ // Practically, this is slow and moving it to the end of the tests gets us there most of the way.
+ it('should process nothing on re-run', async function () {
+ const rerun = await runScript(
+- processHashedFiles ? ['--processHashedFiles=true'] : [],
++ processHashedFiles ? ['--include-hashed-files'] : [],
+ {},
+ false
+ )
+@@ -1113,7 +1113,7 @@ describe('back_fill_file_hash script', function () {
+ output1 = await runScript([], {})
+ })
+ before('run script with hashed files', async function () {
+- output2 = await runScript(['--processHashedFiles=true'], {})
++ output2 = await runScript(['--include-hashed-files'], {})
+ })
+ it('should print stats for the first run without hashed files', function () {
+ expect(output1.stats).deep.equal(STATS_ALL)
+@@ -1161,7 +1161,7 @@ describe('back_fill_file_hash script', function () {
+ let output
+ before('prepare environment', prepareEnvironment)
+ before('run script', async function () {
+- output = await runScript(['--processHashedFiles=true'], {})
++ output = await runScript(['--include-hashed-files'], {})
+ })
+ it('should print stats', function () {
+ expect(output.stats).deep.equal(
+@@ -1263,10 +1263,10 @@ describe('back_fill_file_hash script', function () {
+
+ let outputPart0, outputPart1
+ before('run script on part 0', async function () {
+- outputPart0 = await runScript([`--projectIdsFrom=${path0}`])
++ outputPart0 = await runScript([`--from-file=${path0}`])
+ })
+ before('run script on part 1', async function () {
+- outputPart1 = await runScript([`--projectIdsFrom=${path1}`])
++ outputPart1 = await runScript([`--from-file=${path1}`])
+ })
+
+ /**
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index 2d55b41b43e..68ce4b67aa2 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -79,7 +79,7 @@ ObjectId.cacheHexString = true
+ */
+
+ /**
+- * @return {{PROJECT_IDS_FROM: string, PROCESS_HASHED_FILES: boolean, LOGGING_IDENTIFIER: string, BATCH_RANGE_START: string, PROCESS_BLOBS: boolean, BATCH_RANGE_END: string, PROCESS_NON_DELETED_PROJECTS: boolean, PROCESS_DELETED_PROJECTS: boolean}}
++ * @return {{PROJECT_IDS_FROM: string, PROCESS_HASHED_FILES: boolean, LOGGING_IDENTIFIER: string, BATCH_RANGE_START: string, PROCESS_BLOBS: boolean, BATCH_RANGE_END: string, PROCESS_NON_DELETED_PROJECTS: boolean, PROCESS_DELETED_PROJECTS: boolean, DRY_RUN: boolean}}
+ */
+ function parseArgs() {
+ const PUBLIC_LAUNCH_DATE = new Date('2012-01-01T00:00:00Z')
+@@ -89,6 +89,7 @@ function parseArgs() {
+ { name: 'include-hashed-files', type: Boolean },
+ { name: 'skip-existing-blobs', type: Boolean },
+ { name: 'from-file', type: String, defaultValue: '' },
++ { name: 'dry-run', type: Boolean },
+ {
+ name: 'BATCH_RANGE_START',
+ type: String,
+@@ -109,6 +110,7 @@ function parseArgs() {
+ PROCESS_DELETED_PROJECTS: args['deleted-projects'],
+ PROCESS_HASHED_FILES: args['include-hashed-files'],
+ PROCESS_BLOBS: !args['skip-existing-blobs'],
++ DRY_RUN: args['dry-run'],
+ BATCH_RANGE_START,
+ BATCH_RANGE_END,
+ LOGGING_IDENTIFIER: args['logging-id'] || BATCH_RANGE_START,
+@@ -121,6 +123,7 @@ const {
+ PROCESS_DELETED_PROJECTS,
+ PROCESS_BLOBS,
+ PROCESS_HASHED_FILES,
++ DRY_RUN,
+ BATCH_RANGE_START,
+ BATCH_RANGE_END,
+ LOGGING_IDENTIFIER,
+@@ -325,10 +328,12 @@ async function processFileWithCleanup(entry) {
+ try {
+ return await processFile(entry, filePath)
+ } finally {
+- await Promise.all([
+- fs.promises.rm(filePath, { force: true }),
+- fs.promises.rm(filePath + GZ_SUFFIX, { force: true }),
+- ])
++ if (!DRY_RUN) {
++ await Promise.all([
++ fs.promises.rm(filePath, { force: true }),
++ fs.promises.rm(filePath + GZ_SUFFIX, { force: true }),
++ ])
++ }
+ }
+ }
+
+@@ -383,6 +388,12 @@ async function processFileOnce(entry, filePath) {
+ // know the hash of.
+ return entry.hash
+ }
++ if (DRY_RUN) {
++ console.log(
++ `DRY-RUN: would process file ${fileId} for project ${projectId}`
++ )
++ return 'dry-run'
++ }
+ const blobStore = new BlobStore(historyId)
+ STATS.readFromGCSCount++
+ // make a fetch request to filestore itself
+
+
+
+diff --git a/libraries/logger/logging-manager.js b/libraries/logger/logging-manager.js
+index edf922be72b..9fb4f284053 100644
+--- a/libraries/logger/logging-manager.js
++++ b/libraries/logger/logging-manager.js
+@@ -11,7 +11,7 @@ const LoggingManager = {
+ /**
+ * @param {string} name - The name of the logger
+ */
+- initialize(name) {
++ initialize(name, options = {}) {
+ this.isProduction =
+ (process.env.NODE_ENV || '').toLowerCase() === 'production'
+ const isTest = (process.env.NODE_ENV || '').toLowerCase() === 'test'
+@@ -27,7 +27,7 @@ const LoggingManager = {
+ req: Serializers.req,
+ res: Serializers.res,
+ },
+- streams: [this._getOutputStreamConfig()],
++ streams: options.streams ?? [this._getOutputStreamConfig()],
+ })
+ this._setupRingBuffer()
+ this._setupLogLevelChecker()
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index 68ce4b67aa2..a7f220ec362 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -79,10 +79,14 @@ ObjectId.cacheHexString = true
+ */
+
+ /**
+- * @return {{PROJECT_IDS_FROM: string, PROCESS_HASHED_FILES: boolean, LOGGING_IDENTIFIER: string, BATCH_RANGE_START: string, PROCESS_BLOBS: boolean, BATCH_RANGE_END: string, PROCESS_NON_DELETED_PROJECTS: boolean, PROCESS_DELETED_PROJECTS: boolean, DRY_RUN: boolean}}
++ * @return {{PROJECT_IDS_FROM: string, PROCESS_HASHED_FILES: boolean, LOGGING_IDENTIFIER: string, BATCH_RANGE_START: string, PROCESS_BLOBS: boolean, BATCH_RANGE_END: string, PROCESS_NON_DELETED_PROJECTS: boolean, PROCESS_DELETED_PROJECTS: boolean, DRY_RUN: boolean, OUTPUT_FILE: string, PROCESS_BLOBS: boolean}}
+ */
+ function parseArgs() {
+ const PUBLIC_LAUNCH_DATE = new Date('2012-01-01T00:00:00Z')
++ const DEFAULT_OUTPUT_FILE = `file-migration-${new Date()
++ .toISOString()
++ .replace(/[:.]/g, '_')}.log`
++
+ const args = commandLineArgs([
+ { name: 'projects', type: Boolean },
+ { name: 'deleted-projects', type: Boolean },
+@@ -90,6 +94,7 @@ function parseArgs() {
+ { name: 'skip-existing-blobs', type: Boolean },
+ { name: 'from-file', type: String, defaultValue: '' },
+ { name: 'dry-run', type: Boolean },
++ { name: 'output', type: String, defaultValue: DEFAULT_OUTPUT_FILE },
+ {
+ name: 'BATCH_RANGE_START',
+ type: String,
+@@ -111,6 +116,7 @@ function parseArgs() {
+ PROCESS_HASHED_FILES: args['include-hashed-files'],
+ PROCESS_BLOBS: !args['skip-existing-blobs'],
+ DRY_RUN: args['dry-run'],
++ OUTPUT_FILE: args.output,
+ BATCH_RANGE_START,
+ BATCH_RANGE_END,
+ LOGGING_IDENTIFIER: args['logging-id'] || BATCH_RANGE_START,
+@@ -124,6 +130,7 @@ const {
+ PROCESS_BLOBS,
+ PROCESS_HASHED_FILES,
+ DRY_RUN,
++ OUTPUT_FILE,
+ BATCH_RANGE_START,
+ BATCH_RANGE_END,
+ LOGGING_IDENTIFIER,
+@@ -158,6 +165,21 @@ const STREAM_HIGH_WATER_MARK = parseInt(
+ const LOGGING_INTERVAL = parseInt(process.env.LOGGING_INTERVAL || '60000', 10)
+ const SLEEP_BEFORE_EXIT = parseInt(process.env.SLEEP_BEFORE_EXIT || '1000', 10)
+
++// Log output to a file
++logger.initialize('file-migration', {
++ streams: [
++ {
++ stream:
++ OUTPUT_FILE === '-'
++ ? process.stdout
++ : fs.createWriteStream(OUTPUT_FILE, { flags: 'a' }),
++ },
++ ],
++})
++async function trackProgress(progress) {
++ logger.info({}, progress)
++}
++
+ // Filestore endpoint location
+ const FILESTORE_HOST = process.env.FILESTORE_HOST || '127.0.0.1'
+ const FILESTORE_PORT = process.env.FILESTORE_PORT || '3009'
+@@ -525,8 +547,9 @@ async function queueNextBatch(batch, prefix = 'rootFolder.0') {
+ const end = renderObjectId(batch[batch.length - 1]._id)
+ const deferred = processBatch(batch, prefix)
+ .then(() => {
+- console.error(`Actually completed batch ending ${end}`)
++ logger.info({ end }, 'actually completed batch')
+ })
++
+ .catch(err => {
+ logger.error({ err, start, end }, 'fatal error processing batch')
+ throw err
+@@ -1062,6 +1085,7 @@ async function processNonDeletedProjects() {
+ {
+ BATCH_RANGE_START,
+ BATCH_RANGE_END,
++ trackProgress,
+ }
+ )
+ } catch (err) {
+diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
+index 0f8bdbf3e1a..117352d6164 100644
+--- a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
++++ b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
+@@ -544,6 +544,7 @@ describe('back_fill_file_hash script', function () {
+ process.argv0,
+ [
+ 'storage/scripts/back_fill_file_hash.mjs',
++ '--output=-',
+ '--projects',
+ '--deleted-projects',
+ ...args,
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index a7f220ec362..4beba19cf4c 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -88,6 +88,7 @@ function parseArgs() {
+ .replace(/[:.]/g, '_')}.log`
+
+ const args = commandLineArgs([
++ { name: 'all', alias: 'a', type: Boolean },
+ { name: 'projects', type: Boolean },
+ { name: 'deleted-projects', type: Boolean },
+ { name: 'include-hashed-files', type: Boolean },
+@@ -108,6 +109,36 @@ function parseArgs() {
+ { name: 'logging-id', type: String, defaultValue: '' },
+ ])
+
++ // If no arguments are provided, display a usage message
++ if (process.argv.length <= 2) {
++ console.error(
++ 'Usage: node back_fill_file_hash.mjs --all | --projects | --deleted-projects'
++ )
++ process.exit(1)
++ }
++
++ // Require at least one of --projects, --deleted-projects and --all
++ if (!args.projects && !args['deleted-projects'] && !args.all) {
++ console.error(
++ 'Must specify at least one of --projects and --deleted-projects, or --all'
++ )
++ process.exit(1)
++ }
++
++ // Forbid --all with --projects or --deleted-projects
++ if (args.all && (args.projects || args['deleted-projects'])) {
++ console.error('Cannot use --all with --projects or --deleted-projects')
++ process.exit(1)
++ }
++
++ // The --all option processes all projects, including deleted ones
++ // and checks existing hashed files are present in the blob store.
++ if (args.all) {
++ args.projects = true
++ args['deleted-projects'] = true
++ args['include-hashed-files'] = true
++ }
++
+ const BATCH_RANGE_START = objectIdFromInput(args.BATCH_RANGE_START).toString()
+ const BATCH_RANGE_END = objectIdFromInput(args.BATCH_RANGE_END).toString()
+ return {
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index 4beba19cf4c..492c5ad939d 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -88,6 +88,7 @@ function parseArgs() {
+ .replace(/[:.]/g, '_')}.log`
+
+ const args = commandLineArgs([
++ { name: 'help', alias: 'h', type: Boolean },
+ { name: 'all', alias: 'a', type: Boolean },
+ { name: 'projects', type: Boolean },
+ { name: 'deleted-projects', type: Boolean },
+@@ -117,6 +118,48 @@ function parseArgs() {
+ process.exit(1)
+ }
+
++ // If --help is provided, display the help message
++ if (args.help) {
++ console.log(`Usage: node back_fill_file_hash.mjs [options]
++
++Project selection options:
++ --all, -a Process all projects, including deleted ones
++ --projects Process projects (excluding deleted ones)
++ --deleted-projects Process deleted projects
++ --from-file Process selected project ids from file
++
++File selection options:
++ --include-hashed-files Process files that already have a hash
++ --skip-existing-blobs Skip processing files already in the blob store
++
++Logging options:
++ --output Output log to the specified file
++ (default: file-migration-.log)
++ --logging-id Identifier for logging
++ (default: BATCH_RANGE_START)
++
++Batch range options:
++ --BATCH_RANGE_START Start date for processing
++ (default: ${args.BATCH_RANGE_START})
++ --BATCH_RANGE_END End date for processing
++ (default: ${args.BATCH_RANGE_END})
++
++Other options:
++ --dry-run Perform a dry run without making changes
++ --help, -h Show this help message
++
++Typical usage:
++
++ node back_fill_file_hash.mjs --all
++
++is equivalent to
++
++ node back_fill_file_hash.mjs --projects --deleted-projects \\
++ --include-hashed-files
++`)
++ process.exit(0)
++ }
++
+ // Require at least one of --projects, --deleted-projects and --all
+ if (!args.projects && !args['deleted-projects'] && !args.all) {
+ console.error(
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index 492c5ad939d..b20e365c4ff 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -79,7 +79,7 @@ ObjectId.cacheHexString = true
+ */
+
+ /**
+- * @return {{PROJECT_IDS_FROM: string, PROCESS_HASHED_FILES: boolean, LOGGING_IDENTIFIER: string, BATCH_RANGE_START: string, PROCESS_BLOBS: boolean, BATCH_RANGE_END: string, PROCESS_NON_DELETED_PROJECTS: boolean, PROCESS_DELETED_PROJECTS: boolean, DRY_RUN: boolean, OUTPUT_FILE: string, PROCESS_BLOBS: boolean}}
++ * @return {{PROJECT_IDS_FROM: string, PROCESS_HASHED_FILES: boolean, LOGGING_IDENTIFIER: string, BATCH_RANGE_START: string, BATCH_RANGE_END: string, PROCESS_NON_DELETED_PROJECTS: boolean, PROCESS_DELETED_PROJECTS: boolean, PROCESS_BLOBS: boolean, DRY_RUN: boolean, OUTPUT_FILE: string, DISPLAY_REPORT: boolean}}
+ */
+ function parseArgs() {
+ const PUBLIC_LAUNCH_DATE = new Date('2012-01-01T00:00:00Z')
+@@ -97,6 +97,7 @@ function parseArgs() {
+ { name: 'from-file', type: String, defaultValue: '' },
+ { name: 'dry-run', type: Boolean },
+ { name: 'output', type: String, defaultValue: DEFAULT_OUTPUT_FILE },
++ { name: 'report', type: Boolean },
+ {
+ name: 'BATCH_RANGE_START',
+ type: String,
+@@ -145,6 +146,7 @@ Batch range options:
+ (default: ${args.BATCH_RANGE_END})
+
+ Other options:
++ --report Display a report of the current status
+ --dry-run Perform a dry run without making changes
+ --help, -h Show this help message
+
+@@ -160,10 +162,15 @@ is equivalent to
+ process.exit(0)
+ }
+
+- // Require at least one of --projects, --deleted-projects and --all
+- if (!args.projects && !args['deleted-projects'] && !args.all) {
++ // Require at least one of --projects, --deleted-projects and --all or --report
++ if (
++ !args.projects &&
++ !args['deleted-projects'] &&
++ !args.all &&
++ !args.report
++ ) {
+ console.error(
+- 'Must specify at least one of --projects and --deleted-projects, or --all'
++ 'Must specify at least one of --projects and --deleted-projects, --all or --report'
+ )
+ process.exit(1)
+ }
+@@ -174,6 +181,14 @@ is equivalent to
+ process.exit(1)
+ }
+
++ // Forbid --all, --projects, --deleted-projects with --report
++ if (args.report && (args.all || args.projects || args['deleted-projects'])) {
++ console.error(
++ 'Cannot use --report with --all, --projects or --deleted-projects'
++ )
++ process.exit(1)
++ }
++
+ // The --all option processes all projects, including deleted ones
+ // and checks existing hashed files are present in the blob store.
+ if (args.all) {
+@@ -195,6 +210,7 @@ is equivalent to
+ BATCH_RANGE_END,
+ LOGGING_IDENTIFIER: args['logging-id'] || BATCH_RANGE_START,
+ PROJECT_IDS_FROM: args['from-file'],
++ DISPLAY_REPORT: args.report,
+ }
+ }
+
+@@ -209,6 +225,7 @@ const {
+ BATCH_RANGE_END,
+ LOGGING_IDENTIFIER,
+ PROJECT_IDS_FROM,
++ DISPLAY_REPORT,
+ } = parseArgs()
+
+ // We need to handle the start and end differently as ids of deleted projects are created at time of deletion.
+@@ -254,6 +271,108 @@ async function trackProgress(progress) {
+ logger.info({}, progress)
+ }
+
++/**
++ * Display the stats for the projects or deletedProjects collections.
++ *
++ * @param {number} N - Number of samples to take from the collection.
++ * @param {string} name - Name of the collection being sampled.
++ * @param {Collection} collection - MongoDB collection to query.
++ * @param {Object} query - MongoDB query to filter documents.
++ * @param {Object} projection - MongoDB projection to select fields.
++ * @param {number} collectionCount - Total number of documents in the collection.
++ * @returns {Promise} Resolves when stats have been displayed.
++ */
++async function getStatsForCollection(
++ N,
++ name,
++ collection,
++ query,
++ projection,
++ collectionCount
++) {
++ const stats = {
++ projectCount: 0,
++ projectsWithAllHashes: 0,
++ fileCount: 0,
++ fileWithHashCount: 0,
++ }
++ // Pick a random sample of projects and estimate the number of files without hashes
++ const result = await collection
++ .aggregate([
++ { $sample: { size: N } },
++ { $match: query },
++ {
++ $project: projection,
++ },
++ ])
++ .toArray()
++
++ for (const project of result) {
++ const fileTree = JSON.stringify(project, [
++ 'rootFolder',
++ 'folders',
++ 'fileRefs',
++ 'hash',
++ '_id',
++ ])
++ // count the number of files without a hash, these are uniquely identified
++ // by entries with {"_id":"...."} since we have filtered the file tree
++ const filesWithoutHash = fileTree.match(/\{"_id":"[0-9a-f]{24}"\}/g) || []
++ // count the number of files with a hash, these are uniquely identified
++ // by the number of "hash" strings due to the filtering
++ const filesWithHash = fileTree.match(/"hash"/g) || []
++ stats.fileCount += filesWithoutHash.length + filesWithHash.length
++ stats.fileWithHashCount += filesWithHash.length
++ stats.projectCount++
++ stats.projectsWithAllHashes += filesWithoutHash.length === 0 ? 1 : 0
++ }
++ console.log(`Sampled stats for ${name}:`)
++ const fractionSampled = stats.projectCount / collectionCount
++ const percentageSampled = (fractionSampled * 100).toFixed(1)
++ const fractionConverted = stats.projectsWithAllHashes / stats.projectCount
++ const percentageConverted = (fractionConverted * 100).toFixed(1)
++ console.log(
++ `- Sampled ${name}: ${stats.projectCount} (${percentageSampled}%)`
++ )
++ console.log(
++ `- Sampled ${name} with all hashes present: ${stats.projectsWithAllHashes}`
++ )
++ console.log(
++ `- Percentage of ${name} converted: ${percentageConverted}% (estimated)`
++ )
++}
++
++/**
++ * Displays a report of the current status of projects and deleted projects,
++ * including counts and estimated progress based on a sample.
++ */
++async function displayReport() {
++ const projectsCountResult = await projectsCollection.countDocuments()
++ const deletedProjectsCountResult =
++ await deletedProjectsCollection.countDocuments()
++ const sampleSize = 1000
++ console.log('Current status:')
++ console.log(`- Projects: ${projectsCountResult}`)
++ console.log(`- Deleted projects: ${deletedProjectsCountResult}`)
++ console.log(`Sampling ${sampleSize} projects to estimate progress...`)
++ await getStatsForCollection(
++ sampleSize,
++ 'projects',
++ projectsCollection,
++ { rootFolder: { $exists: true } },
++ { rootFolder: 1 },
++ projectsCountResult
++ )
++ await getStatsForCollection(
++ sampleSize,
++ 'deleted projects',
++ deletedProjectsCollection,
++ { 'project.rootFolder': { $exists: true } },
++ { 'project.rootFolder': 1 },
++ deletedProjectsCountResult
++ )
++}
++
+ // Filestore endpoint location
+ const FILESTORE_HOST = process.env.FILESTORE_HOST || '127.0.0.1'
+ const FILESTORE_PORT = process.env.FILESTORE_PORT || '3009'
+@@ -1220,6 +1339,12 @@ async function main() {
+ console.warn('Done.')
+ }
+
++if (DISPLAY_REPORT) {
++ console.warn('Displaying report...')
++ await displayReport()
++ process.exit(0)
++}
++
+ try {
+ try {
+ await main()
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index b20e365c4ff..2bfc4051622 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -267,8 +267,20 @@ logger.initialize('file-migration', {
+ },
+ ],
+ })
++
++let lastElapsedTime = 0
+ async function trackProgress(progress) {
+- logger.info({}, progress)
++ const elapsedTime = Math.floor((performance.now() - processStart) / 1000)
++ if (lastElapsedTime === elapsedTime) {
++ // Avoid spamming the console with the same progress message
++ return
++ }
++ lastElapsedTime = elapsedTime
++ readline.clearLine(process.stdout, 0)
++ readline.cursorTo(process.stdout, 0)
++ process.stdout.write(
++ `Processed ${STATS.projects} projects, elapsed time ${elapsedTime}s`
++ )
+ }
+
+ /**
+@@ -1287,7 +1299,7 @@ async function processNonDeletedProjects() {
+ } finally {
+ await waitForDeferredQueues()
+ }
+- console.warn('Done updating live projects')
++ console.warn('\nDone updating live projects')
+ }
+
+ async function processDeletedProjects() {
+@@ -1306,7 +1318,9 @@ async function processDeletedProjects() {
+ 'project.rootFolder': 1,
+ 'project._id': 1,
+ 'project.overleaf.history.id': 1,
+- }
++ },
++ {},
++ { trackProgress }
+ )
+ } catch (err) {
+ gracefulShutdownInitiated = true
+@@ -1314,7 +1328,7 @@ async function processDeletedProjects() {
+ } finally {
+ await waitForDeferredQueues()
+ }
+- console.warn('Done updating deleted projects')
++ console.warn('\nDone updating deleted projects')
+ }
+
+ async function main() {
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index 2bfc4051622..c9fd7d233a7 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -94,9 +94,14 @@ function parseArgs() {
+ { name: 'deleted-projects', type: Boolean },
+ { name: 'include-hashed-files', type: Boolean },
+ { name: 'skip-existing-blobs', type: Boolean },
+- { name: 'from-file', type: String, defaultValue: '' },
+- { name: 'dry-run', type: Boolean },
+- { name: 'output', type: String, defaultValue: DEFAULT_OUTPUT_FILE },
++ { name: 'from-file', alias: 'f', type: String, defaultValue: '' },
++ { name: 'dry-run', alias: 'n', type: Boolean },
++ {
++ name: 'output',
++ alias: 'o',
++ type: String,
++ defaultValue: DEFAULT_OUTPUT_FILE,
++ },
+ { name: 'report', type: Boolean },
+ {
+ name: 'BATCH_RANGE_START',
+@@ -127,14 +132,14 @@ Project selection options:
+ --all, -a Process all projects, including deleted ones
+ --projects Process projects (excluding deleted ones)
+ --deleted-projects Process deleted projects
+- --from-file Process selected project ids from file
++ --from-file , -f Process selected project ids from file
+
+ File selection options:
+ --include-hashed-files Process files that already have a hash
+ --skip-existing-blobs Skip processing files already in the blob store
+
+ Logging options:
+- --output Output log to the specified file
++ --output , -o Output log to the specified file
+ (default: file-migration-.log)
+ --logging-id Identifier for logging
+ (default: BATCH_RANGE_START)
+@@ -147,7 +152,7 @@ Batch range options:
+
+ Other options:
+ --report Display a report of the current status
+- --dry-run Perform a dry run without making changes
++ --dry-run, -n Perform a dry run without making changes
+ --help, -h Show this help message
+
+ Typical usage:
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index c9fd7d233a7..8f28e8a4d78 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -326,6 +326,7 @@ async function getStatsForCollection(
+
+ for (const project of result) {
+ const fileTree = JSON.stringify(project, [
++ 'project',
+ 'rootFolder',
+ 'folders',
+ 'fileRefs',
+
+
+
+diff --git a/libraries/mongo-utils/batchedUpdate.js b/libraries/mongo-utils/batchedUpdate.js
+index 41af41f0d4a..f1253c587d3 100644
+--- a/libraries/mongo-utils/batchedUpdate.js
++++ b/libraries/mongo-utils/batchedUpdate.js
+@@ -35,7 +35,7 @@ let BATCHED_UPDATE_RUNNING = false
+ * @property {string} [BATCH_RANGE_START]
+ * @property {string} [BATCH_SIZE]
+ * @property {string} [VERBOSE_LOGGING]
+- * @property {(progress: string) => Promise} [trackProgress]
++ * @property {(progress: string, options?: object) => Promise} [trackProgress]
+ */
+
+ /**
+@@ -269,9 +269,12 @@ async function batchedUpdate(
+ await performUpdate(collection, nextBatch, update)
+ }
+ }
+- await trackProgress(`Completed batch ending ${renderObjectId(end)}`)
++ await trackProgress(`Completed batch ending ${renderObjectId(end)}`, {
++ completedBatch: true,
++ })
+ start = end
+ }
++ await trackProgress('Completed all batches', { completedAll: true })
+ return updated
+ } finally {
+ BATCHED_UPDATE_RUNNING = false
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index 8f28e8a4d78..2b54fdb1687 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -274,9 +274,16 @@ logger.initialize('file-migration', {
+ })
+
+ let lastElapsedTime = 0
+-async function trackProgress(progress) {
++async function trackProgress(progress, options = {}) {
++ if (OUTPUT_FILE === '-') {
++ return // skip progress tracking when logging to stdout
++ }
++ if (options.completedAll) {
++ process.stdout.write('\n')
++ return
++ }
+ const elapsedTime = Math.floor((performance.now() - processStart) / 1000)
+- if (lastElapsedTime === elapsedTime) {
++ if (lastElapsedTime === elapsedTime && !options.completedBatch) {
+ // Avoid spamming the console with the same progress message
+ return
+ }
+@@ -1305,7 +1312,7 @@ async function processNonDeletedProjects() {
+ } finally {
+ await waitForDeferredQueues()
+ }
+- console.warn('\nDone updating live projects')
++ console.warn('Done updating live projects')
+ }
+
+ async function processDeletedProjects() {
+@@ -1334,7 +1341,7 @@ async function processDeletedProjects() {
+ } finally {
+ await waitForDeferredQueues()
+ }
+- console.warn('\nDone updating deleted projects')
++ console.warn('Done updating deleted projects')
+ }
+
+ async function main() {
+@@ -1381,7 +1388,9 @@ try {
+
+ let code = 0
+ if (STATS.filesFailed > 0) {
+- console.warn('Some files could not be processed, see logs and try again')
++ console.warn(
++ `Some files could not be processed, see logs in ${OUTPUT_FILE} and try again`
++ )
+ code++
+ }
+ if (STATS.fileHardDeleted > 0) {
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index 2b54fdb1687..fc46f245d1a 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -525,7 +525,7 @@ function computeDiff(nextEventLoopStats, now) {
+ function printStats(isLast = false) {
+ const now = performance.now()
+ const nextEventLoopStats = performance.eventLoopUtilization()
+- const logLine = JSON.stringify({
++ const logLine = {
+ time: new Date(),
+ LOGGING_IDENTIFIER,
+ ...STATS,
+@@ -533,11 +533,11 @@ function printStats(isLast = false) {
+ eventLoop: nextEventLoopStats,
+ diff: computeDiff(nextEventLoopStats, now),
+ deferredBatches: Array.from(deferredBatches.keys()),
+- })
+- if (isLast) {
+- console.warn(logLine)
++ }
++ if (isLast && OUTPUT_FILE === '-') {
++ console.warn(JSON.stringify(logLine))
+ } else {
+- console.log(logLine)
++ logger.info(logLine, 'file-migration stats')
+ }
+ lastEventLoopStats = nextEventLoopStats
+ lastLog = Object.assign({}, STATS)
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index fc46f245d1a..4a4d93d902c 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -92,7 +92,7 @@ function parseArgs() {
+ { name: 'all', alias: 'a', type: Boolean },
+ { name: 'projects', type: Boolean },
+ { name: 'deleted-projects', type: Boolean },
+- { name: 'include-hashed-files', type: Boolean },
++ { name: 'skip-hashed-files', type: Boolean },
+ { name: 'skip-existing-blobs', type: Boolean },
+ { name: 'from-file', alias: 'f', type: String, defaultValue: '' },
+ { name: 'dry-run', alias: 'n', type: Boolean },
+@@ -135,7 +135,7 @@ Project selection options:
+ --from-file , -f Process selected projects ids from file
+
+ File selection options:
+- --include-hashed-files Process files that already have a hash
++ --skip-hashed-files Skip processing files that already have a hash
+ --skip-existing-blobs Skip processing files already in the blob store
+
+ Logging options:
+@@ -161,8 +161,7 @@ Typical usage:
+
+ is equivalent to
+
+- node back_fill_file_hash.mjs --projects --deleted-projects \\
+- --include-hashed-files
++ node back_fill_file_hash.mjs --projects --deleted-projects
+ `)
+ process.exit(0)
+ }
+@@ -199,7 +198,6 @@ is equivalent to
+ if (args.all) {
+ args.projects = true
+ args['deleted-projects'] = true
+- args['include-hashed-files'] = true
+ }
+
+ const BATCH_RANGE_START = objectIdFromInput(args.BATCH_RANGE_START).toString()
+@@ -207,7 +205,7 @@ is equivalent to
+ return {
+ PROCESS_NON_DELETED_PROJECTS: args.projects,
+ PROCESS_DELETED_PROJECTS: args['deleted-projects'],
+- PROCESS_HASHED_FILES: args['include-hashed-files'],
++ PROCESS_HASHED_FILES: !args['skip-hashed-files'],
+ PROCESS_BLOBS: !args['skip-existing-blobs'],
+ DRY_RUN: args['dry-run'],
+ OUTPUT_FILE: args.output,
+diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
+index 117352d6164..a95bcbabd7e 100644
+--- a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
++++ b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
+@@ -855,7 +855,7 @@ describe('back_fill_file_hash script', function () {
+ // Practically, this is slow and moving it to the end of the tests gets us there most of the way.
+ it('should process nothing on re-run', async function () {
+ const rerun = await runScript(
+- processHashedFiles ? ['--include-hashed-files'] : [],
++ !processHashedFiles ? ['--skip-hashed-files'] : [],
+ {},
+ false
+ )
+@@ -981,7 +981,7 @@ describe('back_fill_file_hash script', function () {
+ it('should gracefully handle fatal errors', async function () {
+ mockFilestore.deleteObject(projectId0, fileId0)
+ const t0 = Date.now()
+- const { stats, result } = await tryRunScript([], {
++ const { stats, result } = await tryRunScript(['--skip-hashed-files'], {
+ RETRIES: '10',
+ RETRY_DELAY_MS: '1000',
+ })
+@@ -1016,7 +1016,7 @@ describe('back_fill_file_hash script', function () {
+ value: { stats, result },
+ },
+ ] = await Promise.allSettled([
+- tryRunScript([], {
++ tryRunScript(['--skip-hashed-files'], {
+ RETRY_DELAY_MS: '100',
+ RETRIES: '60',
+ RETRY_FILESTORE_404: 'true', // 404s are the easiest to simulate in tests
+@@ -1042,7 +1042,7 @@ describe('back_fill_file_hash script', function () {
+ let output
+ before('prepare environment', prepareEnvironment)
+ before('run script', async function () {
+- output = await runScript([], {
++ output = await runScript(['--skip-hashed-files'], {
+ CONCURRENCY: '1',
+ })
+ })
+@@ -1111,10 +1111,10 @@ describe('back_fill_file_hash script', function () {
+ let output1, output2
+ before('prepare environment', prepareEnvironment)
+ before('run script without hashed files', async function () {
+- output1 = await runScript([], {})
++ output1 = await runScript(['--skip-hashed-files'], {})
+ })
+ before('run script with hashed files', async function () {
+- output2 = await runScript(['--include-hashed-files'], {})
++ output2 = await runScript([], {})
+ })
+ it('should print stats for the first run without hashed files', function () {
+ expect(output1.stats).deep.equal(STATS_ALL)
+@@ -1134,7 +1134,7 @@ describe('back_fill_file_hash script', function () {
+ let output
+ before('prepare environment', prepareEnvironment)
+ before('run script', async function () {
+- output = await runScript([], {
++ output = await runScript(['--skip-hashed-files'], {
+ CONCURRENCY: '10',
+ })
+ })
+@@ -1148,7 +1148,7 @@ describe('back_fill_file_hash script', function () {
+ let output
+ before('prepare environment', prepareEnvironment)
+ before('run script', async function () {
+- output = await runScript([], {
++ output = await runScript(['--skip-hashed-files'], {
+ STREAM_HIGH_WATER_MARK: (1024 * 1024).toString(),
+ })
+ })
+@@ -1162,7 +1162,7 @@ describe('back_fill_file_hash script', function () {
+ let output
+ before('prepare environment', prepareEnvironment)
+ before('run script', async function () {
+- output = await runScript(['--include-hashed-files'], {})
++ output = await runScript([], {})
+ })
+ it('should print stats', function () {
+ expect(output.stats).deep.equal(
+@@ -1191,7 +1191,7 @@ describe('back_fill_file_hash script', function () {
+ })
+ let output
+ before('run script', async function () {
+- output = await runScript([], {
++ output = await runScript(['--skip-hashed-files'], {
+ CONCURRENCY: '1',
+ })
+ })
+@@ -1212,14 +1212,20 @@ describe('back_fill_file_hash script', function () {
+ let outputPart0, outputPart1
+ before('prepare environment', prepareEnvironment)
+ before('run script on part 0', async function () {
+- outputPart0 = await runScript([`--BATCH_RANGE_END=${edge}`], {
+- CONCURRENCY: '1',
+- })
++ outputPart0 = await runScript(
++ ['--skip-hashed-files', `--BATCH_RANGE_END=${edge}`],
++ {
++ CONCURRENCY: '1',
++ }
++ )
+ })
+ before('run script on part 1', async function () {
+- outputPart1 = await runScript([`--BATCH_RANGE_START=${edge}`], {
+- CONCURRENCY: '1',
+- })
++ outputPart1 = await runScript(
++ ['--skip-hashed-files', `--BATCH_RANGE_START=${edge}`],
++ {
++ CONCURRENCY: '1',
++ }
++ )
+ })
+
+ it('should print stats for part 0', function () {
+@@ -1264,10 +1270,16 @@ describe('back_fill_file_hash script', function () {
+
+ let outputPart0, outputPart1
+ before('run script on part 0', async function () {
+- outputPart0 = await runScript([`--from-file=${path0}`])
++ outputPart0 = await runScript([
++ '--skip-hashed-files',
++ `--from-file=${path0}`,
++ ])
+ })
+ before('run script on part 1', async function () {
+- outputPart1 = await runScript([`--from-file=${path1}`])
++ outputPart1 = await runScript([
++ '--skip-hashed-files',
++ `--from-file=${path1}`,
++ ])
+ })
+
+ /**
+
+
+
+diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
+index a95bcbabd7e..fc6941bd7bb 100644
+--- a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
++++ b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
+@@ -975,7 +975,7 @@ describe('back_fill_file_hash script', function () {
+ STATS_UP_FROM_PROJECT1_ONWARD
+ )
+
+- describe('error cases', () => {
++ describe('error cases', function () {
+ beforeEach('prepare environment', prepareEnvironment)
+
+ it('should gracefully handle fatal errors', async function () {
+@@ -1237,7 +1237,7 @@ describe('back_fill_file_hash script', function () {
+ commonAssertions()
+ })
+
+- describe('projectIds from file', () => {
++ describe('projectIds from file', function () {
+ const path0 = '/tmp/project-ids-0.txt'
+ const path1 = '/tmp/project-ids-1.txt'
+ before('prepare environment', prepareEnvironment)
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index 4a4d93d902c..375e582c331 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -555,7 +555,7 @@ function handleSignal() {
+
+ /**
+ * @param {QueueEntry} entry
+- * @return {Promise<string>}
++ * @return {Promise<string | undefined>}
+ */
+ async function processFileWithCleanup(entry) {
+ const {
+@@ -578,7 +578,7 @@ async function processFileWithCleanup(entry) {
+ /**
+ * @param {QueueEntry} entry
+ * @param {string} filePath
+- * @return {Promise<string>}
++ * @return {Promise<string | undefined>}
+ */
+ async function processFile(entry, filePath) {
+ for (let attempt = 0; attempt < RETRIES; attempt++) {
+@@ -612,7 +612,7 @@ async function processFile(entry, filePath) {
+ /**
+ * @param {QueueEntry} entry
+ * @param {string} filePath
+- * @return {Promise<string>}
++ * @return {Promise<string | undefined>}
+ */
+ async function processFileOnce(entry, filePath) {
+ const {
+@@ -627,10 +627,7 @@ async function processFileOnce(entry, filePath) {
+ return entry.hash
+ }
+ if (DRY_RUN) {
+- console.log(
+- `DRY-RUN: would process file ${fileId} for project ${projectId}`
+- )
+- return 'dry-run'
++ return // skip processing in dry-run mode by returning undefined
+ }
+ const blobStore = new BlobStore(historyId)
+ STATS.readFromGCSCount++
+@@ -843,6 +840,9 @@ async function handleDeletedFileTreeBatch(batch) {
+ * @return {Promise}
+ */
+ async function tryUpdateFileRefInMongo(entry) {
++ if (DRY_RUN) {
++ return true // skip mongo updates in dry-run mode
++ }
+ if (entry.path.startsWith('project.')) {
+ return await tryUpdateFileRefInMongoInDeletedProject(entry)
+ }
+@@ -865,6 +865,9 @@ async function tryUpdateFileRefInMongo(entry) {
+ * @return {Promise}
+ */
+ async function tryUpdateFileRefInMongoInDeletedProject(entry) {
++ if (DRY_RUN) {
++ return true // skip mongo updates in dry-run mode
++ }
+ STATS.mongoUpdates++
+ const result = await deletedProjectsCollection.updateOne(
+ {
+@@ -1165,6 +1168,7 @@ class ProjectContext {
+ */
+ async #tryBatchHashWrites(collection, entries, query) {
+ if (entries.length === 0) return []
++ if (DRY_RUN) return [] // skip mongo updates in dry-run mode
+ const update = {}
+ for (const entry of entries) {
+ query[`${entry.path}._id`] = new ObjectId(entry.fileId)
+@@ -1210,7 +1214,7 @@ class ProjectContext {
+ }
+ }
+
+- /** @type {Map<string, Promise<string>>} */
++ /** @type {Map<string, Promise<string | undefined>>} */
+ #pendingFiles = new Map()
+
+ /**
+@@ -1223,7 +1227,12 @@ class ProjectContext {
+ this.#pendingFiles.set(entry.cacheKey, processFileWithCleanup(entry))
+ }
+ try {
+- entry.hash = await this.#pendingFiles.get(entry.cacheKey)
++ const hash = await this.#pendingFiles.get(entry.cacheKey)
++ if (!hash) {
++ return // hash is undefined in dry-run mode
++ } else {
++ entry.hash = hash
++ }
+ } finally {
+ this.remainingQueueEntries--
+ }
+diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
+index fc6941bd7bb..646e75e2b58 100644
+--- a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
++++ b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
+@@ -1130,6 +1130,45 @@ describe('back_fill_file_hash script', function () {
+ commonAssertions(true)
+ })
+
++ describe('full run in dry-run mode', function () {
++ let output
++ before('prepare environment', prepareEnvironment)
++ before('run script', async function () {
++ output = await runScript(
++ ['--dry-run'],
++ {
++ CONCURRENCY: '1',
++ },
++ false
++ )
++ })
++
++ it('should print stats for dry-run mode', function () {
++ // Compute the stats for running the script without dry-run mode.
++ const originalStats = sumStats(STATS_ALL, {
++ ...STATS_FILES_HASHED_EXTRA,
++ readFromGCSCount: 30,
++ readFromGCSIngress: 72,
++ mongoUpdates: 0,
++ filesWithHash: 3,
++ })
++ // In dry-run mode, we expect the stats to be zero except for the
++ // count of projects, blobs, bad file trees, duplicated files
++ // and files with/without hash. All other stats, such as mongoUpdates
++ // and writeToGCSCount, should be zero.
++ const expectedDryRunStats = {
++ ...STATS_ALL_ZERO,
++ projects: originalStats.projects,
++ blobs: originalStats.blobs,
++ badFileTrees: originalStats.badFileTrees,
++ filesDuplicated: originalStats.filesDuplicated,
++ filesWithHash: originalStats.filesWithHash,
++ filesWithoutHash: originalStats.filesWithoutHash,
++ }
++ expect(output.stats).deep.equal(expectedDryRunStats)
++ })
++ })
++
+ describe('full run CONCURRENCY=10', function () {
+ let output
+ before('prepare environment', prepareEnvironment)
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index 375e582c331..85920bcf03a 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -94,7 +94,7 @@ function parseArgs() {
+ { name: 'deleted-projects', type: Boolean },
+ { name: 'skip-hashed-files', type: Boolean },
+ { name: 'skip-existing-blobs', type: Boolean },
+- { name: 'from-file', alias: 'f', type: String, defaultValue: '' },
++ { name: 'from-file', type: String, defaultValue: '' },
+ { name: 'dry-run', alias: 'n', type: Boolean },
+ {
+ name: 'output',
+@@ -132,7 +132,7 @@ Project selection options:
+ --all, -a Process all projects, including deleted ones
+ --projects Process projects (excluding deleted ones)
+ --deleted-projects Process deleted projects
+- --from-file , -f Process selected project ids from file
++ --from-file Process selected project ids from file
+
+ File selection options:
+ --skip-hashed-files Skip processing files that already have a hash
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index 85920bcf03a..092b8f04e43 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -567,10 +567,7 @@ async function processFileWithCleanup(entry) {
+ return await processFile(entry, filePath)
+ } finally {
+ if (!DRY_RUN) {
+- await Promise.all([
+- fs.promises.rm(filePath, { force: true }),
+- fs.promises.rm(filePath + GZ_SUFFIX, { force: true }),
+- ])
++ await fs.promises.rm(filePath, { force: true })
+ }
+ }
+ }
+@@ -697,8 +694,6 @@ async function uploadBlobToGCS(blobStore, entry, blob, hash, filePath) {
+ entry.ctx.recordHistoryBlob(blob)
+ }
+
+-const GZ_SUFFIX = '.gz'
+-
+ /**
+ * @param {Array} files
+ * @return {Promise}
+
+
+
+diff --git a/libraries/mongo-utils/batchedUpdate.js b/libraries/mongo-utils/batchedUpdate.js
+index f1253c587d3..41af41f0d4a 100644
+--- a/libraries/mongo-utils/batchedUpdate.js
++++ b/libraries/mongo-utils/batchedUpdate.js
+@@ -35,7 +35,7 @@ let BATCHED_UPDATE_RUNNING = false
+ * @property {string} [BATCH_RANGE_START]
+ * @property {string} [BATCH_SIZE]
+ * @property {string} [VERBOSE_LOGGING]
+- * @property {(progress: string, options?: object) => Promise} [trackProgress]
++ * @property {(progress: string) => Promise} [trackProgress]
+ */
+
+ /**
+@@ -269,12 +269,9 @@ async function batchedUpdate(
+ await performUpdate(collection, nextBatch, update)
+ }
+ }
+- await trackProgress(`Completed batch ending ${renderObjectId(end)}`, {
+- completedBatch: true,
+- })
++ await trackProgress(`Completed batch ending ${renderObjectId(end)}`)
+ start = end
+ }
+- await trackProgress('Completed all batches', { completedAll: true })
+ return updated
+ } finally {
+ BATCHED_UPDATE_RUNNING = false
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index 092b8f04e43..755443adf52 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -1305,7 +1305,7 @@ async function processNonDeletedProjects() {
+ {
+ BATCH_RANGE_START,
+ BATCH_RANGE_END,
+- trackProgress,
++ trackProgress: async message => {},
+ }
+ )
+ } catch (err) {
+@@ -1335,7 +1335,7 @@ async function processDeletedProjects() {
+ 'project.overleaf.history.id': 1,
+ },
+ {},
+- { trackProgress }
++ { trackProgress: async message => {} }
+ )
+ } catch (err) {
+ gracefulShutdownInitiated = true
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index 755443adf52..4ca17ddf694 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -272,7 +272,7 @@ logger.initialize('file-migration', {
+ })
+
+ let lastElapsedTime = 0
+-async function trackProgress(progress, options = {}) {
++async function displayProgress(options = {}) {
+ if (OUTPUT_FILE === '-') {
+ return // skip progress tracking when logging to stdout
+ }
+@@ -733,6 +733,7 @@ async function waitForDeferredQueues() {
+ // Wait for ALL pending batches to finish, especially wait for their mongo
+ // writes to finish to avoid extra work when resuming the batch.
+ const all = await Promise.allSettled(deferredBatches.values())
++ displayProgress({ completedAll: true })
+ // Now that all batches finished, we can throw if needed.
+ for (const res of all) {
+ if (res.status === 'rejected') {
+@@ -756,6 +757,7 @@ async function queueNextBatch(batch, prefix = 'rootFolder.0') {
+ const deferred = processBatch(batch, prefix)
+ .then(() => {
+ logger.info({ end }, 'actually completed batch')
++ displayProgress({ completedBatch: true })
+ })
+
+ .catch(err => {
+
+
+
+diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+index 4ca17ddf694..8664be21fbe 100644
+--- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs
++++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs
+@@ -1226,7 +1226,11 @@ class ProjectContext {
+ try {
+ const hash = await this.#pendingFiles.get(entry.cacheKey)
+ if (!hash) {
+- return // hash is undefined in dry-run mode
++ if (DRY_RUN) {
++ return // hash is undefined in dry-run mode
++ } else {
++ throw new Error('undefined hash outside dry-run mode')
++ }
+ } else {
+ entry.hash = hash
+ }
+
+
+
+diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
+index 646e75e2b58..43884adbe8f 100644
+--- a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
++++ b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs
+@@ -1132,7 +1132,15 @@ describe('back_fill_file_hash script', function () {
+
+ describe('full run in dry-run mode', function () {
+ let output
++ let projectRecordsBefore
++ let deletedProjectRecordsBefore
+ before('prepare environment', prepareEnvironment)
++ before(async function () {
++ projectRecordsBefore = await projectsCollection.find({}).toArray()
++ deletedProjectRecordsBefore = await deletedProjectsCollection
++ .find({})
++ .toArray()
++ })
+ before('run script', async function () {
+ output = await runScript(
+ ['--dry-run'],
+@@ -1167,6 +1175,14 @@ describe('back_fill_file_hash script', function () {
+ }
+ expect(output.stats).deep.equal(expectedDryRunStats)
+ })
++ it('should not update mongo', async function () {
++ expect(await projectsCollection.find({}).toArray()).to.deep.equal(
++ projectRecordsBefore
++ )
++ expect(await deletedProjectsCollection.find({}).toArray()).to.deep.equal(
++ deletedProjectRecordsBefore
++ )
++ })
+ })
+
+ describe('full run CONCURRENCY=10', function () {
+
diff --git a/server-ce/hotfix/5.5.3/pr_27273.patch b/server-ce/hotfix/5.5.3/pr_27273.patch
new file mode 100644
index 0000000000..b0c0822fb5
--- /dev/null
+++ b/server-ce/hotfix/5.5.3/pr_27273.patch
@@ -0,0 +1,82 @@
+
+
+diff --git a/services/web/frontend/js/features/review-panel-new/components/review-tooltip-menu.tsx b/services/web/frontend/js/features/review-panel-new/components/review-tooltip-menu.tsx
+index f26542ebe909..fb6b68460bdc 100644
+--- a/services/web/frontend/js/features/review-panel-new/components/review-tooltip-menu.tsx
++++ b/services/web/frontend/js/features/review-panel-new/components/review-tooltip-menu.tsx
+@@ -18,7 +18,6 @@ import {
+ reviewTooltipStateField,
+ } from '@/features/source-editor/extensions/review-tooltip'
+ import { EditorView, getTooltip } from '@codemirror/view'
+-import useViewerPermissions from '@/shared/hooks/use-viewer-permissions'
+ import usePreviousValue from '@/shared/hooks/use-previous-value'
+ import { useLayoutContext } from '@/shared/context/layout-context'
+ import { useReviewPanelViewActionsContext } from '../context/review-panel-view-context'
+@@ -35,6 +34,7 @@ import { useEditorPropertiesContext } from '@/features/ide-react/context/editor-
+ import classNames from 'classnames'
+ import useEventListener from '@/shared/hooks/use-event-listener'
+ import useReviewPanelLayout from '../hooks/use-review-panel-layout'
++import { usePermissionsContext } from '@/features/ide-react/context/permissions-context'
+
+ const EDIT_MODE_SWITCH_WIDGET_HEIGHT = 40
+ const CM_LINE_RIGHT_PADDING = 8
+@@ -43,7 +43,7 @@ const TOOLTIP_SHOW_DELAY = 120
+ const ReviewTooltipMenu: FC = () => {
+ const state = useCodeMirrorStateContext()
+ const view = useCodeMirrorViewContext()
+- const isViewer = useViewerPermissions()
++ const permissions = usePermissionsContext()
+ const [show, setShow] = useState(true)
+ const { setView } = useReviewPanelViewActionsContext()
+ const { openReviewPanel } = useReviewPanelLayout()
+@@ -58,7 +58,7 @@ const ReviewTooltipMenu: FC = () => {
+
+ const addComment = useCallback(() => {
+ const { main } = view.state.selection
+- if (main.empty) {
++ if (main.empty || !permissions.comment) {
+ return
+ }
+
+@@ -74,11 +74,11 @@ const ReviewTooltipMenu: FC = () => {
+
+ view.dispatch({ effects })
+ setShow(false)
+- }, [openReviewPanel, setView, setShow, view])
++ }, [view, permissions.comment, openReviewPanel, setView])
+
+ useEventListener('add-new-review-comment', addComment)
+
+- if (isViewer || !show || !tooltipState) {
++ if (!permissions.comment || !show || !tooltipState) {
+ return null
+ }
+
+diff --git a/services/web/frontend/js/features/source-editor/components/toolbar/toolbar-items.tsx b/services/web/frontend/js/features/source-editor/components/toolbar/toolbar-items.tsx
+index 3404976d4462..1811ccc99950 100644
+--- a/services/web/frontend/js/features/source-editor/components/toolbar/toolbar-items.tsx
++++ b/services/web/frontend/js/features/source-editor/components/toolbar/toolbar-items.tsx
+@@ -16,5 +16,6 @@ import { isSplitTestEnabled } from '@/utils/splitTestUtils'
+ import { isMac } from '@/shared/utils/os'
+ import { useProjectContext } from '@/shared/context/project-context'
++import { usePermissionsContext } from '@/features/ide-react/context/permissions-context'
+
+ export const ToolbarItems: FC<{
+ state: EditorState
+@@ -35,6 +36,7 @@ export const ToolbarItems: FC<{
+ useEditorPropertiesContext()
+ const { writefullInstance } = useEditorContext()
+ const { features } = useProjectContext()
++ const permissions = usePermissionsContext()
+ const isActive = withinFormattingCommand(state)
+
+ const symbolPaletteAvailable = getMeta('ol-symbolPaletteAvailable')
+@@ -131,7 +133,7 @@ export const ToolbarItems: FC<{
+ command={commands.wrapInHref}
+ icon="add_link"
+ />
+- {features.trackChangesVisible && (
++ {features.trackChangesVisible && permissions.comment && (
+