From 473f76746558b89681c2416956016baed5f6f41e Mon Sep 17 00:00:00 2001
From: Domagoj Kriskovic
Date: Mon, 5 May 2025 11:49:00 +0200
Subject: [PATCH 001/194] Update event tracking for AI assist payment flow
(#25222)
GitOrigin-RevId: feb7987b1397d70b3a04c797bd2db92e42c325f5
---
.../preview-subscription-change/root.tsx | 37 +++++++++++--------
1 file changed, 22 insertions(+), 15 deletions(-)
diff --git a/services/web/frontend/js/features/subscription/components/preview-subscription-change/root.tsx b/services/web/frontend/js/features/subscription/components/preview-subscription-change/root.tsx
index 513b1b14ba..cb9565e9e4 100644
--- a/services/web/frontend/js/features/subscription/components/preview-subscription-change/root.tsx
+++ b/services/web/frontend/js/features/subscription/components/preview-subscription-change/root.tsx
@@ -1,4 +1,4 @@
-import { useCallback } from 'react'
+import { useCallback, useEffect } from 'react'
import moment from 'moment'
import { useTranslation, Trans } from 'react-i18next'
import {
@@ -30,28 +30,35 @@ function PreviewSubscriptionChange() {
const payNowTask = useAsync()
const location = useLocation()
- const handlePayNowClick = useCallback(() => {
- let addOnSegmentation: Record | null = null
+ useEffect(() => {
if (preview.change.type === 'add-on-purchase') {
- addOnSegmentation = {
- addOn: preview.change.addOn.code,
+ eventTracking.sendMB('preview-subscription-change-view', {
+ plan: preview.change.addOn.code,
upgradeType: 'add-on',
- }
- if (purchaseReferrer) {
- addOnSegmentation.referrer = purchaseReferrer
- }
- eventTracking.sendMB('subscription-change-form-submit', addOnSegmentation)
+ referrer: purchaseReferrer,
+ })
+ }
+ }, [preview.change, purchaseReferrer])
+
+ const handlePayNowClick = useCallback(() => {
+ if (preview.change.type === 'add-on-purchase') {
+ eventTracking.sendMB('subscription-change-form-submit', {
+ plan: preview.change.addOn.code,
+ upgradeType: 'add-on',
+ referrer: purchaseReferrer,
+ })
}
eventTracking.sendMB('assistant-add-on-purchase')
payNowTask
.runAsync(payNow(preview))
.then(() => {
- if (addOnSegmentation) {
- eventTracking.sendMB(
- 'subscription-change-form-success',
- addOnSegmentation
- )
+ if (preview.change.type === 'add-on-purchase') {
+ eventTracking.sendMB('subscription-change-form-success', {
+ plan: preview.change.addOn.code,
+ upgradeType: 'add-on',
+ referrer: purchaseReferrer,
+ })
}
location.replace('/user/subscription/thank-you')
})
From c8a410d3587f6a28319e2f937a41e099d8b27b25 Mon Sep 17 00:00:00 2001
From: M Fahru
Date: Mon, 5 May 2025 05:39:28 -0700
Subject: [PATCH 002/194] Merge pull request #25155 from
overleaf/mf-use-stripe-v18
[web] Upgrade stripe to v18
GitOrigin-RevId: df522f73132e99e38f1716bf33e8ff4881bd5430
---
package-lock.json | 35 +++++++++++++++++++++--------------
services/web/package.json | 2 +-
2 files changed, 22 insertions(+), 15 deletions(-)
diff --git a/package-lock.json b/package-lock.json
index 48f2da293a..639567368b 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -38259,19 +38259,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
- "node_modules/stripe": {
- "version": "17.7.0",
- "resolved": "https://registry.npmjs.org/stripe/-/stripe-17.7.0.tgz",
- "integrity": "sha512-aT2BU9KkizY9SATf14WhhYVv2uOapBWX0OFWF4xvcj1mPaNotlSc2CsxpS4DS46ZueSppmCF5BX1sNYBtwBvfw==",
- "license": "MIT",
- "dependencies": {
- "@types/node": ">=8.1.0",
- "qs": "^6.11.0"
- },
- "engines": {
- "node": ">=12.*"
- }
- },
"node_modules/strnum": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz",
@@ -45549,7 +45536,7 @@
"request": "^2.88.2",
"requestretry": "^7.1.0",
"sanitize-html": "^2.8.1",
- "stripe": "^17.7.0",
+ "stripe": "^18.1.0",
"tough-cookie": "^4.0.0",
"tsscmp": "^1.0.6",
"uid-safe": "^2.1.5",
@@ -46624,6 +46611,26 @@
"resolved": "https://registry.npmjs.org/stream-transform/-/stream-transform-3.2.1.tgz",
"integrity": "sha512-ApK+WTJ5bCOf0A2tlec1qhvr8bGEBM/sgXXB7mysdCYgZJO5DZeaV3h3G+g0HnAQ372P5IhiGqnW29zoLOfTzQ=="
},
+ "services/web/node_modules/stripe": {
+ "version": "18.1.0",
+ "resolved": "https://registry.npmjs.org/stripe/-/stripe-18.1.0.tgz",
+ "integrity": "sha512-MLDiniPTHqcfIT3anyBPmOEcaiDhYa7/jRaNypQ3Rt2SJnayQZBvVbFghIziUCZdltGAndm/ZxVOSw6uuSCDig==",
+ "license": "MIT",
+ "dependencies": {
+ "qs": "^6.11.0"
+ },
+ "engines": {
+ "node": ">=12.*"
+ },
+ "peerDependencies": {
+ "@types/node": ">=12.x.x"
+ },
+ "peerDependenciesMeta": {
+ "@types/node": {
+ "optional": true
+ }
+ }
+ },
"services/web/node_modules/teeny-request": {
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-8.0.2.tgz",
diff --git a/services/web/package.json b/services/web/package.json
index 679c226556..f849507460 100644
--- a/services/web/package.json
+++ b/services/web/package.json
@@ -165,7 +165,7 @@
"request": "^2.88.2",
"requestretry": "^7.1.0",
"sanitize-html": "^2.8.1",
- "stripe": "^17.7.0",
+ "stripe": "^18.1.0",
"tough-cookie": "^4.0.0",
"tsscmp": "^1.0.6",
"uid-safe": "^2.1.5",
From a5e2708eaebf0daa7fd4c05e74783058376e1121 Mon Sep 17 00:00:00 2001
From: Jakob Ackermann
Date: Mon, 5 May 2025 15:24:25 +0200
Subject: [PATCH 003/194] [document-updater] add safe rollback point for
history-ot (#25283)
GitOrigin-RevId: d7230dd14a379a27d2c6ab03a006463a18979d06
---
services/document-updater/app.js | 2 +
services/document-updater/app/js/Errors.js | 2 +
.../document-updater/app/js/RedisManager.js | 7 +++
.../acceptance/js/GettingADocumentTests.js | 58 +++++++++++++++++++
4 files changed, 69 insertions(+)
diff --git a/services/document-updater/app.js b/services/document-updater/app.js
index 65c9895377..f425872da5 100644
--- a/services/document-updater/app.js
+++ b/services/document-updater/app.js
@@ -212,6 +212,8 @@ app.use((error, req, res, next) => {
return res.status(422).json(error.info)
} else if (error instanceof Errors.FileTooLargeError) {
return res.sendStatus(413)
+ } else if (error instanceof Errors.ProjectMigratedToHistoryOTError) {
+ return res.status(422).send(error.message)
} else if (error.statusCode === 413) {
return res.status(413).send('request entity too large')
} else {
diff --git a/services/document-updater/app/js/Errors.js b/services/document-updater/app/js/Errors.js
index a43f69ad35..0416581b30 100644
--- a/services/document-updater/app/js/Errors.js
+++ b/services/document-updater/app/js/Errors.js
@@ -5,6 +5,7 @@ class OpRangeNotAvailableError extends OError {}
class ProjectStateChangedError extends OError {}
class DeleteMismatchError extends OError {}
class FileTooLargeError extends OError {}
+class ProjectMigratedToHistoryOTError extends OError {}
module.exports = {
NotFoundError,
@@ -12,4 +13,5 @@ module.exports = {
ProjectStateChangedError,
DeleteMismatchError,
FileTooLargeError,
+ ProjectMigratedToHistoryOTError,
}
diff --git a/services/document-updater/app/js/RedisManager.js b/services/document-updater/app/js/RedisManager.js
index f8e97f38b4..c361f34165 100644
--- a/services/document-updater/app/js/RedisManager.js
+++ b/services/document-updater/app/js/RedisManager.js
@@ -324,6 +324,13 @@ const RedisManager = {
} catch (e) {
return callback(e)
}
+ if (docLines != null && !Array.isArray(docLines)) {
+ return callback(
+ new Errors.ProjectMigratedToHistoryOTError(
+ 'refusing to process doc that was migrated to history-ot'
+ )
+ )
+ }
version = parseInt(version || 0, 10)
// check doc is in requested project
diff --git a/services/document-updater/test/acceptance/js/GettingADocumentTests.js b/services/document-updater/test/acceptance/js/GettingADocumentTests.js
index 65298932d9..6f52f49fb9 100644
--- a/services/document-updater/test/acceptance/js/GettingADocumentTests.js
+++ b/services/document-updater/test/acceptance/js/GettingADocumentTests.js
@@ -13,6 +13,11 @@ const { expect } = require('chai')
const MockWebApi = require('./helpers/MockWebApi')
const DocUpdaterClient = require('./helpers/DocUpdaterClient')
const DocUpdaterApp = require('./helpers/DocUpdaterApp')
+const Settings = require('@overleaf/settings')
+const docUpdaterRedis = require('@overleaf/redis-wrapper').createClient(
+ Settings.redis.documentupdater
+)
+const Keys = Settings.redis.documentupdater.key_schema
describe('Getting a document', function () {
before(function (done) {
@@ -109,6 +114,59 @@ describe('Getting a document', function () {
})
})
+ describe('when the document is migrated (history-ot)', function () {
+ before(function (done) {
+ ;[this.project_id, this.doc_id] = Array.from([
+ DocUpdaterClient.randomId(),
+ DocUpdaterClient.randomId(),
+ ])
+
+ MockWebApi.insertDoc(this.project_id, this.doc_id, {
+ lines: this.lines,
+ version: this.version,
+ })
+ DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
+ if (error != null) {
+ throw error
+ }
+ sinon.spy(MockWebApi, 'getDocument')
+ docUpdaterRedis.set(
+ Keys.docLines({ doc_id: this.doc_id }),
+ JSON.stringify({ content: this.lines.join('\n') }),
+ err => {
+ if (err) return done(err)
+
+ DocUpdaterClient.getDoc(
+ this.project_id,
+ this.doc_id,
+ (error, res, body) => {
+ if (error) return done(error)
+ this.res = res
+ this.body = body
+ done()
+ }
+ )
+ }
+ )
+ })
+ })
+
+ after(function () {
+ MockWebApi.getDocument.restore()
+ })
+
+ it('should not load the document from the web API', function () {
+ MockWebApi.getDocument.called.should.equal(false)
+ })
+
+ it('should return an error', function () {
+ expect(this.res.statusCode).to.equal(422)
+ expect(this.body).to.equal(
+ 'refusing to process doc that was migrated to history-ot'
+ )
+ })
+ })
+
describe('when the request asks for some recent ops', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
From d95340edbce607198cd83143fa5bc6a942d862ff Mon Sep 17 00:00:00 2001
From: Jimmy Domagala-Tang
Date: Mon, 5 May 2025 09:41:59 -0400
Subject: [PATCH 004/194] Merge pull request #25145 from
overleaf/jdt-wf-premium-src
Add premium source to Writefull entitlement sync
GitOrigin-RevId: bbebd7741efdf40a444768255b4aade857aca602
---
services/web/app/src/models/User.js | 1 +
1 file changed, 1 insertion(+)
diff --git a/services/web/app/src/models/User.js b/services/web/app/src/models/User.js
index c63647e914..d228c46b82 100644
--- a/services/web/app/src/models/User.js
+++ b/services/web/app/src/models/User.js
@@ -196,6 +196,7 @@ const UserSchema = new Schema(
enabled: { type: Boolean, default: null },
autoCreatedAccount: { type: Boolean, default: false },
isPremium: { type: Boolean, default: false },
+ premiumSource: { type: String, default: null },
},
aiErrorAssistant: {
enabled: { type: Boolean, default: true },
From 07b225542660b6fb86a93e790e89c7c53fa7cee7 Mon Sep 17 00:00:00 2001
From: Jakob Ackermann
Date: Tue, 6 May 2025 12:01:42 +0200
Subject: [PATCH 005/194] [misc] cleanup .dockerignore and .gitignore files
(#25312)
- Remove settings ignore, they are inconsistent and break local prettier
- Remove .dockerignore files, only root ignore file is used
- Move .idea/.run/*.swp/coverage to root
- Remove .npmrc entries, we are no longer writing the rc file
- Remove node_modules/.DS_Store, is contained in root
- Remove cruft
GitOrigin-RevId: 3025fd5acaef343312f55149466c638e759a6e1f
---
.../access-token-encryptor/.dockerignore | 1 -
libraries/access-token-encryptor/.gitignore | 46 -------------
libraries/fetch-utils/.dockerignore | 1 -
libraries/fetch-utils/.gitignore | 3 -
libraries/logger/.dockerignore | 1 -
libraries/logger/.gitignore | 3 -
libraries/metrics/.dockerignore | 1 -
libraries/metrics/.gitignore | 3 -
libraries/mongo-utils/.dockerignore | 1 -
libraries/mongo-utils/.gitignore | 3 -
libraries/o-error/.dockerignore | 1 -
libraries/o-error/.gitignore | 5 --
libraries/object-persistor/.dockerignore | 1 -
libraries/object-persistor/.gitignore | 4 --
libraries/overleaf-editor-core/.dockerignore | 1 -
libraries/overleaf-editor-core/.gitignore | 5 --
libraries/promise-utils/.dockerignore | 1 -
libraries/promise-utils/.gitignore | 3 -
libraries/ranges-tracker/.dockerignore | 1 -
libraries/ranges-tracker/.gitignore | 13 ----
libraries/redis-wrapper/.dockerignore | 1 -
libraries/redis-wrapper/.gitignore | 13 ----
libraries/settings/.dockerignore | 1 -
libraries/settings/.gitignore | 5 --
libraries/stream-utils/.dockerignore | 1 -
libraries/stream-utils/.gitignore | 3 -
services/chat/.gitignore | 12 ----
services/clsi/.gitignore | 11 ---
services/contacts/.gitignore | 5 --
services/docstore/.gitignore | 8 ---
services/document-updater/.gitignore | 52 --------------
services/filestore/.gitignore | 51 --------------
services/git-bridge/.gitignore | 49 +------------
services/history-v1/.gitignore | 3 -
services/notifications/.gitignore | 54 ---------------
services/project-history/.gitignore | 8 ---
services/real-time/.gitignore | 5 --
services/web/.gitignore | 68 -------------------
38 files changed, 1 insertion(+), 447 deletions(-)
delete mode 100644 libraries/access-token-encryptor/.dockerignore
delete mode 100644 libraries/access-token-encryptor/.gitignore
delete mode 100644 libraries/fetch-utils/.dockerignore
delete mode 100644 libraries/fetch-utils/.gitignore
delete mode 100644 libraries/logger/.dockerignore
delete mode 100644 libraries/logger/.gitignore
delete mode 100644 libraries/metrics/.dockerignore
delete mode 100644 libraries/metrics/.gitignore
delete mode 100644 libraries/mongo-utils/.dockerignore
delete mode 100644 libraries/mongo-utils/.gitignore
delete mode 100644 libraries/o-error/.dockerignore
delete mode 100644 libraries/o-error/.gitignore
delete mode 100644 libraries/object-persistor/.dockerignore
delete mode 100644 libraries/object-persistor/.gitignore
delete mode 100644 libraries/overleaf-editor-core/.dockerignore
delete mode 100644 libraries/overleaf-editor-core/.gitignore
delete mode 100644 libraries/promise-utils/.dockerignore
delete mode 100644 libraries/promise-utils/.gitignore
delete mode 100644 libraries/ranges-tracker/.dockerignore
delete mode 100644 libraries/ranges-tracker/.gitignore
delete mode 100644 libraries/redis-wrapper/.dockerignore
delete mode 100644 libraries/redis-wrapper/.gitignore
delete mode 100644 libraries/settings/.dockerignore
delete mode 100644 libraries/settings/.gitignore
delete mode 100644 libraries/stream-utils/.dockerignore
delete mode 100644 libraries/stream-utils/.gitignore
delete mode 100644 services/chat/.gitignore
delete mode 100644 services/contacts/.gitignore
delete mode 100644 services/docstore/.gitignore
delete mode 100644 services/document-updater/.gitignore
delete mode 100644 services/history-v1/.gitignore
delete mode 100644 services/notifications/.gitignore
delete mode 100644 services/project-history/.gitignore
delete mode 100644 services/real-time/.gitignore
diff --git a/libraries/access-token-encryptor/.dockerignore b/libraries/access-token-encryptor/.dockerignore
deleted file mode 100644
index c2658d7d1b..0000000000
--- a/libraries/access-token-encryptor/.dockerignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules/
diff --git a/libraries/access-token-encryptor/.gitignore b/libraries/access-token-encryptor/.gitignore
deleted file mode 100644
index 66936c4121..0000000000
--- a/libraries/access-token-encryptor/.gitignore
+++ /dev/null
@@ -1,46 +0,0 @@
-compileFolder
-
-Compiled source #
-###################
-*.com
-*.class
-*.dll
-*.exe
-*.o
-*.so
-
-# Packages #
-############
-# it's better to unpack these files and commit the raw source
-# git has its own built in compression methods
-*.7z
-*.dmg
-*.gz
-*.iso
-*.jar
-*.rar
-*.tar
-*.zip
-
-# Logs and databases #
-######################
-*.log
-*.sql
-*.sqlite
-
-# OS generated files #
-######################
-.DS_Store?
-ehthumbs.db
-Icon?
-Thumbs.db
-
-/node_modules/*
-data/*/*
-
-**.swp
-
-/log.json
-hash_folder
-
-.npmrc
diff --git a/libraries/fetch-utils/.dockerignore b/libraries/fetch-utils/.dockerignore
deleted file mode 100644
index c2658d7d1b..0000000000
--- a/libraries/fetch-utils/.dockerignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules/
diff --git a/libraries/fetch-utils/.gitignore b/libraries/fetch-utils/.gitignore
deleted file mode 100644
index edb0f85350..0000000000
--- a/libraries/fetch-utils/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-
-# managed by monorepo$ bin/update_build_scripts
-.npmrc
diff --git a/libraries/logger/.dockerignore b/libraries/logger/.dockerignore
deleted file mode 100644
index c2658d7d1b..0000000000
--- a/libraries/logger/.dockerignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules/
diff --git a/libraries/logger/.gitignore b/libraries/logger/.gitignore
deleted file mode 100644
index 2006c875a4..0000000000
--- a/libraries/logger/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-node_modules
-
-.npmrc
diff --git a/libraries/metrics/.dockerignore b/libraries/metrics/.dockerignore
deleted file mode 100644
index c2658d7d1b..0000000000
--- a/libraries/metrics/.dockerignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules/
diff --git a/libraries/metrics/.gitignore b/libraries/metrics/.gitignore
deleted file mode 100644
index 2006c875a4..0000000000
--- a/libraries/metrics/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-node_modules
-
-.npmrc
diff --git a/libraries/mongo-utils/.dockerignore b/libraries/mongo-utils/.dockerignore
deleted file mode 100644
index c2658d7d1b..0000000000
--- a/libraries/mongo-utils/.dockerignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules/
diff --git a/libraries/mongo-utils/.gitignore b/libraries/mongo-utils/.gitignore
deleted file mode 100644
index edb0f85350..0000000000
--- a/libraries/mongo-utils/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-
-# managed by monorepo$ bin/update_build_scripts
-.npmrc
diff --git a/libraries/o-error/.dockerignore b/libraries/o-error/.dockerignore
deleted file mode 100644
index c2658d7d1b..0000000000
--- a/libraries/o-error/.dockerignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules/
diff --git a/libraries/o-error/.gitignore b/libraries/o-error/.gitignore
deleted file mode 100644
index cf2f0ad3fb..0000000000
--- a/libraries/o-error/.gitignore
+++ /dev/null
@@ -1,5 +0,0 @@
-.nyc_output
-coverage
-node_modules/
-
-.npmrc
diff --git a/libraries/object-persistor/.dockerignore b/libraries/object-persistor/.dockerignore
deleted file mode 100644
index c2658d7d1b..0000000000
--- a/libraries/object-persistor/.dockerignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules/
diff --git a/libraries/object-persistor/.gitignore b/libraries/object-persistor/.gitignore
deleted file mode 100644
index 6a20893208..0000000000
--- a/libraries/object-persistor/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-/node_modules
-*.swp
-
-.npmrc
diff --git a/libraries/overleaf-editor-core/.dockerignore b/libraries/overleaf-editor-core/.dockerignore
deleted file mode 100644
index c2658d7d1b..0000000000
--- a/libraries/overleaf-editor-core/.dockerignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules/
diff --git a/libraries/overleaf-editor-core/.gitignore b/libraries/overleaf-editor-core/.gitignore
deleted file mode 100644
index 869500a2c7..0000000000
--- a/libraries/overleaf-editor-core/.gitignore
+++ /dev/null
@@ -1,5 +0,0 @@
-/coverage
-/node_modules
-
-# managed by monorepo$ bin/update_build_scripts
-.npmrc
diff --git a/libraries/promise-utils/.dockerignore b/libraries/promise-utils/.dockerignore
deleted file mode 100644
index c2658d7d1b..0000000000
--- a/libraries/promise-utils/.dockerignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules/
diff --git a/libraries/promise-utils/.gitignore b/libraries/promise-utils/.gitignore
deleted file mode 100644
index edb0f85350..0000000000
--- a/libraries/promise-utils/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-
-# managed by monorepo$ bin/update_build_scripts
-.npmrc
diff --git a/libraries/ranges-tracker/.dockerignore b/libraries/ranges-tracker/.dockerignore
deleted file mode 100644
index c2658d7d1b..0000000000
--- a/libraries/ranges-tracker/.dockerignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules/
diff --git a/libraries/ranges-tracker/.gitignore b/libraries/ranges-tracker/.gitignore
deleted file mode 100644
index eac200248b..0000000000
--- a/libraries/ranges-tracker/.gitignore
+++ /dev/null
@@ -1,13 +0,0 @@
-**.swp
-
-app.js
-app/js/
-test/unit/js/
-public/build/
-
-node_modules/
-
-/public/js/chat.js
-plato/
-
-.npmrc
diff --git a/libraries/redis-wrapper/.dockerignore b/libraries/redis-wrapper/.dockerignore
deleted file mode 100644
index c2658d7d1b..0000000000
--- a/libraries/redis-wrapper/.dockerignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules/
diff --git a/libraries/redis-wrapper/.gitignore b/libraries/redis-wrapper/.gitignore
deleted file mode 100644
index eac200248b..0000000000
--- a/libraries/redis-wrapper/.gitignore
+++ /dev/null
@@ -1,13 +0,0 @@
-**.swp
-
-app.js
-app/js/
-test/unit/js/
-public/build/
-
-node_modules/
-
-/public/js/chat.js
-plato/
-
-.npmrc
diff --git a/libraries/settings/.dockerignore b/libraries/settings/.dockerignore
deleted file mode 100644
index c2658d7d1b..0000000000
--- a/libraries/settings/.dockerignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules/
diff --git a/libraries/settings/.gitignore b/libraries/settings/.gitignore
deleted file mode 100644
index 06d8e1ddb2..0000000000
--- a/libraries/settings/.gitignore
+++ /dev/null
@@ -1,5 +0,0 @@
-/.npmrc
-/node_modules
-
-# managed by monorepo$ bin/update_build_scripts
-.npmrc
diff --git a/libraries/stream-utils/.dockerignore b/libraries/stream-utils/.dockerignore
deleted file mode 100644
index c2658d7d1b..0000000000
--- a/libraries/stream-utils/.dockerignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules/
diff --git a/libraries/stream-utils/.gitignore b/libraries/stream-utils/.gitignore
deleted file mode 100644
index edb0f85350..0000000000
--- a/libraries/stream-utils/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-
-# managed by monorepo$ bin/update_build_scripts
-.npmrc
diff --git a/services/chat/.gitignore b/services/chat/.gitignore
deleted file mode 100644
index f0cf94b147..0000000000
--- a/services/chat/.gitignore
+++ /dev/null
@@ -1,12 +0,0 @@
-**.swp
-
-public/build/
-
-node_modules/
-
-plato/
-
-**/*.map
-
-# managed by dev-environment$ bin/update_build_scripts
-.npmrc
diff --git a/services/clsi/.gitignore b/services/clsi/.gitignore
index 360466227e..a85e6b757a 100644
--- a/services/clsi/.gitignore
+++ b/services/clsi/.gitignore
@@ -1,14 +1,3 @@
-**.swp
-node_modules
-test/acceptance/fixtures/tmp
compiles
output
-.DS_Store
-*~
cache
-.vagrant
-config/*
-npm-debug.log
-
-# managed by dev-environment$ bin/update_build_scripts
-.npmrc
diff --git a/services/contacts/.gitignore b/services/contacts/.gitignore
deleted file mode 100644
index 80bac793a7..0000000000
--- a/services/contacts/.gitignore
+++ /dev/null
@@ -1,5 +0,0 @@
-node_modules
-forever
-
-# managed by dev-environment$ bin/update_build_scripts
-.npmrc
diff --git a/services/docstore/.gitignore b/services/docstore/.gitignore
deleted file mode 100644
index 84bf300f7f..0000000000
--- a/services/docstore/.gitignore
+++ /dev/null
@@ -1,8 +0,0 @@
-node_modules
-forever
-
-# managed by dev-environment$ bin/update_build_scripts
-.npmrc
-
-# Jetbrains IDEs
-.idea
diff --git a/services/document-updater/.gitignore b/services/document-updater/.gitignore
deleted file mode 100644
index 624e78f096..0000000000
--- a/services/document-updater/.gitignore
+++ /dev/null
@@ -1,52 +0,0 @@
-compileFolder
-
-Compiled source #
-###################
-*.com
-*.class
-*.dll
-*.exe
-*.o
-*.so
-
-# Packages #
-############
-# it's better to unpack these files and commit the raw source
-# git has its own built in compression methods
-*.7z
-*.dmg
-*.gz
-*.iso
-*.jar
-*.rar
-*.tar
-*.zip
-
-# Logs and databases #
-######################
-*.log
-*.sql
-*.sqlite
-
-# OS generated files #
-######################
-.DS_Store?
-ehthumbs.db
-Icon?
-Thumbs.db
-
-/node_modules/*
-
-
-
-forever/
-
-**.swp
-
-# Redis cluster
-**/appendonly.aof
-**/dump.rdb
-**/nodes.conf
-
-# managed by dev-environment$ bin/update_build_scripts
-.npmrc
diff --git a/services/filestore/.gitignore b/services/filestore/.gitignore
index a2f4b5afb2..1772191882 100644
--- a/services/filestore/.gitignore
+++ b/services/filestore/.gitignore
@@ -1,54 +1,3 @@
-compileFolder
-
-Compiled source #
-###################
-*.com
-*.class
-*.dll
-*.exe
-*.o
-*.so
-
-# Packages #
-############
-# it's better to unpack these files and commit the raw source
-# git has its own built in compression methods
-*.7z
-*.dmg
-*.gz
-*.iso
-*.jar
-*.rar
-*.tar
-*.zip
-
-# Logs and databases #
-######################
-*.log
-*.sql
-*.sqlite
-
-# OS generated files #
-######################
-.DS_Store?
-ehthumbs.db
-Icon?
-Thumbs.db
-
-/node_modules/*
-data/*/*
-
-**/*.map
-cookies.txt
uploads/*
-
user_files/*
template_files/*
-
-**.swp
-
-/log.json
-hash_folder
-
-# managed by dev-environment$ bin/update_build_scripts
-.npmrc
diff --git a/services/git-bridge/.gitignore b/services/git-bridge/.gitignore
index 74a7f43d6e..f35e2ee038 100644
--- a/services/git-bridge/.gitignore
+++ b/services/git-bridge/.gitignore
@@ -1,53 +1,6 @@
-# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
-# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
-
-# Let's not share anything because we're using Maven.
-
-.idea
-*.iml
-
-# User-specific stuff:
-.idea/workspace.xml
-.idea/tasks.xml
-.idea/dictionaries
-.idea/vcs.xml
-.idea/jsLibraryMappings.xml
-
-# Sensitive or high-churn files:
-.idea/dataSources.ids
-.idea/dataSources.xml
-.idea/dataSources.local.xml
-.idea/sqlDataSources.xml
-.idea/dynamic.xml
-.idea/uiDesigner.xml
-
-# Gradle:
-.idea/gradle.xml
-.idea/libraries
-
-# Mongo Explorer plugin:
-.idea/mongoSettings.xml
-
-## File-based project format:
-*.iws
-
-## Plugin-specific files:
-
-# IntelliJ
+# Build output
/out/
target/
-# mpeltonen/sbt-idea plugin
-.idea_modules/
-
-# JIRA plugin
-atlassian-ide-plugin.xml
-
-# Crashlytics plugin (for Android Studio and IntelliJ)
-com_crashlytics_export_strings.xml
-crashlytics.properties
-crashlytics-build.properties
-fabric.properties
-
# Local configuration files
conf/runtime.json
diff --git a/services/history-v1/.gitignore b/services/history-v1/.gitignore
deleted file mode 100644
index edb0f85350..0000000000
--- a/services/history-v1/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-
-# managed by monorepo$ bin/update_build_scripts
-.npmrc
diff --git a/services/notifications/.gitignore b/services/notifications/.gitignore
deleted file mode 100644
index 8a030e9aff..0000000000
--- a/services/notifications/.gitignore
+++ /dev/null
@@ -1,54 +0,0 @@
-Compiled source #
-###################
-*.com
-*.class
-*.dll
-*.exe
-*.o
-*.so
-
-# Packages #
-############
-# it's better to unpack these files and commit the raw source
-# git has its own built in compression methods
-*.7z
-*.dmg
-*.gz
-*.iso
-*.jar
-*.rar
-*.tar
-*.zip
-
-# Logs and databases #
-######################
-*.log
-*.sql
-*.sqlite
-
-# OS generated files #
-######################
-.DS_Store?
-ehthumbs.db
-Icon?
-Thumbs.db
-
-node_modules/*
-data/*
-
-cookies.txt
-UserAndProjectPopulator.coffee
-
-public/stylesheets/style.css
-
-Gemfile.lock
-
-*.swp
-.DS_Store
-
-app/views/external
-
-/modules/
-
-# managed by dev-environment$ bin/update_build_scripts
-.npmrc
diff --git a/services/project-history/.gitignore b/services/project-history/.gitignore
deleted file mode 100644
index 25328fed2e..0000000000
--- a/services/project-history/.gitignore
+++ /dev/null
@@ -1,8 +0,0 @@
-**.swp
-node_modules/
-forever/
-.config
-.npm
-
-# managed by dev-environment$ bin/update_build_scripts
-.npmrc
diff --git a/services/real-time/.gitignore b/services/real-time/.gitignore
deleted file mode 100644
index 80bac793a7..0000000000
--- a/services/real-time/.gitignore
+++ /dev/null
@@ -1,5 +0,0 @@
-node_modules
-forever
-
-# managed by dev-environment$ bin/update_build_scripts
-.npmrc
diff --git a/services/web/.gitignore b/services/web/.gitignore
index 8bd23b7f0a..9946f23ae6 100644
--- a/services/web/.gitignore
+++ b/services/web/.gitignore
@@ -1,51 +1,6 @@
-# Compiled source #
-###################
-*.com
-*.class
-*.dll
-*.exe
-*.o
-*.so
-
-# Packages #
-############
-# it's better to unpack these files and commit the raw source
-# git has its own built in compression methods
-*.7z
-*.dmg
-*.gz
-*.iso
-*.jar
-*.rar
-*.tar
-*.zip
-
-# Logs and databases #
-######################
-*.log
-*.sql
-*.sqlite
-
-# OS generated files #
-######################
-.DS_Store?
-ehthumbs.db
-Icon?
-Thumbs.db
-
-# allow "icons"
-![Ii]cons
-
-node_modules/*
data/*
coverage
-cookies.txt
-requestQueueWorker.js
-TpdsWorker.js
-BackgroundJobsWorker.js
-UserAndProjectPopulator.coffee
-
public/manifest.json
public/js
@@ -54,22 +9,6 @@ public/stylesheets
public/fonts
public/images
-Gemfile.lock
-
-*.swp
-.DS_Store
-
-docker-shared.yml
-
-config/*.coffee
-!config/settings.defaults.coffee
-!config/settings.webpack.coffee
-config/*.js
-!config/settings.defaults.js
-!config/settings.webpack.js
-!config/settings.overrides.saas.js
-!config/settings.overrides.server-pro.js
-
modules/**/Makefile
# Precompiled pug files
@@ -78,13 +17,6 @@ modules/**/Makefile
# Sentry secrets file (injected by CI)
.sentryclirc
-# via dev-environment
-.npmrc
-
-# Intellij
-.idea
-.run
-
# Cypress
cypress/screenshots/
cypress/videos/
From 1cd8eba098aff6b2838d40dd4fb99dcf8a60bcc2 Mon Sep 17 00:00:00 2001
From: David <33458145+davidmcpowell@users.noreply.github.com>
Date: Tue, 6 May 2025 11:51:36 +0100
Subject: [PATCH 006/194] Merge pull request #25249 from
overleaf/dp-chat-message-read
Mark messages as read when opening chat tab
GitOrigin-RevId: d0e3290cad72716cbbdf5b6cc92f6c1d387a92c7
---
.../frontend/js/features/chat/context/chat-context.tsx | 9 ++++++++-
.../js/features/ide-redesign/components/rail.tsx | 9 ++++++++-
2 files changed, 16 insertions(+), 2 deletions(-)
diff --git a/services/web/frontend/js/features/chat/context/chat-context.tsx b/services/web/frontend/js/features/chat/context/chat-context.tsx
index d86171a451..9feca60579 100644
--- a/services/web/frontend/js/features/chat/context/chat-context.tsx
+++ b/services/web/frontend/js/features/chat/context/chat-context.tsx
@@ -20,6 +20,8 @@ import { useIdeContext } from '@/shared/context/ide-context'
import getMeta from '@/utils/meta'
import { debugConsole } from '@/utils/debugging'
import { User } from '../../../../../types/user'
+import { useRailContext } from '@/features/ide-redesign/contexts/rail-context'
+import { useIsNewEditorEnabled } from '@/features/ide-redesign/utils/new-editor-utils'
const PAGE_SIZE = 50
@@ -200,7 +202,12 @@ export const ChatProvider: FC = ({ children }) => {
const user = useUserContext()
const { _id: projectId } = useProjectContext()
- const { chatIsOpen } = useLayoutContext()
+ const { chatIsOpen: chatIsOpenOldEditor } = useLayoutContext()
+ const { selectedTab: selectedRailTab, isOpen: railIsOpen } = useRailContext()
+ const newEditor = useIsNewEditorEnabled()
+ const chatIsOpen = newEditor
+ ? selectedRailTab === 'chat' && railIsOpen
+ : chatIsOpenOldEditor
const {
hasFocus: windowHasFocus,
diff --git a/services/web/frontend/js/features/ide-redesign/components/rail.tsx b/services/web/frontend/js/features/ide-redesign/components/rail.tsx
index 34cb7df3f4..c3e14edfcb 100644
--- a/services/web/frontend/js/features/ide-redesign/components/rail.tsx
+++ b/services/web/frontend/js/features/ide-redesign/components/rail.tsx
@@ -32,6 +32,7 @@ import { HistorySidebar } from '@/features/ide-react/components/history-sidebar'
import DictionarySettingsModal from './settings/editor-settings/dictionary-settings-modal'
import OLTooltip from '@/features/ui/components/ol/ol-tooltip'
import OLIconButton from '@/features/ui/components/ol/ol-icon-button'
+import { useChatContext } from '@/features/chat/context/chat-context'
type RailElement = {
icon: AvailableUnfilledIcon
@@ -91,6 +92,8 @@ export const RailLayout = () => {
const { view, setLeftMenuShown } = useLayoutContext()
+ const { markMessagesAsRead } = useChatContext()
+
const isHistoryView = view === 'history'
const railTabs: RailElement[] = useMemo(
@@ -163,9 +166,13 @@ export const RailLayout = () => {
}
// Change the selected tab and make sure it's open
openTab((key ?? 'file-tree') as RailTabKey)
+
+ if (key === 'chat') {
+ markMessagesAsRead()
+ }
}
},
- [openTab, togglePane, selectedTab, railTabs]
+ [openTab, togglePane, selectedTab, railTabs, markMessagesAsRead]
)
const isReviewPanelOpen = selectedTab === 'review-panel'
From 08c5b11689ff81042930c6779a27ce0ebd55a26a Mon Sep 17 00:00:00 2001
From: David <33458145+davidmcpowell@users.noreply.github.com>
Date: Tue, 6 May 2025 11:51:45 +0100
Subject: [PATCH 007/194] Merge pull request #25248 from
overleaf/dp-history-text-color
Add explicit color for history text
GitOrigin-RevId: 4b595ea824d75181c041d44fc48ea81fec864316
---
.../stylesheets/bootstrap-5/pages/editor/history.scss | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/services/web/frontend/stylesheets/bootstrap-5/pages/editor/history.scss b/services/web/frontend/stylesheets/bootstrap-5/pages/editor/history.scss
index b8e206b6ae..1caeb22c1d 100644
--- a/services/web/frontend/stylesheets/bootstrap-5/pages/editor/history.scss
+++ b/services/web/frontend/stylesheets/bootstrap-5/pages/editor/history.scss
@@ -154,6 +154,10 @@ history-root {
}
}
+ .history-version-main-details {
+ color: var(--content-primary);
+ }
+
.version-element-within-selected {
background-color: var(--bg-light-secondary);
border-left: var(--spacing-02) solid var(--green-50);
From 0261d701a72d1a2b404f5e52e87890c4de7c411b Mon Sep 17 00:00:00 2001
From: David <33458145+davidmcpowell@users.noreply.github.com>
Date: Tue, 6 May 2025 11:51:49 +0100
Subject: [PATCH 008/194] Merge pull request #25238 from overleaf/dp-tooltips
Add tooltip to new editor home button
GitOrigin-RevId: 91f47659caf64a7ee31ed156d4ee2d5c933e05b8
---
.../components/toolbar/toolbar.tsx | 19 +++++++++++++------
1 file changed, 13 insertions(+), 6 deletions(-)
diff --git a/services/web/frontend/js/features/ide-redesign/components/toolbar/toolbar.tsx b/services/web/frontend/js/features/ide-redesign/components/toolbar/toolbar.tsx
index ed1b2509ff..56c597451e 100644
--- a/services/web/frontend/js/features/ide-redesign/components/toolbar/toolbar.tsx
+++ b/services/web/frontend/js/features/ide-redesign/components/toolbar/toolbar.tsx
@@ -11,6 +11,7 @@ import { useLayoutContext } from '@/shared/context/layout-context'
import BackToEditorButton from '@/features/editor-navigation-toolbar/components/back-to-editor-button'
import { useCallback } from 'react'
import * as eventTracking from '../../../../infrastructure/event-tracking'
+import OLTooltip from '@/features/ui/components/ol/ol-tooltip'
export const Toolbar = () => {
const { view, setView } = useLayoutContext()
@@ -45,12 +46,18 @@ const ToolbarMenus = () => {
const { t } = useTranslation()
return (
)
From 42eb4b277989cba3469fc3097648ae8737157aa4 Mon Sep 17 00:00:00 2001
From: David <33458145+davidmcpowell@users.noreply.github.com>
Date: Tue, 6 May 2025 11:51:56 +0100
Subject: [PATCH 009/194] Merge pull request #25320 from
overleaf/dp-review-panel-shortcut
Fix open review panel shortcut in new editor
GitOrigin-RevId: 3e4b65ad1f1943574ba937460722912ff382bc39
---
.../features/ide-redesign/contexts/rail-context.tsx | 12 ++++++++++++
1 file changed, 12 insertions(+)
diff --git a/services/web/frontend/js/features/ide-redesign/contexts/rail-context.tsx b/services/web/frontend/js/features/ide-redesign/contexts/rail-context.tsx
index 51c797fa1d..c02d17fb9b 100644
--- a/services/web/frontend/js/features/ide-redesign/contexts/rail-context.tsx
+++ b/services/web/frontend/js/features/ide-redesign/contexts/rail-context.tsx
@@ -1,4 +1,5 @@
import useCollapsiblePanel from '@/features/ide-react/hooks/use-collapsible-panel'
+import useEventListener from '@/shared/hooks/use-event-listener'
import {
createContext,
Dispatch,
@@ -77,6 +78,17 @@ export const RailProvider: FC = ({ children }) => {
[setIsOpen, setSelectedTab]
)
+ useEventListener(
+ 'ui.toggle-review-panel',
+ useCallback(() => {
+ if (isOpen && selectedTab === 'review-panel') {
+ handlePaneCollapse()
+ } else {
+ openTab('review-panel')
+ }
+ }, [handlePaneCollapse, selectedTab, isOpen, openTab])
+ )
+
const value = useMemo(
() => ({
selectedTab,
From aa97dbdbb6ee92f2a828172b0341398797f5b511 Mon Sep 17 00:00:00 2001
From: Tim Down <158919+timdown@users.noreply.github.com>
Date: Tue, 6 May 2025 11:53:20 +0100
Subject: [PATCH 010/194] Merge pull request #25269 from
overleaf/td-flaky-tags-test
Add waits for flaky tag list test
GitOrigin-RevId: 9d0bf2acd54d07e96fe6837296176e62bf981947
---
.../components/sidebar/tags-list.test.tsx | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/services/web/test/frontend/features/project-list/components/sidebar/tags-list.test.tsx b/services/web/test/frontend/features/project-list/components/sidebar/tags-list.test.tsx
index b8e5768c99..7d68a8f7fe 100644
--- a/services/web/test/frontend/features/project-list/components/sidebar/tags-list.test.tsx
+++ b/services/web/test/frontend/features/project-list/components/sidebar/tags-list.test.tsx
@@ -41,20 +41,20 @@ describe('', function () {
fetchMock.removeRoutes().clearHistory()
})
- it('displays the tags list', function () {
- const header = screen.getByTestId('organize-projects')
+ it('displays the tags list', async function () {
+ const header = await screen.findByTestId('organize-projects')
expect(header.textContent).to.equal('Organize Tags')
- screen.getByRole('button', {
+ await screen.findByRole('button', {
name: 'New Tag',
})
- screen.getByRole('button', {
+ await screen.findByRole('button', {
name: 'Tag 1 (1)',
})
- screen.getByRole('button', {
+ await screen.findByRole('button', {
name: 'Another tag (2)',
})
- screen.getByRole('button', {
+ await screen.findByRole('button', {
name: 'Uncategorized (3)',
})
})
From 5ce1685b5b5a58a46c7a1ae26ed00e0b9f5dc9fb Mon Sep 17 00:00:00 2001
From: Jakob Ackermann
Date: Tue, 6 May 2025 13:32:00 +0200
Subject: [PATCH 011/194] [clsi-cache] shard each zone into three instances
(#25301)
* [clsi-cache] shard per zone into three instances
Keep the old instance as read fallback. We can remove it in 4 days.
Disk size: 2Ti gives us the maximum write throughput of 240MiB/s on a
N2D instance with fewer than 8 vCPUs.
* [clsi] fix format
* [k8s] clsi-cache: bring back storage-classes
* [k8s] clsi-cache: fix reference to zonal storage-classes
* [k8s] clsi-cache: add logging configs
* [clsi] improve sharding
Co-authored-by: Brian Gough
* [clsi] fix sharding
Index needs to be positive.
* [clsi] fix sharding
The random part is static per machine/process.
* [clsi] restrict clsi-cache to user projects
Co-authored-by: Brian Gough
* [k8s] clsi-cache: align CLSI_CACHE_NGINX_HOST with service LB
---------
Co-authored-by: Brian Gough
GitOrigin-RevId: 1efb1b3245c8194c305420b25e774ea735251fb3
---
services/clsi/app/js/CLSICacheHandler.js | 28 ++++++++++++++++---
services/clsi/app/js/CompileController.js | 4 ++-
services/clsi/config/settings.defaults.js | 12 ++++++--
.../test/unit/js/CompileControllerTests.js | 8 ++++++
.../Features/Compile/ClsiCacheController.js | 7 +++--
.../src/Features/Compile/ClsiCacheHandler.js | 21 +++++++-------
.../src/Features/Compile/ClsiCacheManager.js | 16 ++++++-----
.../app/src/Features/Compile/ClsiManager.js | 2 ++
.../src/Features/Compile/CompileController.js | 4 ++-
.../src/Features/Compile/CompileManager.js | 3 ++
.../support/shared/commands/compile.ts | 5 ++--
.../js/features/pdf-preview/util/file-list.ts | 3 +-
.../features/pdf-preview/util/output-files.js | 1 +
.../pdf-preview/util/pdf-caching-transport.js | 6 ++--
services/web/types/compile.ts | 1 +
15 files changed, 87 insertions(+), 34 deletions(-)
diff --git a/services/clsi/app/js/CLSICacheHandler.js b/services/clsi/app/js/CLSICacheHandler.js
index de6f512987..b9415ae3ec 100644
--- a/services/clsi/app/js/CLSICacheHandler.js
+++ b/services/clsi/app/js/CLSICacheHandler.js
@@ -20,6 +20,19 @@ const TIMING_BUCKETS = [
0, 10, 100, 1000, 2000, 5000, 10000, 15000, 20000, 30000,
]
const MAX_ENTRIES_IN_OUTPUT_TAR = 100
+const OBJECT_ID_REGEX = /^[0-9a-f]{24}$/
+
+/**
+ * @param {string} projectId
+ * @return {{shard: string, url: string}}
+ */
+function getShard(projectId) {
+ // [timestamp 4bytes][random per machine 5bytes][counter 3bytes]
+ // [32bit 4bytes]
+ const last4Bytes = Buffer.from(projectId, 'hex').subarray(8, 12)
+ const idx = last4Bytes.readUInt32BE() % Settings.apis.clsiCache.shards.length
+ return Settings.apis.clsiCache.shards[idx]
+}
/**
* @param {string} projectId
@@ -29,6 +42,7 @@ const MAX_ENTRIES_IN_OUTPUT_TAR = 100
* @param {[{path: string}]} outputFiles
* @param {string} compileGroup
* @param {Record} options
+ * @return {string | undefined}
*/
function notifyCLSICacheAboutBuild({
projectId,
@@ -39,14 +53,16 @@ function notifyCLSICacheAboutBuild({
compileGroup,
options,
}) {
- if (!Settings.apis.clsiCache.enabled) return
+ if (!Settings.apis.clsiCache.enabled) return undefined
+ if (!OBJECT_ID_REGEX.test(projectId)) return undefined
+ const { url, shard } = getShard(projectId)
/**
* @param {[{path: string}]} files
*/
const enqueue = files => {
Metrics.count('clsi_cache_enqueue_files', files.length)
- fetchNothing(`${Settings.apis.clsiCache.url}/enqueue`, {
+ fetchNothing(`${url}/enqueue`, {
method: 'POST',
json: {
projectId,
@@ -97,6 +113,8 @@ function notifyCLSICacheAboutBuild({
'build output.tar.gz for clsi cache failed'
)
})
+
+ return shard
}
/**
@@ -155,6 +173,7 @@ async function downloadOutputDotSynctexFromCompileCache(
outputDir
) {
if (!Settings.apis.clsiCache.enabled) return false
+ if (!OBJECT_ID_REGEX.test(projectId)) return false
const timer = new Metrics.Timer(
'clsi_cache_download',
@@ -165,7 +184,7 @@ async function downloadOutputDotSynctexFromCompileCache(
let stream
try {
stream = await fetchStream(
- `${Settings.apis.clsiCache.url}/project/${projectId}/${
+ `${getShard(projectId).url}/project/${projectId}/${
userId ? `user/${userId}/` : ''
}build/${editorId}-${buildId}/search/output/output.synctex.gz`,
{
@@ -205,8 +224,9 @@ async function downloadOutputDotSynctexFromCompileCache(
*/
async function downloadLatestCompileCache(projectId, userId, compileDir) {
if (!Settings.apis.clsiCache.enabled) return false
+ if (!OBJECT_ID_REGEX.test(projectId)) return false
- const url = `${Settings.apis.clsiCache.url}/project/${projectId}/${
+ const url = `${getShard(projectId).url}/project/${projectId}/${
userId ? `user/${userId}/` : ''
}latest/output/output.tar.gz`
const timer = new Metrics.Timer(
diff --git a/services/clsi/app/js/CompileController.js b/services/clsi/app/js/CompileController.js
index 87a7db6ec2..c698ee2b75 100644
--- a/services/clsi/app/js/CompileController.js
+++ b/services/clsi/app/js/CompileController.js
@@ -112,12 +112,13 @@ function compile(req, res, next) {
buildId = error.buildId
}
+ let clsiCacheShard
if (
status === 'success' &&
request.editorId &&
request.populateClsiCache
) {
- notifyCLSICacheAboutBuild({
+ clsiCacheShard = notifyCLSICacheAboutBuild({
projectId: request.project_id,
userId: request.user_id,
buildId: outputFiles[0].build,
@@ -144,6 +145,7 @@ function compile(req, res, next) {
stats,
timings,
buildId,
+ clsiCacheShard,
outputUrlPrefix: Settings.apis.clsi.outputUrlPrefix,
outputFiles: outputFiles.map(file => ({
url:
diff --git a/services/clsi/config/settings.defaults.js b/services/clsi/config/settings.defaults.js
index 6f16e01a89..17042498db 100644
--- a/services/clsi/config/settings.defaults.js
+++ b/services/clsi/config/settings.defaults.js
@@ -60,9 +60,15 @@ module.exports = {
}`,
},
clsiCache: {
- enabled: !!process.env.CLSI_CACHE_HOST,
- url: `http://${process.env.CLSI_CACHE_HOST}:3044`,
- downloadURL: `http://${process.env.CLSI_CACHE_NGINX_HOST || process.env.CLSI_CACHE_HOST}:8080`,
+ enabled: !!(process.env.CLSI_CACHE_SHARDS || process.env.CLSI_CACHE_HOST),
+ shards: process.env.CLSI_CACHE_SHARDS
+ ? JSON.parse(process.env.CLSI_CACHE_SHARDS)
+ : [
+ {
+ url: `http://${process.env.CLSI_CACHE_HOST}:3044`,
+ shard: 'cache',
+ },
+ ],
},
},
diff --git a/services/clsi/test/unit/js/CompileControllerTests.js b/services/clsi/test/unit/js/CompileControllerTests.js
index e6d21aed9f..506b5f02dd 100644
--- a/services/clsi/test/unit/js/CompileControllerTests.js
+++ b/services/clsi/test/unit/js/CompileControllerTests.js
@@ -129,6 +129,7 @@ describe('CompileController', function () {
url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`,
...file,
})),
+ clsiCacheShard: undefined,
},
})
.should.equal(true)
@@ -156,6 +157,7 @@ describe('CompileController', function () {
url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`,
...file,
})),
+ clsiCacheShard: undefined,
},
})
.should.equal(true)
@@ -203,6 +205,7 @@ describe('CompileController', function () {
url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`,
...file,
})),
+ clsiCacheShard: undefined,
},
})
})
@@ -250,6 +253,7 @@ describe('CompileController', function () {
url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`,
...file,
})),
+ clsiCacheShard: undefined,
},
})
})
@@ -281,6 +285,7 @@ describe('CompileController', function () {
buildId: this.buildId,
stats: this.stats,
timings: this.timings,
+ clsiCacheShard: undefined,
},
})
.should.equal(true)
@@ -315,6 +320,7 @@ describe('CompileController', function () {
timings: this.timings,
// JSON.stringify will omit these undefined values
buildId: undefined,
+ clsiCacheShard: undefined,
},
})
.should.equal(true)
@@ -348,6 +354,7 @@ describe('CompileController', function () {
timings: this.timings,
// JSON.stringify will omit these undefined values
buildId: undefined,
+ clsiCacheShard: undefined,
},
})
.should.equal(true)
@@ -379,6 +386,7 @@ describe('CompileController', function () {
timings: this.timings,
// JSON.stringify will omit these undefined values
buildId: undefined,
+ clsiCacheShard: undefined,
},
})
.should.equal(true)
diff --git a/services/web/app/src/Features/Compile/ClsiCacheController.js b/services/web/app/src/Features/Compile/ClsiCacheController.js
index 9795fd3ef2..42f037985d 100644
--- a/services/web/app/src/Features/Compile/ClsiCacheController.js
+++ b/services/web/app/src/Features/Compile/ClsiCacheController.js
@@ -110,8 +110,8 @@ async function getLatestBuildFromCache(req, res) {
const userId = CompileController._getUserIdForCompile(req)
try {
const {
- internal: { location: metaLocation, zone },
- external: { isUpToDate, allFiles },
+ internal: { location: metaLocation },
+ external: { isUpToDate, allFiles, zone, shard },
} = await ClsiCacheManager.getLatestBuildFromCache(
projectId,
userId,
@@ -153,7 +153,7 @@ async function getLatestBuildFromCache(req, res) {
size,
editorId,
})
- if (clsiServerId !== 'cache') {
+ if (clsiServerId !== shard) {
// Enable PDF caching and attempt to download from VM first.
// (clsi VMs do not have the editorId in the path on disk, omit it).
Object.assign(f, {
@@ -174,6 +174,7 @@ async function getLatestBuildFromCache(req, res) {
outputFiles,
compileGroup,
clsiServerId,
+ clsiCacheShard: shard,
pdfDownloadDomain,
pdfCachingMinChunkSize,
options,
diff --git a/services/web/app/src/Features/Compile/ClsiCacheHandler.js b/services/web/app/src/Features/Compile/ClsiCacheHandler.js
index 54ebd9e259..76b5d50f12 100644
--- a/services/web/app/src/Features/Compile/ClsiCacheHandler.js
+++ b/services/web/app/src/Features/Compile/ClsiCacheHandler.js
@@ -41,7 +41,7 @@ async function clearCache(projectId, userId) {
path += '/output'
await Promise.all(
- Settings.apis.clsiCache.instances.map(async ({ url, zone }) => {
+ Settings.apis.clsiCache.instances.map(async ({ url, shard }) => {
const u = new URL(url)
u.pathname = path
try {
@@ -50,7 +50,7 @@ async function clearCache(projectId, userId) {
signal: AbortSignal.timeout(15_000),
})
} catch (err) {
- throw OError.tag(err, 'clear clsi-cache', { url, zone })
+ throw OError.tag(err, 'clear clsi-cache', { url, shard })
}
})
)
@@ -64,7 +64,7 @@ async function clearCache(projectId, userId) {
* @param buildId
* @param filename
* @param signal
- * @return {Promise<{size: number, zone: string, location: string, lastModified: Date, allFiles: string[]}>}
+ * @return {Promise<{size: number, zone: string, shard: string, location: string, lastModified: Date, allFiles: string[]}>}
*/
async function getOutputFile(
projectId,
@@ -93,7 +93,7 @@ async function getOutputFile(
* @param userId
* @param filename
* @param signal
- * @return {Promise<{size: number, zone: string, location: string, lastModified: Date, allFiles: string[]}>}
+ * @return {Promise<{size: number, zone: string, shard: string, location: string, lastModified: Date, allFiles: string[]}>}
*/
async function getLatestOutputFile(
projectId,
@@ -125,7 +125,7 @@ async function getLatestOutputFile(
* @param userId
* @param path
* @param signal
- * @return {Promise<{size: number, zone: string, location: string, lastModified: Date, allFiles: string[]}>}
+ * @return {Promise<{size: number, zone: string, shard: string, location: string, lastModified: Date, allFiles: string[]}>}
*/
async function getRedirectWithFallback(
projectId,
@@ -135,7 +135,7 @@ async function getRedirectWithFallback(
) {
// Avoid hitting the same instance first all the time.
const instances = _.shuffle(Settings.apis.clsiCache.instances)
- for (const { url, zone } of instances) {
+ for (const { url, shard } of instances) {
const u = new URL(url)
u.pathname = path
try {
@@ -149,6 +149,7 @@ async function getRedirectWithFallback(
return {
location,
zone: headers.get('X-Zone'),
+ shard: headers.get('X-Shard') || 'cache',
lastModified: new Date(headers.get('X-Last-Modified')),
size: parseInt(headers.get('X-Content-Length'), 10),
allFiles: JSON.parse(headers.get('X-All-Files')),
@@ -158,7 +159,7 @@ async function getRedirectWithFallback(
break // No clsi-cache instance has cached something for this project/user.
}
logger.warn(
- { err, projectId, userId, url, zone },
+ { err, projectId, userId, url, shard },
'getLatestOutputFile from clsi-cache failed'
)
// This clsi-cache instance is down, try the next backend.
@@ -178,18 +179,18 @@ async function getRedirectWithFallback(
* @param templateId
* @param templateVersionId
* @param lastUpdated
- * @param zone
+ * @param shard
* @param signal
* @return {Promise}
*/
async function prepareCacheSource(
projectId,
userId,
- { sourceProjectId, templateId, templateVersionId, lastUpdated, zone, signal }
+ { sourceProjectId, templateId, templateVersionId, lastUpdated, shard, signal }
) {
const url = new URL(
`/project/${projectId}/user/${userId}/import-from`,
- Settings.apis.clsiCache.instances.find(i => i.zone === zone).url
+ Settings.apis.clsiCache.instances.find(i => i.shard === shard).url
)
try {
await fetchNothing(url, {
diff --git a/services/web/app/src/Features/Compile/ClsiCacheManager.js b/services/web/app/src/Features/Compile/ClsiCacheManager.js
index 3fe4b987c5..cf0665af56 100644
--- a/services/web/app/src/Features/Compile/ClsiCacheManager.js
+++ b/services/web/app/src/Features/Compile/ClsiCacheManager.js
@@ -1,9 +1,11 @@
+const _ = require('lodash')
const { NotFoundError } = require('../Errors/Errors')
const ClsiCacheHandler = require('./ClsiCacheHandler')
const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler')
const ProjectGetter = require('../Project/ProjectGetter')
const SplitTestHandler = require('../SplitTests/SplitTestHandler')
const UserGetter = require('../User/UserGetter')
+const Settings = require('@overleaf/settings')
/**
* Get the most recent build and metadata
@@ -14,11 +16,11 @@ const UserGetter = require('../User/UserGetter')
* @param userId
* @param filename
* @param signal
- * @return {Promise<{internal: {zone: string, location: string}, external: {isUpToDate: boolean, lastUpdated: Date, size: number, allFiles: string[]}}>}
+ * @return {Promise<{internal: {location: string}, external: {zone: string, shard: string, isUpToDate: boolean, lastUpdated: Date, size: number, allFiles: string[]}}>}
*/
async function getLatestBuildFromCache(projectId, userId, filename, signal) {
const [
- { location, lastModified: lastCompiled, zone, size, allFiles },
+ { location, lastModified: lastCompiled, zone, shard, size, allFiles },
lastUpdatedInRedis,
{ lastUpdated: lastUpdatedInMongo },
] = await Promise.all([
@@ -36,13 +38,14 @@ async function getLatestBuildFromCache(projectId, userId, filename, signal) {
return {
internal: {
location,
- zone,
},
external: {
isUpToDate,
lastUpdated,
size,
allFiles,
+ shard,
+ zone,
},
}
}
@@ -73,12 +76,11 @@ async function prepareClsiCache(
const signal = AbortSignal.timeout(5_000)
let lastUpdated
- let zone = 'b' // populate template data on zone b
+ let shard = _.shuffle(Settings.apis.clsiCache.instances)[0].shard
if (sourceProjectId) {
try {
;({
- internal: { zone },
- external: { lastUpdated },
+ external: { lastUpdated, shard },
} = await getLatestBuildFromCache(
sourceProjectId,
userId,
@@ -95,7 +97,7 @@ async function prepareClsiCache(
sourceProjectId,
templateId,
templateVersionId,
- zone,
+ shard,
lastUpdated,
signal,
})
diff --git a/services/web/app/src/Features/Compile/ClsiManager.js b/services/web/app/src/Features/Compile/ClsiManager.js
index 2e32aa9622..6f11297248 100644
--- a/services/web/app/src/Features/Compile/ClsiManager.js
+++ b/services/web/app/src/Features/Compile/ClsiManager.js
@@ -207,6 +207,7 @@ async function _sendBuiltRequest(projectId, userId, req, options, callback) {
stats: compile.stats,
timings: compile.timings,
outputUrlPrefix: compile.outputUrlPrefix,
+ clsiCacheShard: compile.clsiCacheShard,
}
}
@@ -853,6 +854,7 @@ module.exports = {
'timings',
'outputUrlPrefix',
'buildId',
+ 'clsiCacheShard',
]),
sendExternalRequest: callbackifyMultiResult(sendExternalRequest, [
'status',
diff --git a/services/web/app/src/Features/Compile/CompileController.js b/services/web/app/src/Features/Compile/CompileController.js
index 5d2bbcda3e..9fb9d502a4 100644
--- a/services/web/app/src/Features/Compile/CompileController.js
+++ b/services/web/app/src/Features/Compile/CompileController.js
@@ -192,7 +192,8 @@ module.exports = CompileController = {
stats,
timings,
outputUrlPrefix,
- buildId
+ buildId,
+ clsiCacheShard
) => {
if (error) {
Metrics.inc('compile-error')
@@ -236,6 +237,7 @@ module.exports = CompileController = {
outputFilesArchive,
compileGroup: limits?.compileGroup,
clsiServerId,
+ clsiCacheShard,
validationProblems,
stats,
timings,
diff --git a/services/web/app/src/Features/Compile/CompileManager.js b/services/web/app/src/Features/Compile/CompileManager.js
index 9b5404865a..974f573815 100644
--- a/services/web/app/src/Features/Compile/CompileManager.js
+++ b/services/web/app/src/Features/Compile/CompileManager.js
@@ -86,6 +86,7 @@ async function compile(projectId, userId, options = {}) {
timings,
outputUrlPrefix,
buildId,
+ clsiCacheShard,
} = await ClsiManager.promises.sendRequest(projectId, compileAsUser, options)
return {
@@ -98,6 +99,7 @@ async function compile(projectId, userId, options = {}) {
timings,
outputUrlPrefix,
buildId,
+ clsiCacheShard,
}
}
@@ -184,6 +186,7 @@ module.exports = CompileManager = {
'timings',
'outputUrlPrefix',
'buildId',
+ 'clsiCacheShard',
]),
stopCompile: callbackify(stopCompile),
diff --git a/services/web/cypress/support/shared/commands/compile.ts b/services/web/cypress/support/shared/commands/compile.ts
index 9f7273c403..44ee9c0805 100644
--- a/services/web/cypress/support/shared/commands/compile.ts
+++ b/services/web/cypress/support/shared/commands/compile.ts
@@ -48,6 +48,7 @@ const compileFromCacheResponse = () => {
fromCache: true,
status: 'success',
clsiServerId: 'foo',
+ clsiCacheShard: 'clsi-cache-zone-b-shard-1',
compileGroup: 'priority',
pdfDownloadDomain: 'https://clsi.test-overleaf.com',
outputFiles: outputFiles(),
@@ -166,10 +167,10 @@ export const waitForCompileOutput = ({
} = {}) => {
cy.wait(`@${prefix}-log`)
.its('request.query.clsiserverid')
- .should('eq', cached ? 'cache' : 'foo') // straight from cache if cached
+ .should('eq', cached ? 'clsi-cache-zone-b-shard-1' : 'foo') // straight from cache if cached
cy.wait(`@${prefix}-blg`)
.its('request.query.clsiserverid')
- .should('eq', cached ? 'cache' : 'foo') // straight from cache if cached
+ .should('eq', cached ? 'clsi-cache-zone-b-shard-1' : 'foo') // straight from cache if cached
if (pdf) {
cy.wait(`@${prefix}-pdf`)
.its('request.query.clsiserverid')
diff --git a/services/web/frontend/js/features/pdf-preview/util/file-list.ts b/services/web/frontend/js/features/pdf-preview/util/file-list.ts
index 310fbb55fb..a8a37a9e2b 100644
--- a/services/web/frontend/js/features/pdf-preview/util/file-list.ts
+++ b/services/web/frontend/js/features/pdf-preview/util/file-list.ts
@@ -13,6 +13,7 @@ export function buildFileList(
outputFiles: Map,
{
clsiServerId,
+ clsiCacheShard,
compileGroup,
outputFilesArchive,
fromCache = false,
@@ -24,7 +25,7 @@ export function buildFileList(
const params = new URLSearchParams()
if (fromCache) {
- params.set('clsiserverid', 'cache')
+ params.set('clsiserverid', clsiCacheShard || 'cache')
} else if (clsiServerId) {
params.set('clsiserverid', clsiServerId)
}
diff --git a/services/web/frontend/js/features/pdf-preview/util/output-files.js b/services/web/frontend/js/features/pdf-preview/util/output-files.js
index 6c93a02368..3ee0dc1180 100644
--- a/services/web/frontend/js/features/pdf-preview/util/output-files.js
+++ b/services/web/frontend/js/features/pdf-preview/util/output-files.js
@@ -17,6 +17,7 @@ export function handleOutputFiles(outputFiles, projectId, data) {
if (!outputFile) return null
outputFile.editorId = outputFile.editorId || EDITOR_SESSION_ID
+ outputFile.clsiCacheShard = data.clsiCacheShard || 'cache'
// build the URL for viewing the PDF in the preview UI
const params = new URLSearchParams()
diff --git a/services/web/frontend/js/features/pdf-preview/util/pdf-caching-transport.js b/services/web/frontend/js/features/pdf-preview/util/pdf-caching-transport.js
index 4497b57398..f568c634a4 100644
--- a/services/web/frontend/js/features/pdf-preview/util/pdf-caching-transport.js
+++ b/services/web/frontend/js/features/pdf-preview/util/pdf-caching-transport.js
@@ -116,7 +116,9 @@ export function generatePdfCachingTransportFactory() {
return (
u.pathname.endsWith(
`build/${this.pdfFile.editorId}-${this.pdfFile.build}/output/output.pdf`
- ) && u.searchParams.get('clsiserverid') === 'cache'
+ ) &&
+ (u.searchParams.get('clsiserverid') === 'cache' ||
+ u.searchParams.get('clsiserverid')?.startsWith('clsi-cache-'))
)
}
const canTryFromCache = err => {
@@ -127,7 +129,7 @@ export function generatePdfCachingTransportFactory() {
const getOutputPDFURLFromCache = () => {
if (usesCache(this.url)) return this.url
const u = new URL(this.url)
- u.searchParams.set('clsiserverid', 'cache')
+ u.searchParams.set('clsiserverid', this.pdfFile.clsiCacheShard)
u.pathname = u.pathname.replace(
/build\/[a-f0-9-]+\//,
`build/${this.pdfFile.editorId}-${this.pdfFile.build}/`
diff --git a/services/web/types/compile.ts b/services/web/types/compile.ts
index 541d03149c..3038893529 100644
--- a/services/web/types/compile.ts
+++ b/services/web/types/compile.ts
@@ -23,6 +23,7 @@ export type CompileResponseData = {
outputFiles: CompileOutputFile[]
compileGroup?: string
clsiServerId?: string
+ clsiCacheShard?: string
pdfDownloadDomain?: string
pdfCachingMinChunkSize: number
validationProblems: any
From c3368167d051abaaa61c7fcf27f8b79795518859 Mon Sep 17 00:00:00 2001
From: Alf Eaton
Date: Tue, 6 May 2025 13:43:11 +0100
Subject: [PATCH 012/194] Remove z-index from outline elements (#25265)
GitOrigin-RevId: 39b85a478b71bf42ebb6b886b6ae1b4ed6557570
---
.../frontend/stylesheets/bootstrap-5/pages/editor/outline.scss | 2 --
1 file changed, 2 deletions(-)
diff --git a/services/web/frontend/stylesheets/bootstrap-5/pages/editor/outline.scss b/services/web/frontend/stylesheets/bootstrap-5/pages/editor/outline.scss
index dc83a234bc..cc815ea058 100644
--- a/services/web/frontend/stylesheets/bootstrap-5/pages/editor/outline.scss
+++ b/services/web/frontend/stylesheets/bootstrap-5/pages/editor/outline.scss
@@ -281,7 +281,6 @@
font-size: inherit;
vertical-align: inherit;
position: relative;
- z-index: 1;
color: var(--outline-item-carat-color);
margin-right: calc(var(--spacing-03) * -1);
border-radius: var(--border-radius-base);
@@ -304,7 +303,6 @@
background-color: transparent;
border: 0;
position: relative;
- z-index: 1;
padding: 0 var(--spacing-03);
line-height: var(--spacing-08);
border-radius: var(--border-radius-base);
From bfe42734bcc152c2deb778a22a81fb6fe0a5816c Mon Sep 17 00:00:00 2001
From: Alf Eaton
Date: Tue, 6 May 2025 13:43:33 +0100
Subject: [PATCH 013/194] Merge pull request #25261 from
overleaf/ae-textlayer-layer
Move `will-change: transform` to textLayer
GitOrigin-RevId: 15fdd919da54ed95e115d664156066e6fda36982
---
.../stylesheets/bootstrap-5/pages/editor/pdf.scss | 13 ++++++++++++-
1 file changed, 12 insertions(+), 1 deletion(-)
diff --git a/services/web/frontend/stylesheets/bootstrap-5/pages/editor/pdf.scss b/services/web/frontend/stylesheets/bootstrap-5/pages/editor/pdf.scss
index 8649eacd1c..df5c9e2b77 100644
--- a/services/web/frontend/stylesheets/bootstrap-5/pages/editor/pdf.scss
+++ b/services/web/frontend/stylesheets/bootstrap-5/pages/editor/pdf.scss
@@ -193,7 +193,6 @@
div.pdf-canvas {
background: white;
box-shadow: 0 0 10px rgb(0 0 0 / 50%);
- will-change: transform;
}
div.pdf-canvas.pdfng-empty {
@@ -237,6 +236,18 @@
outline: none;
}
+ /* Avoid slowdown in Safari when text layers are reset on selection change */
+ /* stylelint-disable-next-line selector-class-pattern */
+ .textLayer {
+ will-change: transform;
+ }
+
+ /* Avoid multiple small layers within annotation layer */
+ /* stylelint-disable-next-line selector-class-pattern */
+ .annotationLayer {
+ will-change: transform;
+ }
+
/* Avoids https://github.com/mozilla/pdf.js/issues/13840 in Chrome */
/* stylelint-disable-next-line selector-class-pattern */
.textLayer br::selection {
From 6881ba956a062cbf0e1965d4f79e26dc4051e831 Mon Sep 17 00:00:00 2001
From: Jakob Ackermann
Date: Tue, 6 May 2025 15:19:59 +0200
Subject: [PATCH 014/194] [clsi-cache] only use sharding from updated project
editor tabs (#25326)
GitOrigin-RevId: 1754276bed3186c0536055c983e32476cc90d416
---
services/clsi/app/js/CLSICacheHandler.js | 9 ++++++++-
services/clsi/app/js/CompileController.js | 1 +
services/clsi/app/js/RequestParser.js | 8 ++++++++
services/clsi/config/settings.defaults.js | 1 +
services/web/app/src/Features/Compile/ClsiManager.js | 1 +
.../web/app/src/Features/Compile/CompileController.js | 2 ++
.../frontend/js/features/pdf-preview/util/compiler.js | 1 +
.../web/test/unit/src/Compile/CompileControllerTests.js | 4 ++++
8 files changed, 26 insertions(+), 1 deletion(-)
diff --git a/services/clsi/app/js/CLSICacheHandler.js b/services/clsi/app/js/CLSICacheHandler.js
index b9415ae3ec..73137d23c3 100644
--- a/services/clsi/app/js/CLSICacheHandler.js
+++ b/services/clsi/app/js/CLSICacheHandler.js
@@ -41,6 +41,7 @@ function getShard(projectId) {
* @param {string} editorId
* @param {[{path: string}]} outputFiles
* @param {string} compileGroup
+ * @param {boolean} clsiCacheSharded
* @param {Record} options
* @return {string | undefined}
*/
@@ -51,11 +52,17 @@ function notifyCLSICacheAboutBuild({
editorId,
outputFiles,
compileGroup,
+ clsiCacheSharded,
options,
}) {
if (!Settings.apis.clsiCache.enabled) return undefined
if (!OBJECT_ID_REGEX.test(projectId)) return undefined
- const { url, shard } = getShard(projectId)
+ let { url, shard } = getShard(projectId)
+ if (!clsiCacheSharded) {
+ // Client is not aware of sharding yet.
+ url = Settings.apis.clsiCache.url
+ shard = 'cache'
+ }
/**
* @param {[{path: string}]} files
diff --git a/services/clsi/app/js/CompileController.js b/services/clsi/app/js/CompileController.js
index c698ee2b75..138801890a 100644
--- a/services/clsi/app/js/CompileController.js
+++ b/services/clsi/app/js/CompileController.js
@@ -125,6 +125,7 @@ function compile(req, res, next) {
editorId: request.editorId,
outputFiles,
compileGroup: request.compileGroup,
+ clsiCacheSharded: request.clsiCacheSharded,
options: {
compiler: request.compiler,
draft: request.draft,
diff --git a/services/clsi/app/js/RequestParser.js b/services/clsi/app/js/RequestParser.js
index 4e9d722921..f65d6940c8 100644
--- a/services/clsi/app/js/RequestParser.js
+++ b/services/clsi/app/js/RequestParser.js
@@ -90,6 +90,14 @@ function parse(body, callback) {
type: 'boolean',
}
)
+ response.clsiCacheSharded = _parseAttribute(
+ 'clsiCacheSharded',
+ compile.options.clsiCacheSharded,
+ {
+ default: false,
+ type: 'boolean',
+ }
+ )
response.check = _parseAttribute('check', compile.options.check, {
type: 'string',
})
diff --git a/services/clsi/config/settings.defaults.js b/services/clsi/config/settings.defaults.js
index 17042498db..614644ac7b 100644
--- a/services/clsi/config/settings.defaults.js
+++ b/services/clsi/config/settings.defaults.js
@@ -61,6 +61,7 @@ module.exports = {
},
clsiCache: {
enabled: !!(process.env.CLSI_CACHE_SHARDS || process.env.CLSI_CACHE_HOST),
+ url: `http://${process.env.CLSI_CACHE_HOST}:3044`,
shards: process.env.CLSI_CACHE_SHARDS
? JSON.parse(process.env.CLSI_CACHE_SHARDS)
: [
diff --git a/services/web/app/src/Features/Compile/ClsiManager.js b/services/web/app/src/Features/Compile/ClsiManager.js
index 6f11297248..011ba60759 100644
--- a/services/web/app/src/Features/Compile/ClsiManager.js
+++ b/services/web/app/src/Features/Compile/ClsiManager.js
@@ -781,6 +781,7 @@ function _finaliseRequest(projectId, options, project, docs, files) {
imageName: project.imageName,
draft: Boolean(options.draft),
stopOnFirstError: Boolean(options.stopOnFirstError),
+ clsiCacheSharded: Boolean(options.clsiCacheSharded),
check: options.check,
syncType: options.syncType,
syncState: options.syncState,
diff --git a/services/web/app/src/Features/Compile/CompileController.js b/services/web/app/src/Features/Compile/CompileController.js
index 9fb9d502a4..f1d37e7638 100644
--- a/services/web/app/src/Features/Compile/CompileController.js
+++ b/services/web/app/src/Features/Compile/CompileController.js
@@ -132,12 +132,14 @@ module.exports = CompileController = {
const isAutoCompile = !!req.query.auto_compile
const fileLineErrors = !!req.query.file_line_errors
const stopOnFirstError = !!req.body.stopOnFirstError
+ const clsiCacheSharded = !!req.body.clsiCacheSharded
const userId = SessionManager.getLoggedInUserId(req.session)
const options = {
isAutoCompile,
fileLineErrors,
stopOnFirstError,
editorId: req.body.editorId,
+ clsiCacheSharded,
}
if (req.body.rootDoc_id) {
diff --git a/services/web/frontend/js/features/pdf-preview/util/compiler.js b/services/web/frontend/js/features/pdf-preview/util/compiler.js
index d938cb3893..cd052cb5a9 100644
--- a/services/web/frontend/js/features/pdf-preview/util/compiler.js
+++ b/services/web/frontend/js/features/pdf-preview/util/compiler.js
@@ -110,6 +110,7 @@ export default class DocumentCompiler {
incrementalCompilesEnabled: !this.error,
stopOnFirstError: options.stopOnFirstError,
editorId: EDITOR_SESSION_ID,
+ clsiCacheSharded: true,
}
const data = await postJSON(
diff --git a/services/web/test/unit/src/Compile/CompileControllerTests.js b/services/web/test/unit/src/Compile/CompileControllerTests.js
index aefa197a17..07e433b5af 100644
--- a/services/web/test/unit/src/Compile/CompileControllerTests.js
+++ b/services/web/test/unit/src/Compile/CompileControllerTests.js
@@ -250,6 +250,7 @@ describe('CompileController', function () {
fileLineErrors: false,
stopOnFirstError: false,
editorId: undefined,
+ clsiCacheSharded: false,
}
)
})
@@ -293,6 +294,7 @@ describe('CompileController', function () {
fileLineErrors: false,
stopOnFirstError: false,
editorId: undefined,
+ clsiCacheSharded: false,
}
)
})
@@ -318,6 +320,7 @@ describe('CompileController', function () {
fileLineErrors: false,
stopOnFirstError: false,
editorId: undefined,
+ clsiCacheSharded: false,
}
)
})
@@ -342,6 +345,7 @@ describe('CompileController', function () {
fileLineErrors: false,
stopOnFirstError: false,
editorId: 'the-editor-id',
+ clsiCacheSharded: false,
}
)
})
From f0856c862f0939ceb3635eefa8bd557a9d213d3a Mon Sep 17 00:00:00 2001
From: Antoine Clausse
Date: Tue, 6 May 2025 16:18:02 +0200
Subject: [PATCH 015/194] [web] Migrate `two-factor-authentication` module to
BS5 (#25181)
* Delete unused file error.pug
* Revert-me: Enable 2FA locally
* Migrate 2FA pages to BS5
* Add BS5 notification classes to hydrate-form.js
* Revert "Revert-me: Enable 2FA locally"
This reverts commit 2874bedb05e579623e5beb6cf518aa8608808802.
* Fix: Re-add .text-capitalize on button
* Use `notification` mixin for success state
* Append complex notifications with icons in `showMessages`
* Keep the BS3 version of the notification in `showMessages`, move the BS5 implementation to `createNotificationFromMessageBS5`
Check the Bootstrap version with `!window?.Frontend?.['bootstrap-3']`, which is a bit hacky
* Update breakpoints in 2FA form to leave more room for error notification
* Address PR comments:
* Remove useless `createTextNode`
* Use `isBootstrap5`
* `Setup` -> `Set Up`
GitOrigin-RevId: d7285853ea1191da7711b7bada8d65ff064bc27d
---
.../js/features/form-helpers/hydrate-form.js | 64 +++++++++++++++++++
1 file changed, 64 insertions(+)
diff --git a/services/web/frontend/js/features/form-helpers/hydrate-form.js b/services/web/frontend/js/features/form-helpers/hydrate-form.js
index 9c00dd43ae..3febf861b7 100644
--- a/services/web/frontend/js/features/form-helpers/hydrate-form.js
+++ b/services/web/frontend/js/features/form-helpers/hydrate-form.js
@@ -3,6 +3,7 @@ import { FetchError, postJSON } from '../../infrastructure/fetch-json'
import { canSkipCaptcha, validateCaptchaV2 } from './captcha'
import inputValidator from './input-validator'
import { disableElement, enableElement } from '../utils/disableElement'
+import { isBootstrap5 } from '@/features/utils/bootstrap-5'
// Form helper(s) to handle:
// - Attaching to the relevant form elements
@@ -133,6 +134,66 @@ function hideFormElements(formEl) {
}
}
+/**
+ * Creates a notification element from a message object, with BS5 classes.
+ *
+ * @param {Object} message
+ * @param {'error' | 'success' | 'warning' | 'info'} message.type
+ * @param {string} message.key
+ * @param {string} message.text
+ * @param {string[]} message.hints
+ * @returns {HTMLDivElement}
+ */
+function createNotificationFromMessageBS5(message) {
+ const messageEl = document.createElement('div')
+ messageEl.className = classNames('mb-3 notification', {
+ 'notification-type-error': message.type === 'error',
+ 'notification-type-success': message.type === 'success',
+ 'notification-type-warning': message.type === 'warning',
+ 'notification-type-info': message.type === 'info',
+ })
+ messageEl.setAttribute('aria-live', 'assertive')
+ messageEl.setAttribute('role', message.type === 'error' ? 'alert' : 'status')
+
+ const materialIcon = {
+ info: 'info',
+ success: 'check_circle',
+ error: 'error',
+ warning: 'warning',
+ }[message.type]
+ if (materialIcon) {
+ const iconEl = document.createElement('div')
+ iconEl.className = 'notification-icon'
+ const iconSpan = document.createElement('span')
+ iconSpan.className = 'material-symbols'
+ iconSpan.setAttribute('aria-hidden', 'true')
+ iconSpan.textContent = materialIcon
+ iconEl.append(iconSpan)
+ messageEl.append(iconEl)
+ }
+
+ const contentAndCtaEl = document.createElement('div')
+ contentAndCtaEl.className = 'notification-content-and-cta'
+
+ const contentEl = document.createElement('div')
+ contentEl.className = 'notification-content'
+ contentEl.append(message.text || `Error: ${message.key}`)
+
+ if (message.hints && message.hints.length) {
+ const listEl = document.createElement('ul')
+ message.hints.forEach(hint => {
+ const listItemEl = document.createElement('li')
+ listItemEl.textContent = hint
+ listEl.append(listItemEl)
+ })
+ contentEl.append(listEl)
+ }
+ contentAndCtaEl.append(contentEl)
+ messageEl.append(contentAndCtaEl)
+
+ return messageEl
+}
+
// TODO: remove the showMessages function after every form alerts are updated to use the new style
// TODO: rename showMessagesNewStyle to showMessages after the above is done
function showMessages(formEl, messageBag) {
@@ -157,6 +218,9 @@ function showMessages(formEl, messageBag) {
customErrorElements.forEach(el => {
el.hidden = false
})
+ } else if (isBootstrap5()) {
+ const notification = createNotificationFromMessageBS5(message)
+ messagesEl.append(notification)
} else {
// No custom error element for key on page, append a new error message
const messageEl = document.createElement('div')
From 9a2847dbeefc09ca20d1343741d389f12fcb4d08 Mon Sep 17 00:00:00 2001
From: Antoine Clausse
Date: Tue, 6 May 2025 16:18:14 +0200
Subject: [PATCH 016/194] [web] Add startup metrics (#25277)
* [web] refactor startup sequence
The primary objective here is to call loadGlobalBlobs() only once.
But to get there, we need to reorder things and add extra try/catch
sections to ensure we are not letting the global uncaughtException
handler catch startup errors.
Co-authored-by: Antoine Clausse
* [web] add metrics for startup steps
Co-authored-by: Antoine Clausse
---------
Co-authored-by: Jakob Ackermann
GitOrigin-RevId: c73edea02516e919d55b896588dcd1862835fedf
---
libraries/metrics/initialize.js | 4 +
services/web/app.mjs | 91 ++++++++++++-------
.../web/app/src/infrastructure/Modules.js | 5 +
.../web/app/src/infrastructure/Server.mjs | 4 +
4 files changed, 69 insertions(+), 35 deletions(-)
diff --git a/libraries/metrics/initialize.js b/libraries/metrics/initialize.js
index 1028ee06c3..f1a77666c7 100644
--- a/libraries/metrics/initialize.js
+++ b/libraries/metrics/initialize.js
@@ -5,6 +5,8 @@
* before any other module to support code instrumentation.
*/
+const metricsModuleImportStartTime = performance.now()
+
const APP_NAME = process.env.METRICS_APP_NAME || 'unknown'
const BUILD_VERSION = process.env.BUILD_VERSION
const ENABLE_PROFILE_AGENT = process.env.ENABLE_PROFILE_AGENT === 'true'
@@ -103,3 +105,5 @@ function recordProcessStart() {
const metrics = require('.')
metrics.inc('process_startup')
}
+
+module.exports = { metricsModuleImportStartTime }
diff --git a/services/web/app.mjs b/services/web/app.mjs
index 5ece02cf32..1538e60149 100644
--- a/services/web/app.mjs
+++ b/services/web/app.mjs
@@ -1,5 +1,5 @@
// Metrics must be initialized before importing anything else
-import '@overleaf/metrics/initialize.js'
+import { metricsModuleImportStartTime } from '@overleaf/metrics/initialize.js'
import Modules from './app/src/infrastructure/Modules.js'
import metrics from '@overleaf/metrics'
@@ -20,6 +20,13 @@ import FileWriter from './app/src/infrastructure/FileWriter.js'
import { fileURLToPath } from 'node:url'
import Features from './app/src/infrastructure/Features.js'
+metrics.gauge(
+ 'web_startup',
+ performance.now() - metricsModuleImportStartTime,
+ 1,
+ { path: 'imports' }
+)
+
logger.initialize(process.env.METRICS_APP_NAME || 'web')
logger.logger.serializers.user = Serializers.user
logger.logger.serializers.docs = Serializers.docs
@@ -58,6 +65,29 @@ if (
)
}
+// handle SIGTERM for graceful shutdown in kubernetes
+process.on('SIGTERM', function (signal) {
+ triggerGracefulShutdown(Server.server, signal)
+})
+
+const beforeWaitForMongoAndGlobalBlobs = performance.now()
+try {
+ await Promise.all([
+ mongodb.connectionPromise,
+ mongoose.connectionPromise,
+ HistoryManager.promises.loadGlobalBlobs(),
+ ])
+} catch (err) {
+ logger.fatal({ err }, 'Cannot connect to mongo. Exiting.')
+ process.exit(1)
+}
+metrics.gauge(
+ 'web_startup',
+ performance.now() - beforeWaitForMongoAndGlobalBlobs,
+ 1,
+ { path: 'waitForMongoAndGlobalBlobs' }
+)
+
const port = Settings.port || Settings.internal.web.port || 3000
const host = Settings.internal.web.host || '127.0.0.1'
if (process.argv[1] === fileURLToPath(import.meta.url)) {
@@ -69,42 +99,33 @@ if (process.argv[1] === fileURLToPath(import.meta.url)) {
PlansLocator.ensurePlansAreSetupCorrectly()
- Promise.all([
- mongodb.connectionPromise,
- mongoose.connectionPromise,
- HistoryManager.promises.loadGlobalBlobs(),
- ])
- .then(async () => {
- Server.server.listen(port, host, function () {
- logger.debug(`web starting up, listening on ${host}:${port}`)
- logger.debug(`${http.globalAgent.maxSockets} sockets enabled`)
- // wait until the process is ready before monitoring the event loop
- metrics.event_loop.monitor(logger)
- })
- QueueWorkers.start()
- await Modules.start()
- })
- .catch(err => {
- logger.fatal({ err }, 'Cannot connect to mongo. Exiting.')
- process.exit(1)
- })
+ Server.server.listen(port, host, function () {
+ logger.debug(`web starting up, listening on ${host}:${port}`)
+ logger.debug(`${http.globalAgent.maxSockets} sockets enabled`)
+ // wait until the process is ready before monitoring the event loop
+ metrics.event_loop.monitor(logger)
+
+ // Record metrics for the total startup time before listening on HTTP.
+ metrics.gauge(
+ 'web_startup',
+ performance.now() - metricsModuleImportStartTime,
+ 1,
+ { path: 'metricsModuleImportToHTTPListen' }
+ )
+ })
+ try {
+ QueueWorkers.start()
+ } catch (err) {
+ logger.fatal({ err }, 'failed to start queue processing')
+ }
+ try {
+ await Modules.start()
+ } catch (err) {
+ logger.fatal({ err }, 'failed to start web module background jobs')
+ }
}
// initialise site admin tasks
-Promise.all([
- mongodb.connectionPromise,
- mongoose.connectionPromise,
- HistoryManager.promises.loadGlobalBlobs(),
-])
- .then(() => SiteAdminHandler.initialise())
- .catch(err => {
- logger.fatal({ err }, 'Cannot connect to mongo. Exiting.')
- process.exit(1)
- })
-
-// handle SIGTERM for graceful shutdown in kubernetes
-process.on('SIGTERM', function (signal) {
- triggerGracefulShutdown(Server.server, signal)
-})
+SiteAdminHandler.initialise()
export default Server.server
diff --git a/services/web/app/src/infrastructure/Modules.js b/services/web/app/src/infrastructure/Modules.js
index a21be431c4..f746519612 100644
--- a/services/web/app/src/infrastructure/Modules.js
+++ b/services/web/app/src/infrastructure/Modules.js
@@ -4,6 +4,7 @@ const { promisify, callbackify } = require('util')
const Settings = require('@overleaf/settings')
const Views = require('./Views')
const _ = require('lodash')
+const Metrics = require('@overleaf/metrics')
const MODULE_BASE_PATH = Path.join(__dirname, '/../../../modules')
@@ -15,7 +16,11 @@ let _viewIncludes = {}
async function modules() {
if (!_modulesLoaded) {
+ const beforeLoadModules = performance.now()
await loadModules()
+ Metrics.gauge('web_startup', performance.now() - beforeLoadModules, 1, {
+ path: 'loadModules',
+ })
}
return _modules
}
diff --git a/services/web/app/src/infrastructure/Server.mjs b/services/web/app/src/infrastructure/Server.mjs
index 3c7fd752d6..9e548bdc9e 100644
--- a/services/web/app/src/infrastructure/Server.mjs
+++ b/services/web/app/src/infrastructure/Server.mjs
@@ -372,6 +372,10 @@ if (Settings.enabledServices.includes('web')) {
metrics.injectMetricsRoute(webRouter)
metrics.injectMetricsRoute(privateApiRouter)
+const beforeRouterInitialize = performance.now()
await Router.initialize(webRouter, privateApiRouter, publicApiRouter)
+metrics.gauge('web_startup', performance.now() - beforeRouterInitialize, 1, {
+ path: 'Router.initialize',
+})
export default { app, server }
From 81941ff335f8ca19ab308c958d5c09c410e9d42d Mon Sep 17 00:00:00 2001
From: Antoine Clausse
Date: Tue, 6 May 2025 16:18:47 +0200
Subject: [PATCH 017/194] Update some dependencies so they're compatible with
Node 22 (#25317)
* `"@google-cloud/profiler": "^6.0.3"`
* `bin/npm update pprof`
* `bin/npm update nan`
* `bin/npm update @google-cloud/profiler`
* Ignore false positive of `@typescript-eslint/return-await`
> Returning an awaited value that is not a promise is not allowed
Though the function was promisified
GitOrigin-RevId: 24dbe3e8df2b55c0b9583ac79a61e0956ac3fac0
---
libraries/metrics/package.json | 2 +-
libraries/object-persistor/src/FSPersistor.js | 2 +
package-lock.json | 1355 ++++-------------
3 files changed, 323 insertions(+), 1036 deletions(-)
diff --git a/libraries/metrics/package.json b/libraries/metrics/package.json
index 384e58cfe5..19b566c2b0 100644
--- a/libraries/metrics/package.json
+++ b/libraries/metrics/package.json
@@ -9,7 +9,7 @@
"main": "index.js",
"dependencies": {
"@google-cloud/opentelemetry-cloud-trace-exporter": "^2.1.0",
- "@google-cloud/profiler": "^6.0.0",
+ "@google-cloud/profiler": "^6.0.3",
"@opentelemetry/api": "^1.4.1",
"@opentelemetry/auto-instrumentations-node": "^0.39.1",
"@opentelemetry/exporter-trace-otlp-http": "^0.41.2",
diff --git a/libraries/object-persistor/src/FSPersistor.js b/libraries/object-persistor/src/FSPersistor.js
index 01aab72800..38a81407df 100644
--- a/libraries/object-persistor/src/FSPersistor.js
+++ b/libraries/object-persistor/src/FSPersistor.js
@@ -305,8 +305,10 @@ module.exports = class FSPersistor extends AbstractPersistor {
async _listDirectory(path) {
if (this.useSubdirectories) {
+ // eslint-disable-next-line @typescript-eslint/return-await
return await glob(Path.join(path, '**'))
} else {
+ // eslint-disable-next-line @typescript-eslint/return-await
return await glob(`${path}_*`)
}
}
diff --git a/package-lock.json b/package-lock.json
index 639567368b..fe1225f129 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -209,7 +209,7 @@
"version": "4.2.0",
"dependencies": {
"@google-cloud/opentelemetry-cloud-trace-exporter": "^2.1.0",
- "@google-cloud/profiler": "^6.0.0",
+ "@google-cloud/profiler": "^6.0.3",
"@opentelemetry/api": "^1.4.1",
"@opentelemetry/auto-instrumentations-node": "^0.39.1",
"@opentelemetry/exporter-trace-otlp-http": "^0.41.2",
@@ -232,6 +232,15 @@
"@overleaf/logger": "*"
}
},
+ "libraries/metrics/node_modules/@opentelemetry/api": {
+ "version": "1.9.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz",
+ "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==",
+ "license": "Apache-2.0",
+ "engines": {
+ "node": ">=8.0.0"
+ }
+ },
"libraries/metrics/node_modules/yn": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",
@@ -4888,17 +4897,6 @@
"node": ">=10"
}
},
- "node_modules/@google-cloud/bigquery/node_modules/duplexify": {
- "version": "4.1.2",
- "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.2.tgz",
- "integrity": "sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==",
- "dependencies": {
- "end-of-stream": "^1.4.1",
- "inherits": "^2.0.3",
- "readable-stream": "^3.1.1",
- "stream-shift": "^1.0.0"
- }
- },
"node_modules/@google-cloud/bigquery/node_modules/uuid": {
"version": "8.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
@@ -4926,17 +4924,6 @@
"node": ">=10"
}
},
- "node_modules/@google-cloud/common/node_modules/duplexify": {
- "version": "4.1.2",
- "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.2.tgz",
- "integrity": "sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==",
- "dependencies": {
- "end-of-stream": "^1.4.1",
- "inherits": "^2.0.3",
- "readable-stream": "^3.1.1",
- "stream-shift": "^1.0.0"
- }
- },
"node_modules/@google-cloud/logging": {
"version": "11.1.0",
"resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-11.1.0.tgz",
@@ -5093,246 +5080,202 @@
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
},
"node_modules/@google-cloud/logging-min": {
- "version": "10.4.0",
- "resolved": "https://registry.npmjs.org/@google-cloud/logging-min/-/logging-min-10.4.0.tgz",
- "integrity": "sha512-TcblDYAATO9hHcDcWYFh+vqt3pAV7Qddaih1JK3cpkzLa+BWjD5gAVAWww8W9Wr5yxOX+8CkssanH/xSS4n76Q==",
+ "version": "11.2.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/logging-min/-/logging-min-11.2.0.tgz",
+ "integrity": "sha512-o1mwzi1+9NMEjwYZJ0X3tK64obf9PzPVBAhzEJv65L0h7jVl3Fw7GswtsMUkdUvZexf96vAvlZZMvXB9jAIW2Q==",
+ "license": "Apache-2.0",
"dependencies": {
- "@google-cloud/common": "^4.0.0",
- "@google-cloud/paginator": "^4.0.0",
- "@google-cloud/projectify": "^3.0.0",
- "@google-cloud/promisify": "^3.0.0",
+ "@google-cloud/common": "^5.0.0",
+ "@google-cloud/paginator": "^5.0.0",
+ "@google-cloud/projectify": "^4.0.0",
+ "@google-cloud/promisify": "^4.0.0",
+ "@opentelemetry/api": "^1.7.0",
"arrify": "^2.0.1",
"dot-prop": "^6.0.0",
"eventid": "^2.0.0",
"extend": "^3.0.2",
- "gcp-metadata": "^4.0.0",
- "google-auth-library": "^8.0.2",
- "google-gax": "^3.5.2",
+ "gcp-metadata": "^6.0.0",
+ "google-auth-library": "^9.0.0",
+ "google-gax": "^4.0.3",
"on-finished": "^2.3.0",
"pumpify": "^2.0.1",
"stream-events": "^1.0.5",
"uuid": "^9.0.0"
},
"engines": {
- "node": ">=12.0.0"
+ "node": ">=14.0.0"
}
},
"node_modules/@google-cloud/logging-min/node_modules/@google-cloud/common": {
- "version": "4.0.3",
- "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-4.0.3.tgz",
- "integrity": "sha512-fUoMo5b8iAKbrYpneIRV3z95AlxVJPrjpevxs4SKoclngWZvTXBSGpNisF5+x5m+oNGve7jfB1e6vNBZBUs7Fw==",
+ "version": "5.0.2",
+ "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-5.0.2.tgz",
+ "integrity": "sha512-V7bmBKYQyu0eVG2BFejuUjlBt+zrya6vtsKdY+JxMM/dNntPF41vZ9+LhOshEUH01zOHEqBSvI7Dad7ZS6aUeA==",
+ "license": "Apache-2.0",
"dependencies": {
- "@google-cloud/projectify": "^3.0.0",
- "@google-cloud/promisify": "^3.0.0",
+ "@google-cloud/projectify": "^4.0.0",
+ "@google-cloud/promisify": "^4.0.0",
"arrify": "^2.0.1",
"duplexify": "^4.1.1",
- "ent": "^2.2.0",
"extend": "^3.0.2",
- "google-auth-library": "^8.0.2",
- "retry-request": "^5.0.0",
- "teeny-request": "^8.0.0"
+ "google-auth-library": "^9.0.0",
+ "html-entities": "^2.5.2",
+ "retry-request": "^7.0.0",
+ "teeny-request": "^9.0.0"
},
"engines": {
- "node": ">=12.0.0"
+ "node": ">=14.0.0"
}
},
"node_modules/@google-cloud/logging-min/node_modules/@google-cloud/paginator": {
- "version": "4.0.1",
- "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-4.0.1.tgz",
- "integrity": "sha512-6G1ui6bWhNyHjmbYwavdN7mpVPRBtyDg/bfqBTAlwr413On2TnFNfDxc9UhTJctkgoCDgQXEKiRPLPR9USlkbQ==",
+ "version": "5.0.2",
+ "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-5.0.2.tgz",
+ "integrity": "sha512-DJS3s0OVH4zFDB1PzjxAsHqJT6sKVbRwwML0ZBP9PbU7Yebtu/7SWMRzvO2J3nUi9pRNITCfu4LJeooM2w4pjg==",
+ "license": "Apache-2.0",
"dependencies": {
"arrify": "^2.0.0",
"extend": "^3.0.2"
},
"engines": {
- "node": ">=12.0.0"
+ "node": ">=14.0.0"
}
},
"node_modules/@google-cloud/logging-min/node_modules/@google-cloud/projectify": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-3.0.0.tgz",
- "integrity": "sha512-HRkZsNmjScY6Li8/kb70wjGlDDyLkVk3KvoEo9uIoxSjYLJasGiCch9+PqRVDOCGUFvEIqyogl+BeqILL4OJHA==",
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-4.0.0.tgz",
+ "integrity": "sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA==",
+ "license": "Apache-2.0",
"engines": {
- "node": ">=12.0.0"
+ "node": ">=14.0.0"
}
},
"node_modules/@google-cloud/logging-min/node_modules/@google-cloud/promisify": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-3.0.1.tgz",
- "integrity": "sha512-z1CjRjtQyBOYL+5Qr9DdYIfrdLBe746jRTYfaYU6MeXkqp7UfYs/jX16lFFVzZ7PGEJvqZNqYUEtb1mvDww4pA==",
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-4.1.0.tgz",
+ "integrity": "sha512-G/FQx5cE/+DqBbOpA5jKsegGwdPniU6PuIEMt+qxWgFxvxuFOzVmp6zYchtYuwAWV5/8Dgs0yAmjvNZv3uXLQg==",
+ "license": "Apache-2.0",
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
- "node_modules/@google-cloud/logging-min/node_modules/duplexify": {
- "version": "4.1.2",
- "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.2.tgz",
- "integrity": "sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==",
+ "node_modules/@google-cloud/logging-min/node_modules/@opentelemetry/api": {
+ "version": "1.9.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz",
+ "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==",
+ "license": "Apache-2.0",
+ "engines": {
+ "node": ">=8.0.0"
+ }
+ },
+ "node_modules/@google-cloud/logging-min/node_modules/agent-base": {
+ "version": "7.1.3",
+ "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz",
+ "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 14"
+ }
+ },
+ "node_modules/@google-cloud/logging-min/node_modules/gaxios": {
+ "version": "6.7.1",
+ "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.7.1.tgz",
+ "integrity": "sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ==",
+ "license": "Apache-2.0",
"dependencies": {
- "end-of-stream": "^1.4.1",
- "inherits": "^2.0.3",
- "readable-stream": "^3.1.1",
- "stream-shift": "^1.0.0"
+ "extend": "^3.0.2",
+ "https-proxy-agent": "^7.0.1",
+ "is-stream": "^2.0.0",
+ "node-fetch": "^2.6.9",
+ "uuid": "^9.0.1"
+ },
+ "engines": {
+ "node": ">=14"
+ }
+ },
+ "node_modules/@google-cloud/logging-min/node_modules/gaxios/node_modules/https-proxy-agent": {
+ "version": "7.0.6",
+ "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz",
+ "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==",
+ "license": "MIT",
+ "dependencies": {
+ "agent-base": "^7.1.2",
+ "debug": "4"
+ },
+ "engines": {
+ "node": ">= 14"
}
},
"node_modules/@google-cloud/logging-min/node_modules/gcp-metadata": {
- "version": "4.3.1",
- "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.3.1.tgz",
- "integrity": "sha512-x850LS5N7V1F3UcV7PoupzGsyD6iVwTVvsh3tbXfkctZnBnjW5yu5z1/3k3SehF7TyoTIe78rJs02GMMy+LF+A==",
+ "version": "6.1.1",
+ "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.1.1.tgz",
+ "integrity": "sha512-a4tiq7E0/5fTjxPAaH4jpjkSv/uCaU2p5KC6HVGrvl0cDjA8iBZv4vv1gyzlmK0ZUKqwpOyQMKzZQe3lTit77A==",
+ "license": "Apache-2.0",
"dependencies": {
- "gaxios": "^4.0.0",
+ "gaxios": "^6.1.1",
+ "google-logging-utils": "^0.0.2",
"json-bigint": "^1.0.0"
},
"engines": {
- "node": ">=10"
- }
- },
- "node_modules/@google-cloud/logging-min/node_modules/gcp-metadata/node_modules/gaxios": {
- "version": "4.3.3",
- "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.3.3.tgz",
- "integrity": "sha512-gSaYYIO1Y3wUtdfHmjDUZ8LWaxJQpiavzbF5Kq53akSzvmVg0RfyOcFDbO1KJ/KCGRFz2qG+lS81F0nkr7cRJA==",
- "dependencies": {
- "abort-controller": "^3.0.0",
- "extend": "^3.0.2",
- "https-proxy-agent": "^5.0.0",
- "is-stream": "^2.0.0",
- "node-fetch": "^2.6.7"
- },
- "engines": {
- "node": ">=10"
+ "node": ">=14"
}
},
"node_modules/@google-cloud/logging-min/node_modules/google-auth-library": {
- "version": "8.9.0",
- "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-8.9.0.tgz",
- "integrity": "sha512-f7aQCJODJFmYWN6PeNKzgvy9LI2tYmXnzpNDHEjG5sDNPgGb2FXQyTBnXeSH+PAtpKESFD+LmHw3Ox3mN7e1Fg==",
+ "version": "9.15.1",
+ "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.15.1.tgz",
+ "integrity": "sha512-Jb6Z0+nvECVz+2lzSMt9u98UsoakXxA2HGHMCxh+so3n90XgYWkq5dur19JAJV7ONiJY22yBTyJB1TSkvPq9Ng==",
+ "license": "Apache-2.0",
"dependencies": {
- "arrify": "^2.0.0",
"base64-js": "^1.3.0",
"ecdsa-sig-formatter": "^1.0.11",
- "fast-text-encoding": "^1.0.0",
- "gaxios": "^5.0.0",
- "gcp-metadata": "^5.3.0",
- "gtoken": "^6.1.0",
- "jws": "^4.0.0",
- "lru-cache": "^6.0.0"
- },
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@google-cloud/logging-min/node_modules/google-auth-library/node_modules/gcp-metadata": {
- "version": "5.3.0",
- "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-5.3.0.tgz",
- "integrity": "sha512-FNTkdNEnBdlqF2oatizolQqNANMrcqJt6AAYt99B3y1aLLC8Hc5IOBb+ZnnzllodEEf6xMBp6wRcBbc16fa65w==",
- "dependencies": {
- "gaxios": "^5.0.0",
- "json-bigint": "^1.0.0"
- },
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@google-cloud/logging-min/node_modules/google-gax": {
- "version": "3.6.1",
- "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-3.6.1.tgz",
- "integrity": "sha512-g/lcUjGcB6DSw2HxgEmCDOrI/CByOwqRvsuUvNalHUK2iPPPlmAIpbMbl62u0YufGMr8zgE3JL7th6dCb1Ry+w==",
- "dependencies": {
- "@grpc/grpc-js": "~1.8.0",
- "@grpc/proto-loader": "^0.7.0",
- "@types/long": "^4.0.0",
- "@types/rimraf": "^3.0.2",
- "abort-controller": "^3.0.0",
- "duplexify": "^4.0.0",
- "fast-text-encoding": "^1.0.3",
- "google-auth-library": "^8.0.2",
- "is-stream-ended": "^0.1.4",
- "node-fetch": "^2.6.1",
- "object-hash": "^3.0.0",
- "proto3-json-serializer": "^1.0.0",
- "protobufjs": "7.2.4",
- "protobufjs-cli": "1.1.1",
- "retry-request": "^5.0.0"
- },
- "bin": {
- "compileProtos": "build/tools/compileProtos.js",
- "minifyProtoJson": "build/tools/minify.js"
- },
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@google-cloud/logging-min/node_modules/google-p12-pem": {
- "version": "4.0.1",
- "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-4.0.1.tgz",
- "integrity": "sha512-WPkN4yGtz05WZ5EhtlxNDWPhC4JIic6G8ePitwUWy4l+XPVYec+a0j0Ts47PDtW59y3RwAhUd9/h9ZZ63px6RQ==",
- "dependencies": {
- "node-forge": "^1.3.1"
- },
- "bin": {
- "gp12-pem": "build/src/bin/gp12-pem.js"
- },
- "engines": {
- "node": ">=12.0.0"
- }
- },
- "node_modules/@google-cloud/logging-min/node_modules/gtoken": {
- "version": "6.1.2",
- "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-6.1.2.tgz",
- "integrity": "sha512-4ccGpzz7YAr7lxrT2neugmXQ3hP9ho2gcaityLVkiUecAiwiy60Ii8gRbZeOsXV19fYaRjgBSshs8kXw+NKCPQ==",
- "dependencies": {
- "gaxios": "^5.0.1",
- "google-p12-pem": "^4.0.0",
+ "gaxios": "^6.1.1",
+ "gcp-metadata": "^6.1.0",
+ "gtoken": "^7.0.0",
"jws": "^4.0.0"
},
"engines": {
- "node": ">=12.0.0"
+ "node": ">=14"
}
},
- "node_modules/@google-cloud/logging-min/node_modules/lru-cache": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
- "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
+ "node_modules/@google-cloud/logging-min/node_modules/gtoken": {
+ "version": "7.1.0",
+ "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz",
+ "integrity": "sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==",
+ "license": "MIT",
"dependencies": {
- "yallist": "^4.0.0"
+ "gaxios": "^6.0.0",
+ "jws": "^4.0.0"
},
"engines": {
- "node": ">=10"
- }
- },
- "node_modules/@google-cloud/logging-min/node_modules/object-hash": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz",
- "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==",
- "engines": {
- "node": ">= 6"
+ "node": ">=14.0.0"
}
},
"node_modules/@google-cloud/logging-min/node_modules/retry-request": {
- "version": "5.0.2",
- "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-5.0.2.tgz",
- "integrity": "sha512-wfI3pk7EE80lCIXprqh7ym48IHYdwmAAzESdbU8Q9l7pnRCk9LEhpbOTNKjz6FARLm/Bl5m+4F0ABxOkYUujSQ==",
+ "version": "7.0.2",
+ "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-7.0.2.tgz",
+ "integrity": "sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==",
+ "license": "MIT",
"dependencies": {
- "debug": "^4.1.1",
- "extend": "^3.0.2"
+ "@types/request": "^2.48.8",
+ "extend": "^3.0.2",
+ "teeny-request": "^9.0.0"
},
"engines": {
- "node": ">=12"
+ "node": ">=14"
}
},
"node_modules/@google-cloud/logging-min/node_modules/teeny-request": {
- "version": "8.0.3",
- "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-8.0.3.tgz",
- "integrity": "sha512-jJZpA5He2y52yUhA7pyAGZlgQpcB+xLjcN0eUFxr9c8hP/H7uOXbBNVo/O0C/xVfJLJs680jvkFgVJEEvk9+ww==",
+ "version": "9.0.0",
+ "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz",
+ "integrity": "sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==",
+ "license": "Apache-2.0",
"dependencies": {
"http-proxy-agent": "^5.0.0",
"https-proxy-agent": "^5.0.0",
- "node-fetch": "^2.6.1",
+ "node-fetch": "^2.6.9",
"stream-events": "^1.0.5",
"uuid": "^9.0.0"
},
"engines": {
- "node": ">=12"
+ "node": ">=14"
}
},
"node_modules/@google-cloud/logging-min/node_modules/uuid": {
@@ -5343,15 +5286,11 @@
"https://github.com/sponsors/broofa",
"https://github.com/sponsors/ctavan"
],
+ "license": "MIT",
"bin": {
"uuid": "dist/bin/uuid"
}
},
- "node_modules/@google-cloud/logging-min/node_modules/yallist": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
- "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
- },
"node_modules/@google-cloud/logging/node_modules/@google-cloud/common": {
"version": "5.0.2",
"resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-5.0.2.tgz",
@@ -5426,17 +5365,6 @@
}
}
},
- "node_modules/@google-cloud/logging/node_modules/duplexify": {
- "version": "4.1.3",
- "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz",
- "integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==",
- "dependencies": {
- "end-of-stream": "^1.4.1",
- "inherits": "^2.0.3",
- "readable-stream": "^3.1.1",
- "stream-shift": "^1.0.2"
- }
- },
"node_modules/@google-cloud/logging/node_modules/gaxios": {
"version": "6.6.0",
"resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.6.0.tgz",
@@ -5598,22 +5526,24 @@
}
},
"node_modules/@google-cloud/profiler": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-6.0.0.tgz",
- "integrity": "sha512-EAxPbDiNRidAKOEnlUK3M+CcOlqG+REkUEZKirLtxFwzI/m7LmGqDzQvrVWTOSFSEYJ9qQRRnO+Q1osNGk3NUg==",
+ "version": "6.0.3",
+ "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-6.0.3.tgz",
+ "integrity": "sha512-Ey8li6Vc2CbfEzOTSZaqKolxPMGacxVUQuhChNT0Wi55a3nfImMiiuDgqYw1In/a9Q3Z62O7jUg2L8f1XwMN7Q==",
+ "license": "Apache-2.0",
"dependencies": {
"@google-cloud/common": "^5.0.0",
- "@google-cloud/logging-min": "^10.0.0",
+ "@google-cloud/logging-min": "^11.0.0",
+ "@google-cloud/promisify": "~4.0.0",
"@types/console-log-level": "^1.4.0",
"@types/semver": "^7.0.0",
"console-log-level": "^1.4.0",
"delay": "^5.0.0",
"extend": "^3.0.2",
"gcp-metadata": "^6.0.0",
- "parse-duration": "^1.0.0",
- "pprof": "3.2.1",
+ "ms": "^2.1.3",
+ "pprof": "4.0.0",
"pretty-ms": "^7.0.0",
- "protobufjs": "~7.2.4",
+ "protobufjs": "~7.4.0",
"semver": "^7.0.0",
"teeny-request": "^9.0.0"
},
@@ -5622,18 +5552,19 @@
}
},
"node_modules/@google-cloud/profiler/node_modules/@google-cloud/common": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-5.0.0.tgz",
- "integrity": "sha512-IsbTVr7Ag+04GMT87X738vDs85QU1rMvaesm2wEQrtTbZAR92tGmUQ8/D/kdnYgAi98Q4zmfhF+T8Xs/Lw4zAA==",
+ "version": "5.0.2",
+ "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-5.0.2.tgz",
+ "integrity": "sha512-V7bmBKYQyu0eVG2BFejuUjlBt+zrya6vtsKdY+JxMM/dNntPF41vZ9+LhOshEUH01zOHEqBSvI7Dad7ZS6aUeA==",
+ "license": "Apache-2.0",
"dependencies": {
"@google-cloud/projectify": "^4.0.0",
"@google-cloud/promisify": "^4.0.0",
"arrify": "^2.0.1",
"duplexify": "^4.1.1",
- "ent": "^2.2.0",
"extend": "^3.0.2",
"google-auth-library": "^9.0.0",
- "retry-request": "^6.0.0",
+ "html-entities": "^2.5.2",
+ "retry-request": "^7.0.0",
"teeny-request": "^9.0.0"
},
"engines": {
@@ -5644,6 +5575,7 @@
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-4.0.0.tgz",
"integrity": "sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA==",
+ "license": "Apache-2.0",
"engines": {
"node": ">=14.0.0"
}
@@ -5652,68 +5584,43 @@
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-4.0.0.tgz",
"integrity": "sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g==",
+ "license": "Apache-2.0",
"engines": {
"node": ">=14"
}
},
"node_modules/@google-cloud/profiler/node_modules/agent-base": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.0.tgz",
- "integrity": "sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg==",
- "dependencies": {
- "debug": "^4.3.4"
- },
+ "version": "7.1.3",
+ "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz",
+ "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==",
+ "license": "MIT",
"engines": {
"node": ">= 14"
}
},
- "node_modules/@google-cloud/profiler/node_modules/debug": {
- "version": "4.3.4",
- "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
- "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
- "dependencies": {
- "ms": "2.1.2"
- },
- "engines": {
- "node": ">=6.0"
- },
- "peerDependenciesMeta": {
- "supports-color": {
- "optional": true
- }
- }
- },
- "node_modules/@google-cloud/profiler/node_modules/duplexify": {
- "version": "4.1.2",
- "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.2.tgz",
- "integrity": "sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==",
- "dependencies": {
- "end-of-stream": "^1.4.1",
- "inherits": "^2.0.3",
- "readable-stream": "^3.1.1",
- "stream-shift": "^1.0.0"
- }
- },
"node_modules/@google-cloud/profiler/node_modules/gaxios": {
- "version": "6.1.1",
- "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.1.1.tgz",
- "integrity": "sha512-bw8smrX+XlAoo9o1JAksBwX+hi/RG15J+NTSxmNPIclKC3ZVK6C2afwY8OSdRvOK0+ZLecUJYtj2MmjOt3Dm0w==",
+ "version": "6.7.1",
+ "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.7.1.tgz",
+ "integrity": "sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ==",
+ "license": "Apache-2.0",
"dependencies": {
"extend": "^3.0.2",
"https-proxy-agent": "^7.0.1",
"is-stream": "^2.0.0",
- "node-fetch": "^2.6.9"
+ "node-fetch": "^2.6.9",
+ "uuid": "^9.0.1"
},
"engines": {
"node": ">=14"
}
},
"node_modules/@google-cloud/profiler/node_modules/gaxios/node_modules/https-proxy-agent": {
- "version": "7.0.2",
- "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.2.tgz",
- "integrity": "sha512-NmLNjm6ucYwtcUmL7JQC1ZQ57LmHP4lT15FQ8D61nak1rO6DH+fz5qNK2Ap5UN4ZapYICE3/0KodcLYSPsPbaA==",
+ "version": "7.0.6",
+ "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz",
+ "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==",
+ "license": "MIT",
"dependencies": {
- "agent-base": "^7.0.2",
+ "agent-base": "^7.1.2",
"debug": "4"
},
"engines": {
@@ -5721,11 +5628,13 @@
}
},
"node_modules/@google-cloud/profiler/node_modules/gcp-metadata": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.0.0.tgz",
- "integrity": "sha512-Ozxyi23/1Ar51wjUT2RDklK+3HxqDr8TLBNK8rBBFQ7T85iIGnXnVusauj06QyqCXRFZig8LZC+TUddWbndlpQ==",
+ "version": "6.1.1",
+ "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.1.1.tgz",
+ "integrity": "sha512-a4tiq7E0/5fTjxPAaH4jpjkSv/uCaU2p5KC6HVGrvl0cDjA8iBZv4vv1gyzlmK0ZUKqwpOyQMKzZQe3lTit77A==",
+ "license": "Apache-2.0",
"dependencies": {
- "gaxios": "^6.0.0",
+ "gaxios": "^6.1.1",
+ "google-logging-utils": "^0.0.2",
"json-bigint": "^1.0.0"
},
"engines": {
@@ -5733,26 +5642,27 @@
}
},
"node_modules/@google-cloud/profiler/node_modules/google-auth-library": {
- "version": "9.1.0",
- "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.1.0.tgz",
- "integrity": "sha512-1M9HdOcQNPV5BwSXqwwT238MTKodJFBxZ/V2JP397ieOLv4FjQdfYb9SooR7Mb+oUT2IJ92mLJQf804dyx0MJA==",
+ "version": "9.15.1",
+ "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.15.1.tgz",
+ "integrity": "sha512-Jb6Z0+nvECVz+2lzSMt9u98UsoakXxA2HGHMCxh+so3n90XgYWkq5dur19JAJV7ONiJY22yBTyJB1TSkvPq9Ng==",
+ "license": "Apache-2.0",
"dependencies": {
"base64-js": "^1.3.0",
"ecdsa-sig-formatter": "^1.0.11",
- "gaxios": "^6.0.0",
- "gcp-metadata": "^6.0.0",
+ "gaxios": "^6.1.1",
+ "gcp-metadata": "^6.1.0",
"gtoken": "^7.0.0",
- "jws": "^4.0.0",
- "lru-cache": "^6.0.0"
+ "jws": "^4.0.0"
},
"engines": {
"node": ">=14"
}
},
"node_modules/@google-cloud/profiler/node_modules/gtoken": {
- "version": "7.0.1",
- "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.0.1.tgz",
- "integrity": "sha512-KcFVtoP1CVFtQu0aSk3AyAt2og66PFhZAlkUOuWKwzMLoulHXG5W5wE5xAnHb+yl3/wEFoqGW7/cDGMU8igDZQ==",
+ "version": "7.1.0",
+ "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz",
+ "integrity": "sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==",
+ "license": "MIT",
"dependencies": {
"gaxios": "^6.0.0",
"jws": "^4.0.0"
@@ -5761,41 +5671,49 @@
"node": ">=14.0.0"
}
},
- "node_modules/@google-cloud/profiler/node_modules/lru-cache": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
- "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
+ "node_modules/@google-cloud/profiler/node_modules/protobufjs": {
+ "version": "7.4.0",
+ "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz",
+ "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==",
+ "hasInstallScript": true,
+ "license": "BSD-3-Clause",
"dependencies": {
- "yallist": "^4.0.0"
+ "@protobufjs/aspromise": "^1.1.2",
+ "@protobufjs/base64": "^1.1.2",
+ "@protobufjs/codegen": "^2.0.4",
+ "@protobufjs/eventemitter": "^1.1.0",
+ "@protobufjs/fetch": "^1.1.0",
+ "@protobufjs/float": "^1.0.2",
+ "@protobufjs/inquire": "^1.1.0",
+ "@protobufjs/path": "^1.1.2",
+ "@protobufjs/pool": "^1.1.0",
+ "@protobufjs/utf8": "^1.1.0",
+ "@types/node": ">=13.7.0",
+ "long": "^5.0.0"
},
"engines": {
- "node": ">=10"
+ "node": ">=12.0.0"
}
},
- "node_modules/@google-cloud/profiler/node_modules/ms": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
- "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
- },
"node_modules/@google-cloud/profiler/node_modules/retry-request": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-6.0.0.tgz",
- "integrity": "sha512-24kaFMd3wCnT3n4uPnsQh90ZSV8OISpfTFXJ00Wi+/oD2OPrp63EQ8hznk6rhxdlpwx2QBhQSDz2Fg46ki852g==",
+ "version": "7.0.2",
+ "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-7.0.2.tgz",
+ "integrity": "sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==",
+ "license": "MIT",
"dependencies": {
- "debug": "^4.1.1",
- "extend": "^3.0.2"
+ "@types/request": "^2.48.8",
+ "extend": "^3.0.2",
+ "teeny-request": "^9.0.0"
},
"engines": {
"node": ">=14"
}
},
"node_modules/@google-cloud/profiler/node_modules/semver": {
- "version": "7.5.4",
- "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
- "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==",
- "dependencies": {
- "lru-cache": "^6.0.0"
- },
+ "version": "7.7.1",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz",
+ "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==",
+ "license": "ISC",
"bin": {
"semver": "bin/semver.js"
},
@@ -5807,6 +5725,7 @@
"version": "9.0.0",
"resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz",
"integrity": "sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==",
+ "license": "Apache-2.0",
"dependencies": {
"http-proxy-agent": "^5.0.0",
"https-proxy-agent": "^5.0.0",
@@ -5826,15 +5745,11 @@
"https://github.com/sponsors/broofa",
"https://github.com/sponsors/ctavan"
],
+ "license": "MIT",
"bin": {
"uuid": "dist/bin/uuid"
}
},
- "node_modules/@google-cloud/profiler/node_modules/yallist": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
- "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
- },
"node_modules/@google-cloud/projectify": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.1.1.tgz",
@@ -5905,17 +5820,6 @@
"node": ">=12"
}
},
- "node_modules/@google-cloud/storage/node_modules/duplexify": {
- "version": "4.1.2",
- "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.2.tgz",
- "integrity": "sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==",
- "dependencies": {
- "end-of-stream": "^1.4.1",
- "inherits": "^2.0.3",
- "readable-stream": "^3.1.1",
- "stream-shift": "^1.0.0"
- }
- },
"node_modules/@google-cloud/storage/node_modules/google-auth-library": {
"version": "8.7.0",
"resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-8.7.0.tgz",
@@ -6510,17 +6414,6 @@
"resolved": "https://registry.npmjs.org/@jsdevtools/ono/-/ono-7.1.3.tgz",
"integrity": "sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg=="
},
- "node_modules/@jsdoc/salty": {
- "version": "0.2.8",
- "resolved": "https://registry.npmjs.org/@jsdoc/salty/-/salty-0.2.8.tgz",
- "integrity": "sha512-5e+SFVavj1ORKlKaKr2BmTOekmXbelU7dC0cDkQLqag7xfuTPuGMUFx7KWJuv4bYZrTsoL2Z18VVCOKYxzoHcg==",
- "dependencies": {
- "lodash": "^4.17.21"
- },
- "engines": {
- "node": ">=v12.0.0"
- }
- },
"node_modules/@jsonjoy.com/base64": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@jsonjoy.com/base64/-/base64-1.1.2.tgz",
@@ -6692,14 +6585,15 @@
}
},
"node_modules/@mapbox/node-pre-gyp": {
- "version": "1.0.8",
- "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.8.tgz",
- "integrity": "sha512-CMGKi28CF+qlbXh26hDe6NxCd7amqeAzEqnS6IHeO6LoaKyM/n+Xw3HT1COdq8cuioOdlKdqn/hCmqPUOMOywg==",
+ "version": "1.0.11",
+ "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz",
+ "integrity": "sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==",
+ "license": "BSD-3-Clause",
"dependencies": {
- "detect-libc": "^1.0.3",
+ "detect-libc": "^2.0.0",
"https-proxy-agent": "^5.0.0",
"make-dir": "^3.1.0",
- "node-fetch": "^2.6.5",
+ "node-fetch": "^2.6.7",
"nopt": "^5.0.0",
"npmlog": "^5.0.1",
"rimraf": "^3.0.2",
@@ -6743,20 +6637,6 @@
"semver": "bin/semver.js"
}
},
- "node_modules/@mapbox/node-pre-gyp/node_modules/rimraf": {
- "version": "3.0.2",
- "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
- "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
- "dependencies": {
- "glob": "^7.1.3"
- },
- "bin": {
- "rimraf": "bin.js"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
"node_modules/@mapbox/node-pre-gyp/node_modules/semver": {
"version": "7.5.4",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
@@ -12294,15 +12174,6 @@
"@types/send": "*"
}
},
- "node_modules/@types/glob": {
- "version": "8.1.0",
- "resolved": "https://registry.npmjs.org/@types/glob/-/glob-8.1.0.tgz",
- "integrity": "sha512-IO+MJPVhoqz+28h1qLAcBEH2+xHMK6MTyHJc7MTnnYb6wsoLR29POVGJ7LycmVXIqyy/4/2ShP5sUwTXuOwb/w==",
- "dependencies": {
- "@types/minimatch": "^5.1.2",
- "@types/node": "*"
- }
- },
"node_modules/@types/glob-to-regexp": {
"version": "0.4.4",
"resolved": "https://registry.npmjs.org/@types/glob-to-regexp/-/glob-to-regexp-0.4.4.tgz",
@@ -12470,11 +12341,6 @@
"@types/node": "*"
}
},
- "node_modules/@types/linkify-it": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/@types/linkify-it/-/linkify-it-5.0.0.tgz",
- "integrity": "sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q=="
- },
"node_modules/@types/lodash": {
"version": "4.14.178",
"resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.178.tgz",
@@ -12486,20 +12352,6 @@
"resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.1.tgz",
"integrity": "sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w=="
},
- "node_modules/@types/markdown-it": {
- "version": "14.1.1",
- "resolved": "https://registry.npmjs.org/@types/markdown-it/-/markdown-it-14.1.1.tgz",
- "integrity": "sha512-4NpsnpYl2Gt1ljyBGrKMxFYAYvpqbnnkgP/i/g+NLpjEUa3obn1XJCur9YbEXKDAkaXqsR1LbDnGEJ0MmKFxfg==",
- "dependencies": {
- "@types/linkify-it": "^5",
- "@types/mdurl": "^2"
- }
- },
- "node_modules/@types/mdurl": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/@types/mdurl/-/mdurl-2.0.0.tgz",
- "integrity": "sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg=="
- },
"node_modules/@types/mdx": {
"version": "2.0.13",
"resolved": "https://registry.npmjs.org/@types/mdx/-/mdx-2.0.13.tgz",
@@ -12525,11 +12377,6 @@
"resolved": "https://registry.npmjs.org/@types/mime-db/-/mime-db-1.43.1.tgz",
"integrity": "sha512-kGZJY+R+WnR5Rk+RPHUMERtb2qBRViIHCBdtUrY+NmwuGb8pQdfTqQiCKPrxpdoycl8KWm2DLdkpoSdt479XoQ=="
},
- "node_modules/@types/minimatch": {
- "version": "5.1.2",
- "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-5.1.2.tgz",
- "integrity": "sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA=="
- },
"node_modules/@types/mocha": {
"version": "10.0.6",
"resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-10.0.6.tgz",
@@ -12794,15 +12641,6 @@
"integrity": "sha512-XISRgDJ2Tc5q4TRqvgJtzsRkFYNJzZrhTdtMoGVBttwzzQJkPnS3WWTFc7kuDRoPtPakl+T+OfdEUjYJj7Jbow==",
"dev": true
},
- "node_modules/@types/rimraf": {
- "version": "3.0.2",
- "resolved": "https://registry.npmjs.org/@types/rimraf/-/rimraf-3.0.2.tgz",
- "integrity": "sha512-F3OznnSLAUxFrCEu/L5PY8+ny8DtcFRjx7fZZ9bycvXRi3KPTRS9HOitGZwvPg0juRhXFWIeKX58cnX5YqLohQ==",
- "dependencies": {
- "@types/glob": "*",
- "@types/node": "*"
- }
- },
"node_modules/@types/semver": {
"version": "7.5.0",
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.0.tgz",
@@ -16506,6 +16344,7 @@
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz",
"integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==",
+ "license": "MIT",
"dependencies": {
"file-uri-to-path": "1.0.0"
}
@@ -17021,21 +16860,6 @@
"node": ">=10.12.0"
}
},
- "node_modules/c8/node_modules/rimraf": {
- "version": "3.0.2",
- "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
- "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
- "dev": true,
- "dependencies": {
- "glob": "^7.1.3"
- },
- "bin": {
- "rimraf": "bin.js"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
"node_modules/c8/node_modules/yargs": {
"version": "16.2.0",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz",
@@ -17259,17 +17083,6 @@
"resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz",
"integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw="
},
- "node_modules/catharsis": {
- "version": "0.9.0",
- "resolved": "https://registry.npmjs.org/catharsis/-/catharsis-0.9.0.tgz",
- "integrity": "sha512-prMTQVpcns/tzFgFVkVp6ak6RykZyWb3gu8ckUpd6YkTlacOd3DXGJjIpD4Q6zJirizvaiAjSSHlOsA+6sNh2A==",
- "dependencies": {
- "lodash": "^4.17.15"
- },
- "engines": {
- "node": ">= 10"
- }
- },
"node_modules/celebrate": {
"version": "15.0.3",
"resolved": "https://registry.npmjs.org/celebrate/-/celebrate-15.0.3.tgz",
@@ -20077,14 +19890,12 @@
}
},
"node_modules/detect-libc": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz",
- "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=",
- "bin": {
- "detect-libc": "bin/detect-libc.js"
- },
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.4.tgz",
+ "integrity": "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA==",
+ "license": "Apache-2.0",
"engines": {
- "node": ">=0.10"
+ "node": ">=8"
}
},
"node_modules/detect-node": {
@@ -20410,6 +20221,18 @@
"node": ">=0.10"
}
},
+ "node_modules/duplexify": {
+ "version": "4.1.3",
+ "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz",
+ "integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==",
+ "license": "MIT",
+ "dependencies": {
+ "end-of-stream": "^1.4.1",
+ "inherits": "^2.0.3",
+ "readable-stream": "^3.1.1",
+ "stream-shift": "^1.0.2"
+ }
+ },
"node_modules/duration": {
"version": "0.2.2",
"resolved": "https://registry.npmjs.org/duration/-/duration-0.2.2.tgz",
@@ -23227,7 +23050,8 @@
"node_modules/file-uri-to-path": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz",
- "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw=="
+ "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==",
+ "license": "MIT"
},
"node_modules/filelist": {
"version": "1.0.4",
@@ -23448,6 +23272,7 @@
"version": "2.2.3",
"resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz",
"integrity": "sha512-lg/Moejf4qXovVutL0Lz4IsaPoNYMuxt4PA0nGqFxnJ1CTTGGlEO2wKgoDpwknhvZ8k4Q2F+eesgkLbG2Mxfog==",
+ "license": "MIT",
"engines": {
"node": ">=0.8.22"
}
@@ -23473,20 +23298,6 @@
"node": "^10.12.0 || >=12.0.0"
}
},
- "node_modules/flat-cache/node_modules/rimraf": {
- "version": "3.0.2",
- "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
- "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
- "dependencies": {
- "glob": "^7.1.3"
- },
- "bin": {
- "rimraf": "bin.js"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
"node_modules/flatted": {
"version": "3.3.1",
"resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz",
@@ -24633,6 +24444,15 @@
"uuid": "dist/bin/uuid"
}
},
+ "node_modules/google-logging-utils": {
+ "version": "0.0.2",
+ "resolved": "https://registry.npmjs.org/google-logging-utils/-/google-logging-utils-0.0.2.tgz",
+ "integrity": "sha512-NEgUnEcBiP5HrPzufUkBzJOD/Sxsco3rLNo1F1TNf7ieU8ryUzBhqba8r756CjLX7rn3fHl6iLEwPYuqpoKgQQ==",
+ "license": "Apache-2.0",
+ "engines": {
+ "node": ">=14"
+ }
+ },
"node_modules/google-p12-pem": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.1.3.tgz",
@@ -26707,11 +26527,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
- "node_modules/is-stream-ended": {
- "version": "0.1.4",
- "resolved": "https://registry.npmjs.org/is-stream-ended/-/is-stream-ended-0.1.4.tgz",
- "integrity": "sha512-xj0XPvmr7bQFTvirqnFr50o0hQIh6ZItDqloxt5aJrR4NQsYeSsyFQERYGCAzfindAcnKjINnwEEgLx4IqVzQw=="
- },
"node_modules/is-string": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz",
@@ -27312,14 +27127,6 @@
"js-yaml": "bin/js-yaml.js"
}
},
- "node_modules/js2xmlparser": {
- "version": "4.0.2",
- "resolved": "https://registry.npmjs.org/js2xmlparser/-/js2xmlparser-4.0.2.tgz",
- "integrity": "sha512-6n4D8gLlLf1n5mNLQPRfViYzu9RATblzPEtm1SthMX1Pjao0r9YI9nw7ZIfRxQMERS87mcswrg+r/OYrPRX6jA==",
- "dependencies": {
- "xmlcreate": "^2.0.4"
- }
- },
"node_modules/jsbn": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
@@ -27391,34 +27198,6 @@
"node": ">=6.0.0"
}
},
- "node_modules/jsdoc": {
- "version": "4.0.3",
- "resolved": "https://registry.npmjs.org/jsdoc/-/jsdoc-4.0.3.tgz",
- "integrity": "sha512-Nu7Sf35kXJ1MWDZIMAuATRQTg1iIPdzh7tqJ6jjvaU/GfDf+qi5UV8zJR3Mo+/pYFvm8mzay4+6O5EWigaQBQw==",
- "dependencies": {
- "@babel/parser": "^7.20.15",
- "@jsdoc/salty": "^0.2.1",
- "@types/markdown-it": "^14.1.1",
- "bluebird": "^3.7.2",
- "catharsis": "^0.9.0",
- "escape-string-regexp": "^2.0.0",
- "js2xmlparser": "^4.0.2",
- "klaw": "^3.0.0",
- "markdown-it": "^14.1.0",
- "markdown-it-anchor": "^8.6.7",
- "marked": "^4.0.10",
- "mkdirp": "^1.0.4",
- "requizzle": "^0.2.3",
- "strip-json-comments": "^3.1.0",
- "underscore": "~1.13.2"
- },
- "bin": {
- "jsdoc": "jsdoc.js"
- },
- "engines": {
- "node": ">=12.0.0"
- }
- },
"node_modules/jsdoc-type-pratt-parser": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/jsdoc-type-pratt-parser/-/jsdoc-type-pratt-parser-4.1.0.tgz",
@@ -27429,25 +27208,6 @@
"node": ">=12.0.0"
}
},
- "node_modules/jsdoc/node_modules/escape-string-regexp": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz",
- "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/jsdoc/node_modules/mkdirp": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
- "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
- "bin": {
- "mkdirp": "bin/cmd.js"
- },
- "engines": {
- "node": ">=10"
- }
- },
"node_modules/jsdom": {
"version": "19.0.0",
"resolved": "https://registry.npmjs.org/jsdom/-/jsdom-19.0.0.tgz",
@@ -27909,14 +27669,6 @@
"node": ">=0.10.0"
}
},
- "node_modules/klaw": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/klaw/-/klaw-3.0.0.tgz",
- "integrity": "sha512-0Fo5oir+O9jnXu5EefYbVK+mHMBeEVEy2cmctR1O1NECcCkPRreJKrS6Qt/j3KC2C148Dfo9i3pCmCMsdqGr0g==",
- "dependencies": {
- "graceful-fs": "^4.1.9"
- }
- },
"node_modules/klaw-sync": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/klaw-sync/-/klaw-sync-6.0.0.tgz",
@@ -28302,19 +28054,6 @@
"integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==",
"dev": true
},
- "node_modules/linkify-it": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz",
- "integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==",
- "dependencies": {
- "uc.micro": "^2.0.0"
- }
- },
- "node_modules/linkify-it/node_modules/uc.micro": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz",
- "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A=="
- },
"node_modules/listr2": {
"version": "3.14.0",
"resolved": "https://registry.npmjs.org/listr2/-/listr2-3.14.0.tgz",
@@ -28723,6 +28462,12 @@
"resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz",
"integrity": "sha1-DdOXEhPHxW34gJd9UEyI+0cal6w="
},
+ "node_modules/lodash.sortby": {
+ "version": "4.7.0",
+ "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz",
+ "integrity": "sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==",
+ "license": "MIT"
+ },
"node_modules/lodash.support": {
"version": "2.4.1",
"resolved": "https://registry.npmjs.org/lodash.support/-/lodash.support-2.4.1.tgz",
@@ -29088,47 +28833,6 @@
"node": ">=0.10.0"
}
},
- "node_modules/markdown-it": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz",
- "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==",
- "dependencies": {
- "argparse": "^2.0.1",
- "entities": "^4.4.0",
- "linkify-it": "^5.0.0",
- "mdurl": "^2.0.0",
- "punycode.js": "^2.3.1",
- "uc.micro": "^2.1.0"
- },
- "bin": {
- "markdown-it": "bin/markdown-it.mjs"
- }
- },
- "node_modules/markdown-it-anchor": {
- "version": "8.6.7",
- "resolved": "https://registry.npmjs.org/markdown-it-anchor/-/markdown-it-anchor-8.6.7.tgz",
- "integrity": "sha512-FlCHFwNnutLgVTflOYHPW2pPcl2AACqVzExlkGQNsi4CJgqOHN7YTgDd4LuhgN1BFO3TS0vLAruV1Td6dwWPJA==",
- "peerDependencies": {
- "@types/markdown-it": "*",
- "markdown-it": "*"
- }
- },
- "node_modules/markdown-it/node_modules/entities": {
- "version": "4.5.0",
- "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
- "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==",
- "engines": {
- "node": ">=0.12"
- },
- "funding": {
- "url": "https://github.com/fb55/entities?sponsor=1"
- }
- },
- "node_modules/markdown-it/node_modules/uc.micro": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz",
- "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A=="
- },
"node_modules/marked": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/marked/-/marked-4.1.0.tgz",
@@ -29202,11 +28906,6 @@
"integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==",
"dev": true
},
- "node_modules/mdurl": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz",
- "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w=="
- },
"node_modules/media-typer": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
@@ -30603,9 +30302,10 @@
"license": "MIT"
},
"node_modules/nan": {
- "version": "2.17.0",
- "resolved": "https://registry.npmjs.org/nan/-/nan-2.17.0.tgz",
- "integrity": "sha512-2ZTgtl0nJsO0KQCjEpxcIr5D+Yv90plTitZt9JBfQvVJDS5seMl3FOvsh3+9CoYWXf/1l5OaZzzF6nDm4cagaQ=="
+ "version": "2.22.2",
+ "resolved": "https://registry.npmjs.org/nan/-/nan-2.22.2.tgz",
+ "integrity": "sha512-DANghxFkS1plDdRsX0X9pm0Z6SJNN6gBdtXfanwoZ8hooC5gosGFSBGRYHUVPz1asKA/kMRqDRdHrluZ61SpBQ==",
+ "license": "MIT"
},
"node_modules/nanoclone": {
"version": "0.2.1",
@@ -30876,16 +30576,6 @@
"node-gyp-build-optional-packages-test": "build-test.js"
}
},
- "node_modules/node-gyp-build-optional-packages/node_modules/detect-libc": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz",
- "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==",
- "license": "Apache-2.0",
- "optional": true,
- "engines": {
- "node": ">=8"
- }
- },
"node_modules/node-int64": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz",
@@ -31483,53 +31173,6 @@
"node": ">=0.10"
}
},
- "node_modules/optionator": {
- "version": "0.8.3",
- "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz",
- "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==",
- "dependencies": {
- "deep-is": "~0.1.3",
- "fast-levenshtein": "~2.0.6",
- "levn": "~0.3.0",
- "prelude-ls": "~1.1.2",
- "type-check": "~0.3.2",
- "word-wrap": "~1.2.3"
- },
- "engines": {
- "node": ">= 0.8.0"
- }
- },
- "node_modules/optionator/node_modules/levn": {
- "version": "0.3.0",
- "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz",
- "integrity": "sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==",
- "dependencies": {
- "prelude-ls": "~1.1.2",
- "type-check": "~0.3.2"
- },
- "engines": {
- "node": ">= 0.8.0"
- }
- },
- "node_modules/optionator/node_modules/prelude-ls": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz",
- "integrity": "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==",
- "engines": {
- "node": ">= 0.8.0"
- }
- },
- "node_modules/optionator/node_modules/type-check": {
- "version": "0.3.2",
- "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz",
- "integrity": "sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==",
- "dependencies": {
- "prelude-ls": "~1.1.2"
- },
- "engines": {
- "node": ">= 0.8.0"
- }
- },
"node_modules/options": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/options/-/options-0.0.6.tgz",
@@ -31780,11 +31423,6 @@
"node": ">=8"
}
},
- "node_modules/parse-duration": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-1.1.0.tgz",
- "integrity": "sha512-z6t9dvSJYaPoQq7quMzdEagSFtpGu+utzHqqxmpVWNNZRIXnvqyCvn9XsTdh7c/w0Bqmdz3RB3YnRaKtpRtEXQ=="
- },
"node_modules/parse-json": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz",
@@ -33928,41 +33566,34 @@
}
},
"node_modules/pprof": {
- "version": "3.2.1",
- "resolved": "https://registry.npmjs.org/pprof/-/pprof-3.2.1.tgz",
- "integrity": "sha512-KnextTM3EHQ2zqN8fUjB0VpE+njcVR7cOfo7DjJSLKzIbKTPelDtokI04ScR/Vd8CLDj+M99tsaKV+K6FHzpzA==",
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/pprof/-/pprof-4.0.0.tgz",
+ "integrity": "sha512-Yhfk7Y0G1MYsy97oXxmSG5nvbM1sCz9EALiNhW/isAv5Xf7svzP+1RfGeBlS6mLSgRJvgSLh6Mi5DaisQuPttw==",
"hasInstallScript": true,
+ "license": "Apache-2.0",
"dependencies": {
- "@mapbox/node-pre-gyp": "^1.0.0",
+ "@mapbox/node-pre-gyp": "^1.0.9",
"bindings": "^1.2.1",
"delay": "^5.0.0",
"findit2": "^2.2.3",
- "nan": "^2.14.0",
+ "nan": "^2.17.0",
"p-limit": "^3.0.0",
- "pify": "^5.0.0",
"protobufjs": "~7.2.4",
- "source-map": "^0.7.3",
+ "source-map": "~0.8.0-beta.0",
"split": "^1.0.1"
},
"engines": {
- "node": ">=10.4.1"
- }
- },
- "node_modules/pprof/node_modules/pify": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/pify/-/pify-5.0.0.tgz",
- "integrity": "sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA==",
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
+ "node": ">=14.0.0"
}
},
"node_modules/pprof/node_modules/source-map": {
- "version": "0.7.4",
- "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz",
- "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==",
+ "version": "0.8.0-beta.0",
+ "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.8.0-beta.0.tgz",
+ "integrity": "sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==",
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "whatwg-url": "^7.0.0"
+ },
"engines": {
"node": ">= 8"
}
@@ -34243,17 +33874,6 @@
"integrity": "sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA==",
"dev": true
},
- "node_modules/proto3-json-serializer": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/proto3-json-serializer/-/proto3-json-serializer-1.1.1.tgz",
- "integrity": "sha512-AwAuY4g9nxx0u52DnSMkqqgyLHaW/XaPLtaAo3y/ZCfeaQB/g4YDH4kb8Wc/mWzWvu0YjOznVnfn373MVZZrgw==",
- "dependencies": {
- "protobufjs": "^7.0.0"
- },
- "engines": {
- "node": ">=12.0.0"
- }
- },
"node_modules/protobufjs": {
"version": "7.2.5",
"resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.2.5.tgz",
@@ -34277,151 +33897,6 @@
"node": ">=12.0.0"
}
},
- "node_modules/protobufjs-cli": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/protobufjs-cli/-/protobufjs-cli-1.1.1.tgz",
- "integrity": "sha512-VPWMgIcRNyQwWUv8OLPyGQ/0lQY/QTQAVN5fh+XzfDwsVw1FZ2L3DM/bcBf8WPiRz2tNpaov9lPZfNcmNo6LXA==",
- "dependencies": {
- "chalk": "^4.0.0",
- "escodegen": "^1.13.0",
- "espree": "^9.0.0",
- "estraverse": "^5.1.0",
- "glob": "^8.0.0",
- "jsdoc": "^4.0.0",
- "minimist": "^1.2.0",
- "semver": "^7.1.2",
- "tmp": "^0.2.1",
- "uglify-js": "^3.7.7"
- },
- "bin": {
- "pbjs": "bin/pbjs",
- "pbts": "bin/pbts"
- },
- "engines": {
- "node": ">=12.0.0"
- },
- "peerDependencies": {
- "protobufjs": "^7.0.0"
- }
- },
- "node_modules/protobufjs-cli/node_modules/ansi-styles": {
- "version": "4.3.0",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
- "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
- "dependencies": {
- "color-convert": "^2.0.1"
- },
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/chalk/ansi-styles?sponsor=1"
- }
- },
- "node_modules/protobufjs-cli/node_modules/brace-expansion": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
- "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
- "dependencies": {
- "balanced-match": "^1.0.0"
- }
- },
- "node_modules/protobufjs-cli/node_modules/chalk": {
- "version": "4.1.2",
- "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
- "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
- "dependencies": {
- "ansi-styles": "^4.1.0",
- "supports-color": "^7.1.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/chalk/chalk?sponsor=1"
- }
- },
- "node_modules/protobufjs-cli/node_modules/escodegen": {
- "version": "1.14.3",
- "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.14.3.tgz",
- "integrity": "sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw==",
- "dependencies": {
- "esprima": "^4.0.1",
- "estraverse": "^4.2.0",
- "esutils": "^2.0.2",
- "optionator": "^0.8.1"
- },
- "bin": {
- "escodegen": "bin/escodegen.js",
- "esgenerate": "bin/esgenerate.js"
- },
- "engines": {
- "node": ">=4.0"
- },
- "optionalDependencies": {
- "source-map": "~0.6.1"
- }
- },
- "node_modules/protobufjs-cli/node_modules/escodegen/node_modules/estraverse": {
- "version": "4.3.0",
- "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz",
- "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==",
- "engines": {
- "node": ">=4.0"
- }
- },
- "node_modules/protobufjs-cli/node_modules/glob": {
- "version": "8.1.0",
- "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz",
- "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==",
- "deprecated": "Glob versions prior to v9 are no longer supported",
- "dependencies": {
- "fs.realpath": "^1.0.0",
- "inflight": "^1.0.4",
- "inherits": "2",
- "minimatch": "^5.0.1",
- "once": "^1.3.0"
- },
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/protobufjs-cli/node_modules/minimatch": {
- "version": "5.1.6",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz",
- "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==",
- "dependencies": {
- "brace-expansion": "^2.0.1"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/protobufjs-cli/node_modules/semver": {
- "version": "7.6.2",
- "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz",
- "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==",
- "bin": {
- "semver": "bin/semver.js"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/protobufjs-cli/node_modules/supports-color": {
- "version": "7.2.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
- "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
- "dependencies": {
- "has-flag": "^4.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
"node_modules/proxy-addr": {
"version": "2.0.7",
"resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz",
@@ -34594,17 +34069,6 @@
"pump": "^3.0.0"
}
},
- "node_modules/pumpify/node_modules/duplexify": {
- "version": "4.1.2",
- "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.2.tgz",
- "integrity": "sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==",
- "dependencies": {
- "end-of-stream": "^1.4.1",
- "inherits": "^2.0.3",
- "readable-stream": "^3.1.1",
- "stream-shift": "^1.0.0"
- }
- },
"node_modules/punycode": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
@@ -34613,14 +34077,6 @@
"node": ">=6"
}
},
- "node_modules/punycode.js": {
- "version": "2.3.1",
- "resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz",
- "integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==",
- "engines": {
- "node": ">=6"
- }
- },
"node_modules/qrcode": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/qrcode/-/qrcode-1.5.0.tgz",
@@ -36012,14 +35468,6 @@
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
"integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8="
},
- "node_modules/requizzle": {
- "version": "0.2.4",
- "resolved": "https://registry.npmjs.org/requizzle/-/requizzle-0.2.4.tgz",
- "integrity": "sha512-JRrFk1D4OQ4SqovXOgdav+K8EAhSB/LJZqCz8tbX0KObcdeM15Ss59ozWMBWmmINMagCwmqn4ZNryUGpBsl6Jw==",
- "dependencies": {
- "lodash": "^4.17.21"
- }
- },
"node_modules/reselect": {
"version": "4.1.8",
"resolved": "https://registry.npmjs.org/reselect/-/reselect-4.1.8.tgz",
@@ -36159,6 +35607,22 @@
"integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==",
"dev": true
},
+ "node_modules/rimraf": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
+ "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
+ "deprecated": "Rimraf versions prior to v4 are no longer supported",
+ "license": "ISC",
+ "dependencies": {
+ "glob": "^7.1.3"
+ },
+ "bin": {
+ "rimraf": "bin.js"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
"node_modules/rndm": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/rndm/-/rndm-1.2.0.tgz",
@@ -37805,6 +37269,7 @@
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz",
"integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==",
+ "license": "MIT",
"dependencies": {
"through": "2"
},
@@ -39755,6 +39220,7 @@
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.3.tgz",
"integrity": "sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w==",
+ "dev": true,
"engines": {
"node": ">=14.14"
}
@@ -39938,6 +39404,15 @@
"node": ">= 4.0.0"
}
},
+ "node_modules/tr46": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/tr46/-/tr46-1.0.1.tgz",
+ "integrity": "sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==",
+ "license": "MIT",
+ "dependencies": {
+ "punycode": "^2.1.0"
+ }
+ },
"node_modules/traverse": {
"version": "0.6.9",
"resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.9.tgz",
@@ -40236,6 +39711,8 @@
"version": "3.15.0",
"resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.15.0.tgz",
"integrity": "sha512-x+xdeDWq7FiORDvyIJ0q/waWd4PhjBNOm5dQUOq2AKC0IEjxOS66Ha9tctiVDGcRQuh69K7fgU5oRuTK4cysSg==",
+ "dev": true,
+ "optional": true,
"bin": {
"uglifyjs": "bin/uglifyjs"
},
@@ -41937,6 +41414,23 @@
"node": ">=12"
}
},
+ "node_modules/whatwg-url": {
+ "version": "7.1.0",
+ "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-7.1.0.tgz",
+ "integrity": "sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==",
+ "license": "MIT",
+ "dependencies": {
+ "lodash.sortby": "^4.7.0",
+ "tr46": "^1.0.1",
+ "webidl-conversions": "^4.0.2"
+ }
+ },
+ "node_modules/whatwg-url/node_modules/webidl-conversions": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-4.0.2.tgz",
+ "integrity": "sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==",
+ "license": "BSD-2-Clause"
+ },
"node_modules/which": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
@@ -42103,6 +41597,7 @@
"version": "1.2.5",
"resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz",
"integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==",
+ "dev": true,
"engines": {
"node": ">=0.10.0"
}
@@ -42335,11 +41830,6 @@
"integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==",
"dev": true
},
- "node_modules/xmlcreate": {
- "version": "2.0.4",
- "resolved": "https://registry.npmjs.org/xmlcreate/-/xmlcreate-2.0.4.tgz",
- "integrity": "sha512-nquOebG4sngPmGPICTS5EnxqhKbCmz5Ox5hsszI2T6U5qdrJizBc+0ilYSEjTSzU0yZcmvppztXe/5Al5fUwdg=="
- },
"node_modules/xmlhttprequest": {
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/xmlhttprequest/-/xmlhttprequest-1.8.0.tgz",
@@ -42833,13 +42323,6 @@
"tar-stream": "^2.1.4"
}
},
- "services/clsi/node_modules/nan": {
- "version": "2.22.2",
- "resolved": "https://registry.npmjs.org/nan/-/nan-2.22.2.tgz",
- "integrity": "sha512-DANghxFkS1plDdRsX0X9pm0Z6SJNN6gBdtXfanwoZ8hooC5gosGFSBGRYHUVPz1asKA/kMRqDRdHrluZ61SpBQ==",
- "license": "MIT",
- "optional": true
- },
"services/clsi/node_modules/protobufjs": {
"version": "7.4.0",
"resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz",
@@ -43667,197 +43150,6 @@
"@babel/highlight": "^7.10.4"
}
},
- "services/latexqc/node_modules/@babel/core": {
- "version": "7.26.10",
- "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.10.tgz",
- "integrity": "sha512-vMqyb7XCDMPvJFFOaT9kxtiRh42GwlZEg1/uIgtZshS5a/8OaduUfCi7kynKgc3Tw/6Uo2D+db9qBttghhmxwQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@ampproject/remapping": "^2.2.0",
- "@babel/code-frame": "^7.26.2",
- "@babel/generator": "^7.26.10",
- "@babel/helper-compilation-targets": "^7.26.5",
- "@babel/helper-module-transforms": "^7.26.0",
- "@babel/helpers": "^7.26.10",
- "@babel/parser": "^7.26.10",
- "@babel/template": "^7.26.9",
- "@babel/traverse": "^7.26.10",
- "@babel/types": "^7.26.10",
- "convert-source-map": "^2.0.0",
- "debug": "^4.1.0",
- "gensync": "^1.0.0-beta.2",
- "json5": "^2.2.3",
- "semver": "^6.3.1"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/babel"
- }
- },
- "services/latexqc/node_modules/@babel/core/node_modules/@babel/code-frame": {
- "version": "7.26.2",
- "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz",
- "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@babel/helper-validator-identifier": "^7.25.9",
- "js-tokens": "^4.0.0",
- "picocolors": "^1.0.0"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "services/latexqc/node_modules/@babel/core/node_modules/semver": {
- "version": "6.3.1",
- "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
- "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
- "dev": true,
- "license": "ISC",
- "bin": {
- "semver": "bin/semver.js"
- }
- },
- "services/latexqc/node_modules/@babel/generator": {
- "version": "7.27.0",
- "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.27.0.tgz",
- "integrity": "sha512-VybsKvpiN1gU1sdMZIp7FcqphVVKEwcuj02x73uvcHE0PTihx1nlBcowYWhDwjpoAXRv43+gDzyggGnn1XZhVw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@babel/parser": "^7.27.0",
- "@babel/types": "^7.27.0",
- "@jridgewell/gen-mapping": "^0.3.5",
- "@jridgewell/trace-mapping": "^0.3.25",
- "jsesc": "^3.0.2"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "services/latexqc/node_modules/@babel/helpers": {
- "version": "7.27.0",
- "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.27.0.tgz",
- "integrity": "sha512-U5eyP/CTFPuNE3qk+WZMxFkp/4zUzdceQlfzf7DdGdhp+Fezd7HD+i8Y24ZuTMKX3wQBld449jijbGq6OdGNQg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@babel/template": "^7.27.0",
- "@babel/types": "^7.27.0"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "services/latexqc/node_modules/@babel/parser": {
- "version": "7.27.0",
- "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.27.0.tgz",
- "integrity": "sha512-iaepho73/2Pz7w2eMS0Q5f83+0RKI7i4xmiYeBmDzfRVbQtTOG7Ts0S4HzJVsTMGI9keU8rNfuZr8DKfSt7Yyg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@babel/types": "^7.27.0"
- },
- "bin": {
- "parser": "bin/babel-parser.js"
- },
- "engines": {
- "node": ">=6.0.0"
- }
- },
- "services/latexqc/node_modules/@babel/template": {
- "version": "7.27.0",
- "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.0.tgz",
- "integrity": "sha512-2ncevenBqXI6qRMukPlXwHKHchC7RyMuu4xv5JBXRfOGVcTy1mXCD12qrp7Jsoxll1EV3+9sE4GugBVRjT2jFA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@babel/code-frame": "^7.26.2",
- "@babel/parser": "^7.27.0",
- "@babel/types": "^7.27.0"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "services/latexqc/node_modules/@babel/template/node_modules/@babel/code-frame": {
- "version": "7.26.2",
- "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz",
- "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@babel/helper-validator-identifier": "^7.25.9",
- "js-tokens": "^4.0.0",
- "picocolors": "^1.0.0"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "services/latexqc/node_modules/@babel/traverse": {
- "version": "7.27.0",
- "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.27.0.tgz",
- "integrity": "sha512-19lYZFzYVQkkHkl4Cy4WrAVcqBkgvV2YM2TU3xG6DIwO7O3ecbDPfW3yM3bjAGcqcQHi+CCtjMR3dIEHxsd6bA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@babel/code-frame": "^7.26.2",
- "@babel/generator": "^7.27.0",
- "@babel/parser": "^7.27.0",
- "@babel/template": "^7.27.0",
- "@babel/types": "^7.27.0",
- "debug": "^4.3.1",
- "globals": "^11.1.0"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "services/latexqc/node_modules/@babel/traverse/node_modules/@babel/code-frame": {
- "version": "7.26.2",
- "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz",
- "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@babel/helper-validator-identifier": "^7.25.9",
- "js-tokens": "^4.0.0",
- "picocolors": "^1.0.0"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "services/latexqc/node_modules/@babel/traverse/node_modules/globals": {
- "version": "11.12.0",
- "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz",
- "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=4"
- }
- },
- "services/latexqc/node_modules/@babel/types": {
- "version": "7.27.0",
- "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.27.0.tgz",
- "integrity": "sha512-H45s8fVLYjbhFH62dIJ3WtmJ6RSPt/3DRO0ZcT2SUiYiQyz3BLVb9ADEnLl91m74aQPS3AzzeajZHYOalWe3bg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@babel/helper-string-parser": "^7.25.9",
- "@babel/helper-validator-identifier": "^7.25.9"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
"services/latexqc/node_modules/@eslint/eslintrc": {
"version": "0.4.3",
"resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.3.tgz",
@@ -44167,13 +43459,6 @@
"node": ">= 16"
}
},
- "services/latexqc/node_modules/convert-source-map": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz",
- "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==",
- "dev": true,
- "license": "MIT"
- },
"services/latexqc/node_modules/debug": {
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
From e3dd47ba6e3e1bc644677df69dfed9819b29ecfe Mon Sep 17 00:00:00 2001
From: Antoine Clausse
Date: Tue, 6 May 2025 16:19:01 +0200
Subject: [PATCH 018/194] [web] Fix date format in emails.createdAt, use `new
Date()` instead of `Date.now()` (#25322)
GitOrigin-RevId: c94700accb1df902926779c1e6321be63cf65235
---
services/web/scripts/remove_emails_with_commas.mjs | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/services/web/scripts/remove_emails_with_commas.mjs b/services/web/scripts/remove_emails_with_commas.mjs
index 29d78b129c..f6f107edac 100644
--- a/services/web/scripts/remove_emails_with_commas.mjs
+++ b/services/web/scripts/remove_emails_with_commas.mjs
@@ -82,7 +82,7 @@ async function consumeCsvFileAndUpdate() {
$addToSet: {
emails: {
email: newEmail,
- createdAt: Date.now(),
+ createdAt: new Date(),
reversedHostname: 'moc.faelrevo',
},
},
From bc4c3c4ef8235446c2488e45638f3d391518c7fd Mon Sep 17 00:00:00 2001
From: Antoine Clausse
Date: Tue, 6 May 2025 16:34:13 +0200
Subject: [PATCH 019/194] [web] Promisify ClsiCookieManager and
CompileController (reapply again) (#25280)
* Reapply "[web] Promisify ClsiCookieManager and CompileController (reapply and fix)"
This reverts commit 98cb9127ff2b7c7c347c560766f749265d712490.
* Fix: Use query parameters correctly (!!)
* Add unit test on `checkIsLoadSheddingEvent`
* Remove interference between tests: rename to `ClsiCookieManager2` when it's re-sandboxed
* Add test: 'should report "cycle" when other is UP'
GitOrigin-RevId: 3146b149954b908830226cb03b51d9adfa08ec2e
---
.../src/Features/Compile/ClsiCookieManager.js | 402 ++++----
.../src/Features/Compile/CompileController.js | 876 ++++++++----------
services/web/app/src/router.mjs | 4 +-
.../src/Compile/ClsiCookieManagerTests.js | 346 +++----
.../src/Compile/CompileControllerTests.js | 371 ++++----
5 files changed, 940 insertions(+), 1059 deletions(-)
diff --git a/services/web/app/src/Features/Compile/ClsiCookieManager.js b/services/web/app/src/Features/Compile/ClsiCookieManager.js
index fc542fefaf..a1ac0741b9 100644
--- a/services/web/app/src/Features/Compile/ClsiCookieManager.js
+++ b/services/web/app/src/Features/Compile/ClsiCookieManager.js
@@ -1,12 +1,15 @@
const { URL, URLSearchParams } = require('url')
const OError = require('@overleaf/o-error')
const Settings = require('@overleaf/settings')
-const request = require('request').defaults({ timeout: 30 * 1000 })
+const {
+ fetchNothing,
+ fetchStringWithResponse,
+ RequestFailedError,
+} = require('@overleaf/fetch-utils')
const RedisWrapper = require('../../infrastructure/RedisWrapper')
const Cookie = require('cookie')
const logger = require('@overleaf/logger')
const Metrics = require('@overleaf/metrics')
-const { promisifyAll } = require('@overleaf/promise-utils')
const clsiCookiesEnabled = (Settings.clsiCookie?.key ?? '') !== ''
@@ -16,235 +19,204 @@ if (Settings.redis.clsi_cookie_secondary != null) {
rclientSecondary = RedisWrapper.client('clsi_cookie_secondary')
}
-module.exports = function (backendGroup) {
- const cookieManager = {
- buildKey(projectId, userId) {
- if (backendGroup != null) {
- return `clsiserver:${backendGroup}:${projectId}:${userId}`
- } else {
- return `clsiserver:${projectId}:${userId}`
- }
- },
+const ClsiCookieManagerFactory = function (backendGroup) {
+ function buildKey(projectId, userId) {
+ if (backendGroup != null) {
+ return `clsiserver:${backendGroup}:${projectId}:${userId}`
+ } else {
+ return `clsiserver:${projectId}:${userId}`
+ }
+ }
- getServerId(
- projectId,
- userId,
- compileGroup,
- compileBackendClass,
- callback
- ) {
- if (!clsiCookiesEnabled) {
- return callback()
- }
- rclient.get(this.buildKey(projectId, userId), (err, serverId) => {
- if (err) {
- return callback(err)
- }
- if (serverId == null || serverId === '') {
- this._populateServerIdViaRequest(
- projectId,
- userId,
- compileGroup,
- compileBackendClass,
- callback
- )
- } else {
- callback(null, serverId)
- }
- })
- },
+ async function getServerId(
+ projectId,
+ userId,
+ compileGroup,
+ compileBackendClass
+ ) {
+ if (!clsiCookiesEnabled) {
+ return
+ }
+ const serverId = await rclient.get(buildKey(projectId, userId))
- _populateServerIdViaRequest(
- projectId,
- userId,
- compileGroup,
- compileBackendClass,
- callback
- ) {
- const u = new URL(`${Settings.apis.clsi.url}/project/${projectId}/status`)
- u.search = new URLSearchParams({
+ if (!serverId) {
+ return await cookieManager.promises._populateServerIdViaRequest(
+ projectId,
+ userId,
compileGroup,
- compileBackendClass,
- }).toString()
- request.post(u.href, (err, res, body) => {
- if (err) {
- OError.tag(err, 'error getting initial server id for project', {
- project_id: projectId,
- })
- return callback(err)
- }
- if (!clsiCookiesEnabled) {
- return callback()
- }
- const serverId = this._parseServerIdFromResponse(res)
- this.setServerId(
- projectId,
- userId,
- compileGroup,
- compileBackendClass,
- serverId,
- null,
- function (err) {
- if (err) {
- logger.warn(
- { err, projectId },
- 'error setting server id via populate request'
- )
- }
- callback(err, serverId)
- }
- )
- })
- },
-
- _parseServerIdFromResponse(response) {
- const cookies = Cookie.parse(response.headers['set-cookie']?.[0] || '')
- return cookies?.[Settings.clsiCookie.key]
- },
-
- checkIsLoadSheddingEvent(clsiserverid, compileGroup, compileBackendClass) {
- request.get(
- {
- url: `${Settings.apis.clsi.url}/instance-state`,
- qs: { clsiserverid, compileGroup, compileBackendClass },
- },
- (err, res, body) => {
- if (err) {
- Metrics.inc('clsi-lb-switch-backend', 1, {
- status: 'error',
- })
- logger.warn({ err, clsiserverid }, 'cannot probe clsi VM')
- return
- }
- const isStillRunning =
- res.statusCode === 200 && body === `${clsiserverid},UP\n`
- Metrics.inc('clsi-lb-switch-backend', 1, {
- status: isStillRunning ? 'load-shedding' : 'cycle',
- })
- }
+ compileBackendClass
)
- },
+ } else {
+ return serverId
+ }
+ }
- _getTTLInSeconds(clsiServerId) {
- return (clsiServerId || '').includes('-reg-')
- ? Settings.clsiCookie.ttlInSecondsRegular
- : Settings.clsiCookie.ttlInSeconds
- },
-
- setServerId(
- projectId,
- userId,
+ async function _populateServerIdViaRequest(
+ projectId,
+ userId,
+ compileGroup,
+ compileBackendClass
+ ) {
+ const u = new URL(`${Settings.apis.clsi.url}/project/${projectId}/status`)
+ u.search = new URLSearchParams({
compileGroup,
compileBackendClass,
- serverId,
- previous,
- callback
- ) {
- if (!clsiCookiesEnabled) {
- return callback()
- }
- if (serverId == null) {
- // We don't get a cookie back if it hasn't changed
- return rclient.expire(
- this.buildKey(projectId, userId),
- this._getTTLInSeconds(previous),
- err => callback(err)
- )
- }
- if (!previous) {
- // Initial assignment of a user+project or after clearing cache.
- Metrics.inc('clsi-lb-assign-initial-backend')
- } else {
- this.checkIsLoadSheddingEvent(
- previous,
- compileGroup,
- compileBackendClass
- )
- }
- if (rclientSecondary != null) {
- this._setServerIdInRedis(
- rclientSecondary,
- projectId,
- userId,
- serverId,
- () => {}
- )
- }
- this._setServerIdInRedis(rclient, projectId, userId, serverId, err =>
- callback(err)
- )
- },
-
- _setServerIdInRedis(rclient, projectId, userId, serverId, callback) {
- rclient.setex(
- this.buildKey(projectId, userId),
- this._getTTLInSeconds(serverId),
- serverId,
- callback
- )
- },
-
- clearServerId(projectId, userId, callback) {
- if (!clsiCookiesEnabled) {
- return callback()
- }
- rclient.del(this.buildKey(projectId, userId), err => {
- if (err) {
- // redis errors need wrapping as the instance may be shared
- return callback(
- new OError(
- 'Failed to clear clsi persistence',
- { projectId, userId },
- err
- )
- )
- } else {
- return callback()
- }
+ }).toString()
+ let res
+ try {
+ res = await fetchNothing(u.href, {
+ method: 'POST',
+ signal: AbortSignal.timeout(30_000),
})
- },
+ } catch (err) {
+ OError.tag(err, 'error getting initial server id for project', {
+ project_id: projectId,
+ })
+ throw err
+ }
- getCookieJar(
- projectId,
- userId,
- compileGroup,
- compileBackendClass,
- callback
- ) {
- if (!clsiCookiesEnabled) {
- return callback(null, request.jar(), undefined)
- }
- this.getServerId(
+ if (!clsiCookiesEnabled) {
+ return
+ }
+ const serverId = cookieManager._parseServerIdFromResponse(res)
+ try {
+ await cookieManager.promises.setServerId(
projectId,
userId,
compileGroup,
compileBackendClass,
- (err, serverId) => {
- if (err != null) {
- OError.tag(err, 'error getting server id', {
- project_id: projectId,
- })
- return callback(err)
- }
- const serverCookie = request.cookie(
- `${Settings.clsiCookie.key}=${serverId}`
- )
- const jar = request.jar()
- jar.setCookie(serverCookie, Settings.apis.clsi.url)
- callback(null, jar, serverId)
+ serverId,
+ null
+ )
+ return serverId
+ } catch (err) {
+ logger.warn(
+ { err, projectId },
+ 'error setting server id via populate request'
+ )
+ throw err
+ }
+ }
+
+ function _parseServerIdFromResponse(response) {
+ const cookies = Cookie.parse(response.headers['set-cookie']?.[0] || '')
+ return cookies?.[Settings.clsiCookie.key]
+ }
+
+ async function checkIsLoadSheddingEvent(
+ clsiserverid,
+ compileGroup,
+ compileBackendClass
+ ) {
+ let status
+ try {
+ const params = new URLSearchParams({
+ clsiserverid,
+ compileGroup,
+ compileBackendClass,
+ }).toString()
+ const { response, body } = await fetchStringWithResponse(
+ `${Settings.apis.clsi.url}/instance-state?${params}`,
+ {
+ method: 'GET',
+ signal: AbortSignal.timeout(30_000),
}
)
+ status =
+ response.status === 200 && body === `${clsiserverid},UP\n`
+ ? 'load-shedding'
+ : 'cycle'
+ } catch (err) {
+ if (err instanceof RequestFailedError && err.response.status === 404) {
+ status = 'cycle'
+ } else {
+ status = 'error'
+ logger.warn({ err, clsiserverid }, 'cannot probe clsi VM')
+ }
+ }
+ Metrics.inc('clsi-lb-switch-backend', 1, { status })
+ }
+
+ function _getTTLInSeconds(clsiServerId) {
+ return (clsiServerId || '').includes('-reg-')
+ ? Settings.clsiCookie.ttlInSecondsRegular
+ : Settings.clsiCookie.ttlInSeconds
+ }
+
+ async function setServerId(
+ projectId,
+ userId,
+ compileGroup,
+ compileBackendClass,
+ serverId,
+ previous
+ ) {
+ if (!clsiCookiesEnabled) {
+ return
+ }
+ if (serverId == null) {
+ // We don't get a cookie back if it hasn't changed
+ return await rclient.expire(
+ buildKey(projectId, userId),
+ _getTTLInSeconds(previous)
+ )
+ }
+ if (!previous) {
+ // Initial assignment of a user+project or after clearing cache.
+ Metrics.inc('clsi-lb-assign-initial-backend')
+ } else {
+ await checkIsLoadSheddingEvent(
+ previous,
+ compileGroup,
+ compileBackendClass
+ )
+ }
+ if (rclientSecondary != null) {
+ await _setServerIdInRedis(
+ rclientSecondary,
+ projectId,
+ userId,
+ serverId
+ ).catch(() => {})
+ }
+ await _setServerIdInRedis(rclient, projectId, userId, serverId)
+ }
+
+ async function _setServerIdInRedis(rclient, projectId, userId, serverId) {
+ await rclient.setex(
+ buildKey(projectId, userId),
+ _getTTLInSeconds(serverId),
+ serverId
+ )
+ }
+
+ async function clearServerId(projectId, userId) {
+ if (!clsiCookiesEnabled) {
+ return
+ }
+ try {
+ await rclient.del(buildKey(projectId, userId))
+ } catch (err) {
+ // redis errors need wrapping as the instance may be shared
+ throw new OError(
+ 'Failed to clear clsi persistence',
+ { projectId, userId },
+ err
+ )
+ }
+ }
+
+ const cookieManager = {
+ _parseServerIdFromResponse,
+ promises: {
+ getServerId,
+ clearServerId,
+ _populateServerIdViaRequest,
+ setServerId,
},
}
- cookieManager.promises = promisifyAll(cookieManager, {
- without: [
- '_parseServerIdFromResponse',
- 'checkIsLoadSheddingEvent',
- '_getTTLInSeconds',
- ],
- multiResult: {
- getCookieJar: ['jar', 'clsiServerId'],
- },
- })
+
return cookieManager
}
+
+module.exports = ClsiCookieManagerFactory
diff --git a/services/web/app/src/Features/Compile/CompileController.js b/services/web/app/src/Features/Compile/CompileController.js
index f1d37e7638..e07fe49c80 100644
--- a/services/web/app/src/Features/Compile/CompileController.js
+++ b/services/web/app/src/Features/Compile/CompileController.js
@@ -1,4 +1,3 @@
-let CompileController
const { URL, URLSearchParams } = require('url')
const { pipeline } = require('stream/promises')
const { Cookie } = require('tough-cookie')
@@ -17,7 +16,7 @@ const ClsiCookieManager = require('./ClsiCookieManager')(
const Path = require('path')
const AnalyticsManager = require('../Analytics/AnalyticsManager')
const SplitTestHandler = require('../SplitTests/SplitTestHandler')
-const { callbackify } = require('@overleaf/promise-utils')
+const { expressify } = require('@overleaf/promise-utils')
const {
fetchStreamWithResponse,
RequestFailedError,
@@ -34,17 +33,19 @@ function getOutputFilesArchiveSpecification(projectId, userId, buildId) {
const fileName = 'output.zip'
return {
path: fileName,
- url: CompileController._getFileUrl(projectId, userId, buildId, fileName),
+ url: _CompileController._getFileUrl(projectId, userId, buildId, fileName),
type: 'zip',
}
}
-function getImageNameForProject(projectId, callback) {
- ProjectGetter.getProject(projectId, { imageName: 1 }, (err, project) => {
- if (err) return callback(err)
- if (!project) return callback(new Error('project not found'))
- callback(null, project.imageName)
+async function getImageNameForProject(projectId) {
+ const project = await ProjectGetter.promises.getProject(projectId, {
+ imageName: 1,
})
+ if (!project) {
+ throw new Error('project not found')
+ }
+ return project.imageName
}
async function getPdfCachingMinChunkSize(req, res) {
@@ -53,7 +54,9 @@ async function getPdfCachingMinChunkSize(req, res) {
res,
'pdf-caching-min-chunk-size'
)
- if (variant === 'default') return 1_000_000
+ if (variant === 'default') {
+ return 1_000_000
+ }
return parseInt(variant, 10)
}
@@ -123,10 +126,9 @@ async function _getSplitTestOptions(req, res) {
pdfCachingMinChunkSize,
}
}
-const getSplitTestOptionsCb = callbackify(_getSplitTestOptions)
-module.exports = CompileController = {
- compile(req, res, next) {
+const _CompileController = {
+ async compile(req, res) {
res.setTimeout(COMPILE_TIMEOUT_MS)
const projectId = req.params.Project_id
const isAutoCompile = !!req.query.auto_compile
@@ -164,107 +166,95 @@ module.exports = CompileController = {
options.incrementalCompilesEnabled = true
}
- getSplitTestOptionsCb(req, res, (err, splitTestOptions) => {
- if (err) return next(err)
- let {
- compileFromClsiCache,
- populateClsiCache,
- enablePdfCaching,
- pdfCachingMinChunkSize,
- pdfDownloadDomain,
- } = splitTestOptions
- options.compileFromClsiCache = compileFromClsiCache
- options.populateClsiCache = populateClsiCache
- options.enablePdfCaching = enablePdfCaching
- if (enablePdfCaching) {
- options.pdfCachingMinChunkSize = pdfCachingMinChunkSize
- }
+ let {
+ compileFromClsiCache,
+ populateClsiCache,
+ enablePdfCaching,
+ pdfCachingMinChunkSize,
+ pdfDownloadDomain,
+ } = await _getSplitTestOptions(req, res)
+ options.compileFromClsiCache = compileFromClsiCache
+ options.populateClsiCache = populateClsiCache
+ options.enablePdfCaching = enablePdfCaching
+ if (enablePdfCaching) {
+ options.pdfCachingMinChunkSize = pdfCachingMinChunkSize
+ }
- CompileManager.compile(
- projectId,
- userId,
- options,
- (
- error,
+ const {
+ status,
+ outputFiles,
+ clsiServerId,
+ limits,
+ validationProblems,
+ stats,
+ timings,
+ outputUrlPrefix,
+ buildId,
+ clsiCacheShard,
+ } = await CompileManager.promises
+ .compile(projectId, userId, options)
+ .catch(error => {
+ Metrics.inc('compile-error')
+ throw error
+ })
+
+ Metrics.inc('compile-status', 1, { status })
+ if (pdfDownloadDomain && outputUrlPrefix) {
+ pdfDownloadDomain += outputUrlPrefix
+ }
+
+ if (limits) {
+ // For a compile request to be sent to clsi we need limits.
+ // If we get here without having the limits object populated, it is
+ // a reasonable assumption to make that nothing was compiled.
+ // We need to know the limits in order to make use of the events.
+ AnalyticsManager.recordEventForSession(
+ req.session,
+ 'compile-result-backend',
+ {
+ projectId,
+ ownerAnalyticsId: limits.ownerAnalyticsId,
status,
- outputFiles,
- clsiServerId,
- limits,
- validationProblems,
- stats,
- timings,
- outputUrlPrefix,
- buildId,
- clsiCacheShard
- ) => {
- if (error) {
- Metrics.inc('compile-error')
- return next(error)
- }
- Metrics.inc('compile-status', 1, { status })
- if (pdfDownloadDomain && outputUrlPrefix) {
- pdfDownloadDomain += outputUrlPrefix
- }
-
- if (limits) {
- // For a compile request to be sent to clsi we need limits.
- // If we get here without having the limits object populated, it is
- // a reasonable assumption to make that nothing was compiled.
- // We need to know the limits in order to make use of the events.
- AnalyticsManager.recordEventForSession(
- req.session,
- 'compile-result-backend',
- {
- projectId,
- ownerAnalyticsId: limits.ownerAnalyticsId,
- status,
- compileTime: timings?.compileE2E,
- timeout: limits.timeout === 60 ? 'short' : 'long',
- server: clsiServerId?.includes('-c2d-') ? 'faster' : 'normal',
- isAutoCompile,
- isInitialCompile: stats?.isInitialCompile === 1,
- restoredClsiCache: stats?.restoredClsiCache === 1,
- stopOnFirstError,
- }
- )
- }
-
- const outputFilesArchive = buildId
- ? getOutputFilesArchiveSpecification(projectId, userId, buildId)
- : null
-
- res.json({
- status,
- outputFiles,
- outputFilesArchive,
- compileGroup: limits?.compileGroup,
- clsiServerId,
- clsiCacheShard,
- validationProblems,
- stats,
- timings,
- outputUrlPrefix,
- pdfDownloadDomain,
- pdfCachingMinChunkSize,
- })
+ compileTime: timings?.compileE2E,
+ timeout: limits.timeout === 60 ? 'short' : 'long',
+ server: clsiServerId?.includes('-c2d-') ? 'faster' : 'normal',
+ isAutoCompile,
+ isInitialCompile: stats?.isInitialCompile === 1,
+ restoredClsiCache: stats?.restoredClsiCache === 1,
+ stopOnFirstError,
}
)
+ }
+
+ const outputFilesArchive = buildId
+ ? getOutputFilesArchiveSpecification(projectId, userId, buildId)
+ : null
+
+ res.json({
+ status,
+ outputFiles,
+ outputFilesArchive,
+ compileGroup: limits?.compileGroup,
+ clsiServerId,
+ clsiCacheShard,
+ validationProblems,
+ stats,
+ timings,
+ outputUrlPrefix,
+ pdfDownloadDomain,
+ pdfCachingMinChunkSize,
})
},
- stopCompile(req, res, next) {
+ async stopCompile(req, res) {
const projectId = req.params.Project_id
const userId = SessionManager.getLoggedInUserId(req.session)
- CompileManager.stopCompile(projectId, userId, function (error) {
- if (error) {
- return next(error)
- }
- res.sendStatus(200)
- })
+ await CompileManager.promises.stopCompile(projectId, userId)
+ res.sendStatus(200)
},
// Used for submissions through the public API
- compileSubmission(req, res, next) {
+ async compileSubmission(req, res) {
res.setTimeout(COMPILE_TIMEOUT_MS)
const submissionId = req.params.submission_id
const options = {}
@@ -285,195 +275,163 @@ module.exports = CompileController = {
options.compileBackendClass = Settings.apis.clsi.submissionBackendClass
options.timeout =
req.body?.timeout || Settings.defaultFeatures.compileTimeout
- ClsiManager.sendExternalRequest(
- submissionId,
- req.body,
- options,
- function (error, status, outputFiles, clsiServerId, validationProblems) {
- if (error) {
- return next(error)
- }
- res.json({
- status,
- outputFiles,
- clsiServerId,
- validationProblems,
- })
- }
- )
+ const { status, outputFiles, clsiServerId, validationProblems } =
+ await ClsiManager.promises.sendExternalRequest(
+ submissionId,
+ req.body,
+ options
+ )
+ res.json({
+ status,
+ outputFiles,
+ clsiServerId,
+ validationProblems,
+ })
},
- _getSplitTestOptions,
-
_getUserIdForCompile(req) {
if (!Settings.disablePerUserCompiles) {
return SessionManager.getLoggedInUserId(req.session)
}
return null
},
- _compileAsUser(req, callback) {
- callback(null, CompileController._getUserIdForCompile(req))
- },
- _downloadAsUser(req, callback) {
- callback(null, CompileController._getUserIdForCompile(req))
- },
- downloadPdf(req, res, next) {
+ async downloadPdf(req, res) {
Metrics.inc('pdf-downloads')
const projectId = req.params.Project_id
- const rateLimit = function (callback) {
+ const rateLimit = () =>
pdfDownloadRateLimiter
.consume(req.ip, 1, { method: 'ip' })
- .then(() => {
- callback(null, true)
- })
+ .then(() => true)
.catch(err => {
if (err instanceof Error) {
- callback(err)
- } else {
- callback(null, false)
+ throw err
}
+ return false
})
+
+ const project = await ProjectGetter.promises.getProject(projectId, {
+ name: 1,
+ })
+
+ res.contentType('application/pdf')
+ const filename = `${_CompileController._getSafeProjectName(project)}.pdf`
+
+ if (req.query.popupDownload) {
+ res.setContentDisposition('attachment', { filename })
+ } else {
+ res.setContentDisposition('inline', { filename })
}
- ProjectGetter.getProject(projectId, { name: 1 }, function (err, project) {
- if (err) {
- return next(err)
- }
- res.contentType('application/pdf')
- const filename = `${CompileController._getSafeProjectName(project)}.pdf`
+ let canContinue
+ try {
+ canContinue = await rateLimit()
+ } catch (err) {
+ logger.err({ err }, 'error checking rate limit for pdf download')
+ res.sendStatus(500)
+ return
+ }
- if (req.query.popupDownload) {
- res.setContentDisposition('attachment', { filename })
- } else {
- res.setContentDisposition('inline', { filename })
- }
+ if (!canContinue) {
+ logger.debug({ projectId, ip: req.ip }, 'rate limit hit downloading pdf')
+ res.sendStatus(500) // should it be 429?
+ } else {
+ const userId = CompileController._getUserIdForCompile(req)
- rateLimit(function (err, canContinue) {
- if (err) {
- logger.err({ err }, 'error checking rate limit for pdf download')
- res.sendStatus(500)
- } else if (!canContinue) {
- logger.debug(
- { projectId, ip: req.ip },
- 'rate limit hit downloading pdf'
- )
- res.sendStatus(500)
- } else {
- CompileController._downloadAsUser(req, function (error, userId) {
- if (error) {
- return next(error)
- }
- const url = CompileController._getFileUrl(
- projectId,
- userId,
- req.params.build_id,
- 'output.pdf'
- )
- CompileController.proxyToClsi(
- projectId,
- 'output-file',
- url,
- {},
- req,
- res,
- next
- )
- })
- }
- })
- })
+ const url = _CompileController._getFileUrl(
+ projectId,
+ userId,
+ req.params.build_id,
+ 'output.pdf'
+ )
+ await CompileController._proxyToClsi(
+ projectId,
+ 'output-file',
+ url,
+ {},
+ req,
+ res
+ )
+ }
},
_getSafeProjectName(project) {
return project.name.replace(/[^\p{L}\p{Nd}]/gu, '_')
},
- deleteAuxFiles(req, res, next) {
+ async deleteAuxFiles(req, res) {
const projectId = req.params.Project_id
const { clsiserverid } = req.query
- CompileController._compileAsUser(req, function (error, userId) {
- if (error) {
- return next(error)
- }
- CompileManager.deleteAuxFiles(
- projectId,
- userId,
- clsiserverid,
- function (error) {
- if (error) {
- return next(error)
- }
- res.sendStatus(200)
- }
- )
- })
+ const userId = await CompileController._getUserIdForCompile(req)
+ await CompileManager.promises.deleteAuxFiles(
+ projectId,
+ userId,
+ clsiserverid
+ )
+ res.sendStatus(200)
},
// this is only used by templates, so is not called with a userId
- compileAndDownloadPdf(req, res, next) {
+ async compileAndDownloadPdf(req, res) {
const projectId = req.params.project_id
- // pass userId as null, since templates are an "anonymous" compile
- CompileManager.compile(projectId, null, {}, (err, _status, outputFiles) => {
- if (err) {
- logger.err(
- { err, projectId },
- 'something went wrong compile and downloading pdf'
- )
- res.sendStatus(500)
- return
- }
- const pdf = outputFiles.find(f => f.path === 'output.pdf')
- if (!pdf) {
- logger.warn(
- { projectId },
- 'something went wrong compile and downloading pdf: no pdf'
- )
- res.sendStatus(500)
- return
- }
- CompileController.proxyToClsi(
- projectId,
- 'output-file',
- pdf.url,
- {},
- req,
- res,
- next
+
+ let outputFiles
+ try {
+ ;({ outputFiles } = await CompileManager.promises
+ // pass userId as null, since templates are an "anonymous" compile
+ .compile(projectId, null, {}))
+ } catch (err) {
+ logger.err(
+ { err, projectId },
+ 'something went wrong compile and downloading pdf'
)
- })
+ res.sendStatus(500)
+ return
+ }
+ const pdf = outputFiles.find(f => f.path === 'output.pdf')
+ if (!pdf) {
+ logger.warn(
+ { projectId },
+ 'something went wrong compile and downloading pdf: no pdf'
+ )
+ res.sendStatus(500)
+ return
+ }
+ await CompileController._proxyToClsi(
+ projectId,
+ 'output-file',
+ pdf.url,
+ {},
+ req,
+ res
+ )
},
- getFileFromClsi(req, res, next) {
+ async getFileFromClsi(req, res) {
const projectId = req.params.Project_id
- CompileController._downloadAsUser(req, function (error, userId) {
- if (error) {
- return next(error)
- }
+ const userId = CompileController._getUserIdForCompile(req)
- const qs = {}
+ const qs = {}
- const url = CompileController._getFileUrl(
- projectId,
- userId,
- req.params.build_id,
- req.params.file
- )
- CompileController.proxyToClsi(
- projectId,
- 'output-file',
- url,
- qs,
- req,
- res,
- next
- )
- })
+ const url = _CompileController._getFileUrl(
+ projectId,
+ userId,
+ req.params.build_id,
+ req.params.file
+ )
+ await CompileController._proxyToClsi(
+ projectId,
+ 'output-file',
+ url,
+ qs,
+ req,
+ res
+ )
},
- getFileFromClsiWithoutUser(req, res, next) {
+ async getFileFromClsiWithoutUser(req, res) {
const submissionId = req.params.submission_id
- const url = CompileController._getFileUrl(
+ const url = _CompileController._getFileUrl(
submissionId,
null,
req.params.build_id,
@@ -486,15 +444,14 @@ module.exports = CompileController = {
Settings.defaultFeatures.compileGroup,
compileBackendClass: Settings.apis.clsi.submissionBackendClass,
}
- CompileController.proxyToClsiWithLimits(
+ await CompileController._proxyToClsiWithLimits(
submissionId,
'output-file',
url,
{},
limits,
req,
- res,
- next
+ res
)
},
@@ -522,51 +479,42 @@ module.exports = CompileController = {
return `${path}/${action}`
},
- proxySyncPdf(req, res, next) {
+ async proxySyncPdf(req, res) {
const projectId = req.params.Project_id
const { page, h, v, editorId, buildId } = req.query
if (!page?.match(/^\d+$/)) {
- return next(new Error('invalid page parameter'))
+ throw new Error('invalid page parameter')
}
if (!h?.match(/^-?\d+\.\d+$/)) {
- return next(new Error('invalid h parameter'))
+ throw new Error('invalid h parameter')
}
if (!v?.match(/^-?\d+\.\d+$/)) {
- return next(new Error('invalid v parameter'))
+ throw new Error('invalid v parameter')
}
// whether this request is going to a per-user container
- CompileController._compileAsUser(req, function (error, userId) {
- if (error) {
- return next(error)
- }
- getImageNameForProject(projectId, (error, imageName) => {
- if (error) return next(error)
+ const userId = CompileController._getUserIdForCompile(req)
- getSplitTestOptionsCb(req, res, (error, splitTestOptions) => {
- if (error) return next(error)
- const { compileFromClsiCache } = splitTestOptions
+ const imageName = await getImageNameForProject(projectId)
- const url = CompileController._getUrl(projectId, userId, 'sync/pdf')
+ const { compileFromClsiCache } = await _getSplitTestOptions(req, res)
- CompileController.proxyToClsi(
- projectId,
- 'sync-to-pdf',
- url,
- { page, h, v, imageName, editorId, buildId, compileFromClsiCache },
- req,
- res,
- next
- )
- })
- })
- })
+ const url = _CompileController._getUrl(projectId, userId, 'sync/pdf')
+
+ await CompileController._proxyToClsi(
+ projectId,
+ 'sync-to-pdf',
+ url,
+ { page, h, v, imageName, editorId, buildId, compileFromClsiCache },
+ req,
+ res
+ )
},
- proxySyncCode(req, res, next) {
+ async proxySyncCode(req, res) {
const projectId = req.params.Project_id
const { file, line, column, editorId, buildId } = req.query
if (file == null) {
- return next(new Error('missing file parameter'))
+ throw new Error('missing file parameter')
}
// Check that we are dealing with a simple file path (this is not
// strictly needed because synctex uses this parameter as a label
@@ -575,225 +523,219 @@ module.exports = CompileController = {
// allow those by replacing /./ with /
const testPath = file.replace('/./', '/')
if (Path.resolve('/', testPath) !== `/${testPath}`) {
- return next(new Error('invalid file parameter'))
+ throw new Error('invalid file parameter')
}
if (!line?.match(/^\d+$/)) {
- return next(new Error('invalid line parameter'))
+ throw new Error('invalid line parameter')
}
if (!column?.match(/^\d+$/)) {
- return next(new Error('invalid column parameter'))
+ throw new Error('invalid column parameter')
}
- CompileController._compileAsUser(req, function (error, userId) {
- if (error) {
- return next(error)
- }
- getImageNameForProject(projectId, (error, imageName) => {
- if (error) return next(error)
+ const userId = CompileController._getUserIdForCompile(req)
- getSplitTestOptionsCb(req, res, (error, splitTestOptions) => {
- if (error) return next(error)
- const { compileFromClsiCache } = splitTestOptions
+ const imageName = await getImageNameForProject(projectId)
- const url = CompileController._getUrl(projectId, userId, 'sync/code')
- CompileController.proxyToClsi(
- projectId,
- 'sync-to-code',
- url,
- {
- file,
- line,
- column,
- imageName,
- editorId,
- buildId,
- compileFromClsiCache,
- },
- req,
- res,
- next
- )
- })
- })
- })
- },
+ const { compileFromClsiCache } = await _getSplitTestOptions(req, res)
- proxyToClsi(projectId, action, url, qs, req, res, next) {
- CompileManager.getProjectCompileLimits(projectId, function (error, limits) {
- if (error) {
- return next(error)
- }
- CompileController.proxyToClsiWithLimits(
- projectId,
- action,
- url,
- qs,
- limits,
- req,
- res,
- next
- )
- })
- },
-
- proxyToClsiWithLimits(projectId, action, url, qs, limits, req, res, next) {
- _getPersistenceOptions(
- req,
+ const url = _CompileController._getUrl(projectId, userId, 'sync/code')
+ await CompileController._proxyToClsi(
projectId,
- limits.compileGroup,
- limits.compileBackendClass,
- (err, persistenceOptions) => {
- if (err) {
- OError.tag(err, 'error getting cookie jar for clsi request')
- return next(err)
- }
- url = new URL(`${Settings.apis.clsi.url}${url}`)
- url.search = new URLSearchParams({
- ...persistenceOptions.qs,
- ...qs,
- }).toString()
- const timer = new Metrics.Timer(
- 'proxy_to_clsi',
- 1,
- { path: action },
- [0, 100, 1000, 2000, 5000, 10000, 15000, 20000, 30000, 45000, 60000]
- )
- Metrics.inc('proxy_to_clsi', 1, { path: action, status: 'start' })
- fetchStreamWithResponse(url.href, {
- method: req.method,
- signal: AbortSignal.timeout(60 * 1000),
- headers: persistenceOptions.headers,
- })
- .then(({ stream, response }) => {
- if (req.destroyed) {
- // The client has disconnected already, avoid trying to write into the broken connection.
- Metrics.inc('proxy_to_clsi', 1, {
- path: action,
- status: 'req-aborted',
- })
- return
- }
- Metrics.inc('proxy_to_clsi', 1, {
- path: action,
- status: response.status,
- })
-
- for (const key of ['Content-Length', 'Content-Type']) {
- if (response.headers.has(key)) {
- res.setHeader(key, response.headers.get(key))
- }
- }
- res.writeHead(response.status)
- return pipeline(stream, res)
- })
- .then(() => {
- timer.labels.status = 'success'
- timer.done()
- })
- .catch(err => {
- const reqAborted = Boolean(req.destroyed)
- const status = reqAborted ? 'req-aborted-late' : 'error'
- timer.labels.status = status
- const duration = timer.done()
- Metrics.inc('proxy_to_clsi', 1, { path: action, status })
- const streamingStarted = Boolean(res.headersSent)
- if (!streamingStarted) {
- if (err instanceof RequestFailedError) {
- res.sendStatus(err.response.status)
- } else {
- res.sendStatus(500)
- }
- }
- if (
- streamingStarted &&
- reqAborted &&
- err.code === 'ERR_STREAM_PREMATURE_CLOSE'
- ) {
- // Ignore noisy spurious error
- return
- }
- if (
- err instanceof RequestFailedError &&
- ['sync-to-code', 'sync-to-pdf', 'output-file'].includes(action)
- ) {
- // Ignore noisy error
- // https://github.com/overleaf/internal/issues/15201
- return
- }
- logger.warn(
- {
- err,
- projectId,
- url,
- action,
- reqAborted,
- streamingStarted,
- duration,
- },
- 'CLSI proxy error'
- )
- })
- }
+ 'sync-to-code',
+ url,
+ {
+ file,
+ line,
+ column,
+ imageName,
+ editorId,
+ buildId,
+ compileFromClsiCache,
+ },
+ req,
+ res
)
},
- wordCount(req, res, next) {
+ async _proxyToClsi(projectId, action, url, qs, req, res) {
+ const limits =
+ await CompileManager.promises.getProjectCompileLimits(projectId)
+ return CompileController._proxyToClsiWithLimits(
+ projectId,
+ action,
+ url,
+ qs,
+ limits,
+ req,
+ res
+ )
+ },
+
+ async _proxyToClsiWithLimits(projectId, action, url, qs, limits, req, res) {
+ const persistenceOptions = await _getPersistenceOptions(
+ req,
+ projectId,
+ limits.compileGroup,
+ limits.compileBackendClass
+ ).catch(err => {
+ OError.tag(err, 'error getting cookie jar for clsi request')
+ throw err
+ })
+
+ url = new URL(`${Settings.apis.clsi.url}${url}`)
+ url.search = new URLSearchParams({
+ ...persistenceOptions.qs,
+ ...qs,
+ }).toString()
+ const timer = new Metrics.Timer(
+ 'proxy_to_clsi',
+ 1,
+ { path: action },
+ [0, 100, 1000, 2000, 5000, 10000, 15000, 20000, 30000, 45000, 60000]
+ )
+ Metrics.inc('proxy_to_clsi', 1, { path: action, status: 'start' })
+ try {
+ const { stream, response } = await fetchStreamWithResponse(url.href, {
+ method: req.method,
+ signal: AbortSignal.timeout(60 * 1000),
+ headers: persistenceOptions.headers,
+ })
+ if (req.destroyed) {
+ // The client has disconnected already, avoid trying to write into the broken connection.
+ Metrics.inc('proxy_to_clsi', 1, {
+ path: action,
+ status: 'req-aborted',
+ })
+ return
+ }
+ Metrics.inc('proxy_to_clsi', 1, {
+ path: action,
+ status: response.status,
+ })
+
+ for (const key of ['Content-Length', 'Content-Type']) {
+ if (response.headers.has(key)) {
+ res.setHeader(key, response.headers.get(key))
+ }
+ }
+ res.writeHead(response.status)
+ await pipeline(stream, res)
+ timer.labels.status = 'success'
+ timer.done()
+ } catch (err) {
+ const reqAborted = Boolean(req.destroyed)
+ const status = reqAborted ? 'req-aborted-late' : 'error'
+ timer.labels.status = status
+ const duration = timer.done()
+ Metrics.inc('proxy_to_clsi', 1, { path: action, status })
+ const streamingStarted = Boolean(res.headersSent)
+ if (!streamingStarted) {
+ if (err instanceof RequestFailedError) {
+ res.sendStatus(err.response.status)
+ } else {
+ res.sendStatus(500)
+ }
+ }
+ if (
+ streamingStarted &&
+ reqAborted &&
+ err.code === 'ERR_STREAM_PREMATURE_CLOSE'
+ ) {
+ // Ignore noisy spurious error
+ return
+ }
+ if (
+ err instanceof RequestFailedError &&
+ ['sync-to-code', 'sync-to-pdf', 'output-file'].includes(action)
+ ) {
+ // Ignore noisy error
+ // https://github.com/overleaf/internal/issues/15201
+ return
+ }
+ logger.warn(
+ {
+ err,
+ projectId,
+ url,
+ action,
+ reqAborted,
+ streamingStarted,
+ duration,
+ },
+ 'CLSI proxy error'
+ )
+ }
+ },
+
+ async wordCount(req, res) {
const projectId = req.params.Project_id
const file = req.query.file || false
const { clsiserverid } = req.query
- CompileController._compileAsUser(req, function (error, userId) {
- if (error) {
- return next(error)
- }
- CompileManager.wordCount(
- projectId,
- userId,
- file,
- clsiserverid,
- function (error, body) {
- if (error) {
- return next(error)
- }
- res.json(body)
- }
- )
- })
+ const userId = CompileController._getUserIdForCompile(req)
+
+ const body = await CompileManager.promises.wordCount(
+ projectId,
+ userId,
+ file,
+ clsiserverid
+ )
+ res.json(body)
},
}
-function _getPersistenceOptions(
+async function _getPersistenceOptions(
req,
projectId,
compileGroup,
- compileBackendClass,
- callback
+ compileBackendClass
) {
const { clsiserverid } = req.query
const userId = SessionManager.getLoggedInUserId(req)
if (clsiserverid && typeof clsiserverid === 'string') {
- callback(null, {
+ return {
qs: { clsiserverid, compileGroup, compileBackendClass },
headers: {},
- })
+ }
} else {
- ClsiCookieManager.getServerId(
+ const clsiServerId = await ClsiCookieManager.promises.getServerId(
projectId,
userId,
compileGroup,
- compileBackendClass,
- (err, clsiServerId) => {
- if (err) return callback(err)
- callback(null, {
- qs: { compileGroup, compileBackendClass },
- headers: clsiServerId
- ? {
- Cookie: new Cookie({
- key: Settings.clsiCookie.key,
- value: clsiServerId,
- }).cookieString(),
- }
- : {},
- })
- }
+ compileBackendClass
)
+ return {
+ qs: { compileGroup, compileBackendClass },
+ headers: clsiServerId
+ ? {
+ Cookie: new Cookie({
+ key: Settings.clsiCookie.key,
+ value: clsiServerId,
+ }).cookieString(),
+ }
+ : {},
+ }
}
}
+
+const CompileController = {
+ compile: expressify(_CompileController.compile),
+ stopCompile: expressify(_CompileController.stopCompile),
+ compileSubmission: expressify(_CompileController.compileSubmission),
+  downloadPdf: expressify(_CompileController.downloadPdf),
+ compileAndDownloadPdf: expressify(_CompileController.compileAndDownloadPdf),
+ deleteAuxFiles: expressify(_CompileController.deleteAuxFiles),
+ getFileFromClsi: expressify(_CompileController.getFileFromClsi),
+ getFileFromClsiWithoutUser: expressify(
+ _CompileController.getFileFromClsiWithoutUser
+ ),
+ proxySyncPdf: expressify(_CompileController.proxySyncPdf),
+ proxySyncCode: expressify(_CompileController.proxySyncCode),
+ wordCount: expressify(_CompileController.wordCount),
+
+ _getSafeProjectName: _CompileController._getSafeProjectName,
+ _getSplitTestOptions,
+ _getUserIdForCompile: _CompileController._getUserIdForCompile,
+ _proxyToClsi: _CompileController._proxyToClsi,
+ _proxyToClsiWithLimits: _CompileController._proxyToClsiWithLimits,
+}
+
+module.exports = CompileController
diff --git a/services/web/app/src/router.mjs b/services/web/app/src/router.mjs
index 5e1a21c063..9201ad4c55 100644
--- a/services/web/app/src/router.mjs
+++ b/services/web/app/src/router.mjs
@@ -1187,7 +1187,9 @@ async function initialize(webRouter, privateApiRouter, publicApiRouter) {
const sendRes = _.once(function (statusCode, message) {
res.status(statusCode)
plainTextResponse(res, message)
- ClsiCookieManager.clearServerId(projectId, testUserId, () => {})
+ ClsiCookieManager.promises
+ .clearServerId(projectId, testUserId)
+ .catch(() => {})
}) // force every compile to a new server
// set a timeout
let handler = setTimeout(function () {
diff --git a/services/web/test/unit/src/Compile/ClsiCookieManagerTests.js b/services/web/test/unit/src/Compile/ClsiCookieManagerTests.js
index b61991a100..082262c853 100644
--- a/services/web/test/unit/src/Compile/ClsiCookieManagerTests.js
+++ b/services/web/test/unit/src/Compile/ClsiCookieManagerTests.js
@@ -1,26 +1,22 @@
const sinon = require('sinon')
-const { assert, expect } = require('chai')
+const { expect } = require('chai')
const modulePath = '../../../../app/src/Features/Compile/ClsiCookieManager.js'
const SandboxedModule = require('sandboxed-module')
-const realRequst = require('request')
describe('ClsiCookieManager', function () {
beforeEach(function () {
this.redis = {
auth() {},
get: sinon.stub(),
- setex: sinon.stub().callsArg(3),
+ setex: sinon.stub().resolves(),
}
this.project_id = '123423431321-proj-id'
this.user_id = 'abc-user-id'
- this.request = {
- post: sinon.stub(),
- cookie: realRequst.cookie,
- jar: realRequst.jar,
- defaults: () => {
- return this.request
- },
+ this.fetchUtils = {
+ fetchNothing: sinon.stub().returns(Promise.resolve()),
+ fetchStringWithResponse: sinon.stub().returns(Promise.resolve()),
}
+ this.metrics = { inc: sinon.stub() }
this.settings = {
redis: {
web: 'redis.something',
@@ -41,7 +37,8 @@ describe('ClsiCookieManager', function () {
client: () => this.redis,
}),
'@overleaf/settings': this.settings,
- request: this.request,
+ '@overleaf/fetch-utils': this.fetchUtils,
+ '@overleaf/metrics': this.metrics,
}
this.ClsiCookieManager = SandboxedModule.require(modulePath, {
requires: this.requires,
@@ -49,74 +46,56 @@ describe('ClsiCookieManager', function () {
})
describe('getServerId', function () {
- it('should call get for the key', function (done) {
- this.redis.get.callsArgWith(1, null, 'clsi-7')
- this.ClsiCookieManager.getServerId(
+ it('should call get for the key', async function () {
+ this.redis.get.resolves('clsi-7')
+ const serverId = await this.ClsiCookieManager.promises.getServerId(
this.project_id,
this.user_id,
'',
- 'e2',
- (err, serverId) => {
- if (err) {
- return done(err)
- }
- this.redis.get
- .calledWith(`clsiserver:${this.project_id}:${this.user_id}`)
- .should.equal(true)
- serverId.should.equal('clsi-7')
- done()
- }
+ 'e2'
)
+ this.redis.get
+ .calledWith(`clsiserver:${this.project_id}:${this.user_id}`)
+ .should.equal(true)
+ serverId.should.equal('clsi-7')
})
- it('should _populateServerIdViaRequest if no key is found', function (done) {
- this.ClsiCookieManager._populateServerIdViaRequest = sinon
+ it('should _populateServerIdViaRequest if no key is found', async function () {
+ this.ClsiCookieManager.promises._populateServerIdViaRequest = sinon
.stub()
- .yields(null)
- this.redis.get.callsArgWith(1, null)
- this.ClsiCookieManager.getServerId(
+ .resolves()
+ this.redis.get.resolves(null)
+ await this.ClsiCookieManager.promises.getServerId(
this.project_id,
this.user_id,
- '',
- (err, serverId) => {
- if (err) {
- return done(err)
- }
- this.ClsiCookieManager._populateServerIdViaRequest
- .calledWith(this.project_id, this.user_id)
- .should.equal(true)
- done()
- }
+ ''
)
+ this.ClsiCookieManager.promises._populateServerIdViaRequest
+ .calledWith(this.project_id, this.user_id)
+ .should.equal(true)
})
- it('should _populateServerIdViaRequest if no key is blank', function (done) {
- this.ClsiCookieManager._populateServerIdViaRequest = sinon
+ it('should _populateServerIdViaRequest if no key is blank', async function () {
+ this.ClsiCookieManager.promises._populateServerIdViaRequest = sinon
.stub()
- .yields(null)
- this.redis.get.callsArgWith(1, null, '')
- this.ClsiCookieManager.getServerId(
+ .resolves(null)
+ this.redis.get.resolves('')
+ await this.ClsiCookieManager.promises.getServerId(
this.project_id,
this.user_id,
'',
- 'e2',
- (err, serverId) => {
- if (err) {
- return done(err)
- }
- this.ClsiCookieManager._populateServerIdViaRequest
- .calledWith(this.project_id, this.user_id)
- .should.equal(true)
- done()
- }
+ 'e2'
)
+ this.ClsiCookieManager.promises._populateServerIdViaRequest
+ .calledWith(this.project_id, this.user_id)
+ .should.equal(true)
})
})
describe('_populateServerIdViaRequest', function () {
beforeEach(function () {
this.clsiServerId = 'server-id'
- this.ClsiCookieManager.setServerId = sinon.stub().yields()
+ this.ClsiCookieManager.promises.setServerId = sinon.stub().resolves()
})
describe('with a server id in the response', function () {
@@ -128,71 +107,54 @@ describe('ClsiCookieManager', function () {
],
},
}
- this.request.post.callsArgWith(1, null, this.response)
+ this.fetchUtils.fetchNothing.returns(this.response)
})
- it('should make a request to the clsi', function (done) {
- this.ClsiCookieManager._populateServerIdViaRequest(
+ it('should make a request to the clsi', async function () {
+ await this.ClsiCookieManager.promises._populateServerIdViaRequest(
this.project_id,
this.user_id,
'standard',
- 'e2',
- (err, serverId) => {
- if (err) {
- return done(err)
- }
- const args = this.ClsiCookieManager.setServerId.args[0]
- args[0].should.equal(this.project_id)
- args[1].should.equal(this.user_id)
- args[2].should.equal('standard')
- args[3].should.equal('e2')
- args[4].should.deep.equal(this.clsiServerId)
- done()
- }
+ 'e2'
)
+ const args = this.ClsiCookieManager.promises.setServerId.args[0]
+ args[0].should.equal(this.project_id)
+ args[1].should.equal(this.user_id)
+ args[2].should.equal('standard')
+ args[3].should.equal('e2')
+ args[4].should.deep.equal(this.clsiServerId)
})
- it('should return the server id', function (done) {
- this.ClsiCookieManager._populateServerIdViaRequest(
- this.project_id,
- this.user_id,
- '',
- 'e2',
- (err, serverId) => {
- if (err) {
- return done(err)
- }
- serverId.should.equal(this.clsiServerId)
- done()
- }
- )
+ it('should return the server id', async function () {
+ const serverId =
+ await this.ClsiCookieManager.promises._populateServerIdViaRequest(
+ this.project_id,
+ this.user_id,
+ '',
+ 'e2'
+ )
+ serverId.should.equal(this.clsiServerId)
})
})
describe('without a server id in the response', function () {
beforeEach(function () {
this.response = { headers: {} }
- this.request.post.yields(null, this.response)
+ this.fetchUtils.fetchNothing.returns(this.response)
})
- it('should not set the server id there is no server id in the response', function (done) {
+ it('should not set the server id there is no server id in the response', async function () {
this.ClsiCookieManager._parseServerIdFromResponse = sinon
.stub()
.returns(null)
- this.ClsiCookieManager.setServerId(
+ await this.ClsiCookieManager.promises.setServerId(
this.project_id,
this.user_id,
'standard',
'e2',
this.clsiServerId,
- null,
- err => {
- if (err) {
- return done(err)
- }
- this.redis.setex.called.should.equal(false)
- done()
- }
+ null
)
+ this.redis.setex.called.should.equal(false)
})
})
})
@@ -205,162 +167,148 @@ describe('ClsiCookieManager', function () {
.returns('clsi-8')
})
- it('should set the server id with a ttl', function (done) {
- this.ClsiCookieManager.setServerId(
+ it('should set the server id with a ttl', async function () {
+ await this.ClsiCookieManager.promises.setServerId(
this.project_id,
this.user_id,
'standard',
'e2',
this.clsiServerId,
- null,
- err => {
- if (err) {
- return done(err)
- }
- this.redis.setex.should.have.been.calledWith(
- `clsiserver:${this.project_id}:${this.user_id}`,
- this.settings.clsiCookie.ttlInSeconds,
- this.clsiServerId
- )
- done()
- }
+ null
+ )
+ this.redis.setex.should.have.been.calledWith(
+ `clsiserver:${this.project_id}:${this.user_id}`,
+ this.settings.clsiCookie.ttlInSeconds,
+ this.clsiServerId
)
})
- it('should set the server id with the regular ttl for reg instance', function (done) {
+ it('should set the server id with the regular ttl for reg instance', async function () {
this.clsiServerId = 'clsi-reg-8'
- this.ClsiCookieManager.setServerId(
+ await this.ClsiCookieManager.promises.setServerId(
this.project_id,
this.user_id,
'standard',
'e2',
this.clsiServerId,
- null,
- err => {
- if (err) {
- return done(err)
- }
- expect(this.redis.setex).to.have.been.calledWith(
- `clsiserver:${this.project_id}:${this.user_id}`,
- this.settings.clsiCookie.ttlInSecondsRegular,
- this.clsiServerId
- )
- done()
- }
+ null
+ )
+ expect(this.redis.setex).to.have.been.calledWith(
+ `clsiserver:${this.project_id}:${this.user_id}`,
+ this.settings.clsiCookie.ttlInSecondsRegular,
+ this.clsiServerId
)
})
- it('should not set the server id if clsiCookies are not enabled', function (done) {
+ it('should not set the server id if clsiCookies are not enabled', async function () {
delete this.settings.clsiCookie.key
- this.ClsiCookieManager = SandboxedModule.require(modulePath, {
+ this.ClsiCookieManager2 = SandboxedModule.require(modulePath, {
globals: {
console,
},
requires: this.requires,
})()
- this.ClsiCookieManager.setServerId(
+ await this.ClsiCookieManager2.promises.setServerId(
this.project_id,
this.user_id,
'standard',
'e2',
this.clsiServerId,
- null,
- err => {
- if (err) {
- return done(err)
- }
- this.redis.setex.called.should.equal(false)
- done()
- }
+ null
)
+ this.redis.setex.called.should.equal(false)
})
- it('should also set in the secondary if secondary redis is enabled', function (done) {
- this.redis_secondary = { setex: sinon.stub().callsArg(3) }
+ it('should also set in the secondary if secondary redis is enabled', async function () {
+ this.redis_secondary = { setex: sinon.stub().resolves() }
this.settings.redis.clsi_cookie_secondary = {}
this.RedisWrapper.client = sinon.stub()
this.RedisWrapper.client.withArgs('clsi_cookie').returns(this.redis)
this.RedisWrapper.client
.withArgs('clsi_cookie_secondary')
.returns(this.redis_secondary)
- this.ClsiCookieManager = SandboxedModule.require(modulePath, {
+ this.ClsiCookieManager2 = SandboxedModule.require(modulePath, {
globals: {
console,
},
requires: this.requires,
})()
- this.ClsiCookieManager._parseServerIdFromResponse = sinon
+ this.ClsiCookieManager2._parseServerIdFromResponse = sinon
.stub()
.returns('clsi-8')
- this.ClsiCookieManager.setServerId(
+ await this.ClsiCookieManager2.promises.setServerId(
this.project_id,
this.user_id,
'standard',
'e2',
this.clsiServerId,
- null,
- err => {
- if (err) {
- return done(err)
- }
- this.redis_secondary.setex.should.have.been.calledWith(
- `clsiserver:${this.project_id}:${this.user_id}`,
- this.settings.clsiCookie.ttlInSeconds,
- this.clsiServerId
+ null
+ )
+ this.redis_secondary.setex.should.have.been.calledWith(
+ `clsiserver:${this.project_id}:${this.user_id}`,
+ this.settings.clsiCookie.ttlInSeconds,
+ this.clsiServerId
+ )
+ })
+
+ describe('checkIsLoadSheddingEvent', function () {
+ beforeEach(function () {
+ this.fetchUtils.fetchStringWithResponse.reset()
+ this.call = async () => {
+ await this.ClsiCookieManager.promises.setServerId(
+ this.project_id,
+ this.user_id,
+ 'standard',
+ 'e2',
+ this.clsiServerId,
+ 'previous-clsi-server-id'
+ )
+ expect(
+ this.fetchUtils.fetchStringWithResponse
+ ).to.have.been.calledWith(
+ `${this.settings.apis.clsi.url}/instance-state?clsiserverid=previous-clsi-server-id&compileGroup=standard&compileBackendClass=e2`,
+ { method: 'GET', signal: sinon.match.instanceOf(AbortSignal) }
)
- done()
}
- )
- })
- })
+ })
- describe('getCookieJar', function () {
- beforeEach(function () {
- this.ClsiCookieManager.getServerId = sinon.stub().yields(null, 'clsi-11')
- })
+ it('should report "load-shedding" when previous is UP', async function () {
+ this.fetchUtils.fetchStringWithResponse.resolves({
+ response: { status: 200 },
+ body: 'previous-clsi-server-id,UP\n',
+ })
+ await this.call()
+ expect(this.metrics.inc).to.have.been.calledWith(
+ 'clsi-lb-switch-backend',
+ 1,
+ { status: 'load-shedding' }
+ )
+ })
- it('should return a jar with the cookie set populated from redis', function (done) {
- this.ClsiCookieManager.getCookieJar(
- this.project_id,
- this.user_id,
- '',
- 'e2',
- (err, jar) => {
- if (err) {
- return done(err)
- }
- jar._jar.store.idx['clsi.example.com']['/'][
- this.settings.clsiCookie.key
- ].key.should.equal
- jar._jar.store.idx['clsi.example.com']['/'][
- this.settings.clsiCookie.key
- ].value.should.equal('clsi-11')
- done()
- }
- )
- })
+ it('should report "cycle" when other is UP', async function () {
+ this.fetchUtils.fetchStringWithResponse.resolves({
+ response: { status: 200 },
+ body: 'other-clsi-server-id,UP\n',
+ })
+ await this.call()
+ expect(this.metrics.inc).to.have.been.calledWith(
+ 'clsi-lb-switch-backend',
+ 1,
+ { status: 'cycle' }
+ )
+ })
- it('should return empty cookie jar if clsiCookies are not enabled', function (done) {
- delete this.settings.clsiCookie.key
- this.ClsiCookieManager = SandboxedModule.require(modulePath, {
- globals: {
- console,
- },
- requires: this.requires,
- })()
- this.ClsiCookieManager.getCookieJar(
- this.project_id,
- this.user_id,
- '',
- 'e2',
- (err, jar) => {
- if (err) {
- return done(err)
- }
- assert.deepEqual(jar, realRequst.jar())
- done()
- }
- )
+ it('should report "cycle" when previous is 404', async function () {
+ this.fetchUtils.fetchStringWithResponse.resolves({
+ response: { status: 404 },
+ })
+ await this.call()
+ expect(this.metrics.inc).to.have.been.calledWith(
+ 'clsi-lb-switch-backend',
+ 1,
+ { status: 'cycle' }
+ )
+ })
})
})
})
diff --git a/services/web/test/unit/src/Compile/CompileControllerTests.js b/services/web/test/unit/src/Compile/CompileControllerTests.js
index 07e433b5af..df7276131c 100644
--- a/services/web/test/unit/src/Compile/CompileControllerTests.js
+++ b/services/web/test/unit/src/Compile/CompileControllerTests.js
@@ -1,4 +1,3 @@
-/* eslint-disable mocha/handle-done-callback */
const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = '../../../../app/src/Features/Compile/CompileController.js'
@@ -19,8 +18,15 @@ describe('CompileController', function () {
compileTimeout: 100,
},
}
- this.CompileManager = { compile: sinon.stub() }
- this.ClsiManager = {}
+ this.CompileManager = {
+ promises: {
+ compile: sinon.stub(),
+ getProjectCompileLimits: sinon.stub(),
+ },
+ }
+ this.ClsiManager = {
+ promises: {},
+ }
this.UserGetter = { getUser: sinon.stub() }
this.rateLimiter = {
consume: sinon.stub().resolves(),
@@ -47,10 +53,11 @@ describe('CompileController', function () {
},
}
this.ClsiCookieManager = {
- getServerId: sinon.stub().yields(null, 'clsi-server-id-from-redis'),
+ promises: {
+ getServerId: sinon.stub().resolves('clsi-server-id-from-redis'),
+ },
}
this.SessionManager = {
- getLoggedInUser: sinon.stub().callsArgWith(1, null, this.user),
getLoggedInUserId: sinon.stub().returns(this.user_id),
getSessionUser: sinon.stub().returns(this.user),
isUserLoggedIn: sinon.stub().returns(true),
@@ -76,8 +83,9 @@ describe('CompileController', function () {
'stream/promises': { pipeline: this.pipeline },
'@overleaf/settings': this.settings,
'@overleaf/fetch-utils': this.fetchUtils,
- request: (this.request = sinon.stub()),
- '../Project/ProjectGetter': (this.ProjectGetter = {}),
+ '../Project/ProjectGetter': (this.ProjectGetter = {
+ promises: {},
+ }),
'@overleaf/metrics': (this.Metrics = {
inc: sinon.stub(),
Timer: class {
@@ -121,25 +129,23 @@ describe('CompileController', function () {
beforeEach(function () {
this.req.params = { Project_id: this.projectId }
this.req.session = {}
- this.CompileManager.compile = sinon.stub().callsArgWith(
- 3,
- null,
- (this.status = 'success'),
- (this.outputFiles = [
+ this.CompileManager.promises.compile = sinon.stub().resolves({
+ status: (this.status = 'success'),
+ outputFiles: (this.outputFiles = [
{
path: 'output.pdf',
url: `/project/${this.projectId}/user/${this.user_id}/build/id/output.pdf`,
type: 'pdf',
},
]),
- undefined,
- undefined,
- undefined,
- undefined,
- undefined,
- undefined,
- this.build_id
- )
+ clsiServerId: undefined,
+ limits: undefined,
+ validationProblems: undefined,
+ stats: undefined,
+ timings: undefined,
+ outputUrlPrefix: undefined,
+ buildId: this.build_id,
+ })
})
describe('pdfDownloadDomain', function () {
@@ -148,9 +154,8 @@ describe('CompileController', function () {
})
describe('when clsi does not emit zone prefix', function () {
- beforeEach(function (done) {
- this.res.callback = done
- this.CompileController.compile(this.req, this.res, this.next)
+ beforeEach(async function () {
+ await this.CompileController.compile(this.req, this.res, this.next)
})
it('should add domain verbatim', function () {
@@ -177,28 +182,25 @@ describe('CompileController', function () {
})
describe('when clsi emits a zone prefix', function () {
- beforeEach(function (done) {
- this.res.callback = done
- this.CompileManager.compile = sinon.stub().callsArgWith(
- 3,
- null,
- (this.status = 'success'),
- (this.outputFiles = [
+ beforeEach(async function () {
+ this.CompileManager.promises.compile = sinon.stub().resolves({
+ status: (this.status = 'success'),
+ outputFiles: (this.outputFiles = [
{
path: 'output.pdf',
url: `/project/${this.projectId}/user/${this.user_id}/build/id/output.pdf`,
type: 'pdf',
},
]),
- undefined, // clsiServerId
- undefined, // limits
- undefined, // validationProblems
- undefined, // stats
- undefined, // timings
- '/zone/b',
- this.build_id
- )
- this.CompileController.compile(this.req, this.res, this.next)
+ clsiServerId: undefined,
+ limits: undefined,
+ validationProblems: undefined,
+ stats: undefined,
+ timings: undefined,
+ outputUrlPrefix: '/zone/b',
+ buildId: this.build_id,
+ })
+ await this.CompileController.compile(this.req, this.res, this.next)
})
it('should add the zone prefix', function () {
@@ -227,9 +229,8 @@ describe('CompileController', function () {
})
describe('when not an auto compile', function () {
- beforeEach(function (done) {
- this.res.callback = done
- this.CompileController.compile(this.req, this.res, this.next)
+ beforeEach(async function () {
+ await this.CompileController.compile(this.req, this.res, this.next)
})
it('should look up the user id', function () {
@@ -239,7 +240,7 @@ describe('CompileController', function () {
})
it('should do the compile without the auto compile flag', function () {
- this.CompileManager.compile.should.have.been.calledWith(
+ this.CompileManager.promises.compile.should.have.been.calledWith(
this.projectId,
this.user_id,
{
@@ -276,14 +277,13 @@ describe('CompileController', function () {
})
describe('when an auto compile', function () {
- beforeEach(function (done) {
- this.res.callback = done
+ beforeEach(async function () {
this.req.query = { auto_compile: 'true' }
- this.CompileController.compile(this.req, this.res, this.next)
+ await this.CompileController.compile(this.req, this.res, this.next)
})
it('should do the compile with the auto compile flag', function () {
- this.CompileManager.compile.should.have.been.calledWith(
+ this.CompileManager.promises.compile.should.have.been.calledWith(
this.projectId,
this.user_id,
{
@@ -301,14 +301,13 @@ describe('CompileController', function () {
})
describe('with the draft attribute', function () {
- beforeEach(function (done) {
- this.res.callback = done
+ beforeEach(async function () {
this.req.body = { draft: true }
- this.CompileController.compile(this.req, this.res, this.next)
+ await this.CompileController.compile(this.req, this.res, this.next)
})
it('should do the compile without the draft compile flag', function () {
- this.CompileManager.compile.should.have.been.calledWith(
+ this.CompileManager.promises.compile.should.have.been.calledWith(
this.projectId,
this.user_id,
{
@@ -327,14 +326,13 @@ describe('CompileController', function () {
})
describe('with an editor id', function () {
- beforeEach(function (done) {
- this.res.callback = done
+ beforeEach(async function () {
this.req.body = { editorId: 'the-editor-id' }
- this.CompileController.compile(this.req, this.res, this.next)
+ await this.CompileController.compile(this.req, this.res, this.next)
})
it('should pass the editor id to the compiler', function () {
- this.CompileManager.compile.should.have.been.calledWith(
+ this.CompileManager.promises.compile.should.have.been.calledWith(
this.projectId,
this.user_id,
{
@@ -357,25 +355,29 @@ describe('CompileController', function () {
this.submission_id = 'sub-1234'
this.req.params = { submission_id: this.submission_id }
this.req.body = {}
- this.ClsiManager.sendExternalRequest = sinon
- .stub()
- .callsArgWith(
- 3,
- null,
- (this.status = 'success'),
- (this.outputFiles = ['mock-output-files']),
- (this.clsiServerId = 'mock-server-id'),
- (this.validationProblems = null)
- )
+ this.ClsiManager.promises.sendExternalRequest = sinon.stub().resolves({
+ status: (this.status = 'success'),
+ outputFiles: (this.outputFiles = ['mock-output-files']),
+ clsiServerId: 'mock-server-id',
+ validationProblems: null,
+ })
})
- it('should set the content-type of the response to application/json', function () {
- this.CompileController.compileSubmission(this.req, this.res, this.next)
+ it('should set the content-type of the response to application/json', async function () {
+ await this.CompileController.compileSubmission(
+ this.req,
+ this.res,
+ this.next
+ )
this.res.contentType.calledWith('application/json').should.equal(true)
})
- it('should send a successful response reporting the status and files', function () {
- this.CompileController.compileSubmission(this.req, this.res, this.next)
+ it('should send a successful response reporting the status and files', async function () {
+ await this.CompileController.compileSubmission(
+ this.req,
+ this.res,
+ this.next
+ )
this.res.statusCode.should.equal(200)
this.res.body.should.equal(
JSON.stringify({
@@ -397,7 +399,7 @@ describe('CompileController', function () {
})
it('should use the supplied values', function () {
- this.ClsiManager.sendExternalRequest.should.have.been.calledWith(
+ this.ClsiManager.promises.sendExternalRequest.should.have.been.calledWith(
this.submission_id,
{ compileGroup: 'special', timeout: 600 },
{ compileGroup: 'special', compileBackendClass: 'n2d', timeout: 600 }
@@ -417,7 +419,7 @@ describe('CompileController', function () {
})
it('should use the other options but default values for compileGroup and timeout', function () {
- this.ClsiManager.sendExternalRequest.should.have.been.calledWith(
+ this.ClsiManager.promises.sendExternalRequest.should.have.been.calledWith(
this.submission_id,
{
rootResourcePath: 'main.tex',
@@ -441,24 +443,21 @@ describe('CompileController', function () {
describe('downloadPdf', function () {
beforeEach(function () {
+ this.CompileController._proxyToClsi = sinon.stub().resolves()
this.req.params = { Project_id: this.projectId }
-
this.project = { name: 'test namè; 1' }
- this.ProjectGetter.getProject = sinon
+ this.ProjectGetter.promises.getProject = sinon
.stub()
- .callsArgWith(2, null, this.project)
+ .resolves(this.project)
})
describe('when downloading for embedding', function () {
- beforeEach(function (done) {
- this.CompileController.proxyToClsi = sinon
- .stub()
- .callsFake(() => done())
- this.CompileController.downloadPdf(this.req, this.res, this.next)
+ beforeEach(async function () {
+ await this.CompileController.downloadPdf(this.req, this.res, this.next)
})
it('should look up the project', function () {
- this.ProjectGetter.getProject
+ this.ProjectGetter.promises.getProject
.calledWith(this.projectId, { name: 1 })
.should.equal(true)
})
@@ -478,43 +477,66 @@ describe('CompileController', function () {
})
it('should proxy the PDF from the CLSI', function () {
- this.CompileController.proxyToClsi
+ this.CompileController._proxyToClsi
.calledWith(
this.projectId,
'output-file',
`/project/${this.projectId}/user/${this.user_id}/output/output.pdf`,
{},
this.req,
- this.res,
- this.next
+ this.res
)
.should.equal(true)
})
})
describe('when a build-id is provided', function () {
- beforeEach(function (done) {
+ beforeEach(async function () {
this.req.params.build_id = this.build_id
- this.CompileController.proxyToClsi = sinon
- .stub()
- .callsFake(() => done())
- this.CompileController.downloadPdf(this.req, this.res, this.next)
+ await this.CompileController.downloadPdf(this.req, this.res, this.next)
})
it('should proxy the PDF from the CLSI, with a build-id', function () {
- this.CompileController.proxyToClsi
+ this.CompileController._proxyToClsi
.calledWith(
this.projectId,
'output-file',
`/project/${this.projectId}/user/${this.user_id}/build/${this.build_id}/output/output.pdf`,
{},
this.req,
- this.res,
- this.next
+ this.res
)
.should.equal(true)
})
})
+
+ describe('when rate-limited', function () {
+ beforeEach(async function () {
+ this.rateLimiter.consume.rejects({
+ msBeforeNext: 250,
+ remainingPoints: 0,
+ consumedPoints: 5,
+ isFirstInDuration: false,
+ })
+ })
+ it('should return 500', async function () {
+ await this.CompileController.downloadPdf(this.req, this.res, this.next)
+ // should it be 429 instead?
+ this.res.sendStatus.calledWith(500).should.equal(true)
+ this.CompileController._proxyToClsi.should.not.have.been.called
+ })
+ })
+
+ describe('when rate-limit errors', function () {
+ beforeEach(async function () {
+ this.rateLimiter.consume.rejects(new Error('uh oh'))
+ })
+ it('should return 500', async function () {
+ await this.CompileController.downloadPdf(this.req, this.res, this.next)
+ this.res.sendStatus.calledWith(500).should.equal(true)
+ this.CompileController._proxyToClsi.should.not.have.been.called
+ })
+ })
})
describe('getFileFromClsiWithoutUser', function () {
@@ -528,12 +550,12 @@ describe('CompileController', function () {
}
this.req.body = {}
this.expected_url = `/project/${this.submission_id}/build/${this.build_id}/output/${this.file}`
- this.CompileController.proxyToClsiWithLimits = sinon.stub()
+ this.CompileController._proxyToClsiWithLimits = sinon.stub()
})
describe('without limits specified', function () {
- beforeEach(function () {
- this.CompileController.getFileFromClsiWithoutUser(
+ beforeEach(async function () {
+ await this.CompileController.getFileFromClsiWithoutUser(
this.req,
this.res,
this.next
@@ -541,15 +563,12 @@ describe('CompileController', function () {
})
it('should proxy to CLSI with correct URL and default limits', function () {
- this.CompileController.proxyToClsiWithLimits.should.have.been.calledWith(
+ this.CompileController._proxyToClsiWithLimits.should.have.been.calledWith(
this.submission_id,
'output-file',
this.expected_url,
{},
- {
- compileGroup: 'standard',
- compileBackendClass: 'n2d',
- }
+ { compileGroup: 'standard', compileBackendClass: 'n2d' }
)
})
})
@@ -565,7 +584,7 @@ describe('CompileController', function () {
})
it('should proxy to CLSI with correct URL and specified limits', function () {
- this.CompileController.proxyToClsiWithLimits.should.have.been.calledWith(
+ this.CompileController._proxyToClsiWithLimits.should.have.been.calledWith(
this.submission_id,
'output-file',
this.expected_url,
@@ -581,7 +600,7 @@ describe('CompileController', function () {
describe('proxySyncCode', function () {
let file, line, column, imageName, editorId, buildId
- beforeEach(function (done) {
+ beforeEach(async function () {
this.req.params = { Project_id: this.projectId }
file = 'main.tex'
line = String(Date.now())
@@ -591,17 +610,17 @@ describe('CompileController', function () {
this.req.query = { file, line, column, editorId, buildId }
imageName = 'foo/bar:tag-0'
- this.ProjectGetter.getProject = sinon.stub().yields(null, { imageName })
+ this.ProjectGetter.promises.getProject = sinon
+ .stub()
+ .resolves({ imageName })
- this.next.callsFake(done)
- this.res.callback = done
- this.CompileController.proxyToClsi = sinon.stub().callsFake(() => done())
+ this.CompileController._proxyToClsi = sinon.stub().resolves()
- this.CompileController.proxySyncCode(this.req, this.res, this.next)
+ await this.CompileController.proxySyncCode(this.req, this.res, this.next)
})
it('should proxy the request with an imageName', function () {
- expect(this.CompileController.proxyToClsi).to.have.been.calledWith(
+ expect(this.CompileController._proxyToClsi).to.have.been.calledWith(
this.projectId,
'sync-to-code',
`/project/${this.projectId}/user/${this.user_id}/sync/code`,
@@ -615,8 +634,7 @@ describe('CompileController', function () {
compileFromClsiCache: false,
},
this.req,
- this.res,
- this.next
+ this.res
)
})
})
@@ -624,7 +642,7 @@ describe('CompileController', function () {
describe('proxySyncPdf', function () {
let page, h, v, imageName, editorId, buildId
- beforeEach(function (done) {
+ beforeEach(async function () {
this.req.params = { Project_id: this.projectId }
page = String(Date.now())
h = String(Math.random())
@@ -634,17 +652,17 @@ describe('CompileController', function () {
this.req.query = { page, h, v, editorId, buildId }
imageName = 'foo/bar:tag-1'
- this.ProjectGetter.getProject = sinon.stub().yields(null, { imageName })
+ this.ProjectGetter.promises.getProject = sinon
+ .stub()
+ .resolves({ imageName })
- this.next.callsFake(done)
- this.res.callback = done
- this.CompileController.proxyToClsi = sinon.stub().callsFake(() => done())
+ this.CompileController._proxyToClsi = sinon.stub()
- this.CompileController.proxySyncPdf(this.req, this.res, this.next)
+ await this.CompileController.proxySyncPdf(this.req, this.res, this.next)
})
it('should proxy the request with an imageName', function () {
- expect(this.CompileController.proxyToClsi).to.have.been.calledWith(
+ expect(this.CompileController._proxyToClsi).to.have.been.calledWith(
this.projectId,
'sync-to-pdf',
`/project/${this.projectId}/user/${this.user_id}/sync/pdf`,
@@ -658,13 +676,12 @@ describe('CompileController', function () {
compileFromClsiCache: false,
},
this.req,
- this.res,
- this.next
+ this.res
)
})
})
- describe('proxyToClsi', function () {
+ describe('_proxyToClsi', function () {
beforeEach(function () {
this.req.method = 'mock-method'
this.req.headers = {
@@ -677,15 +694,14 @@ describe('CompileController', function () {
describe('old pdf viewer', function () {
describe('user with standard priority', function () {
- beforeEach(function (done) {
- this.res.callback = done
- this.CompileManager.getProjectCompileLimits = sinon
+ beforeEach(async function () {
+ this.CompileManager.promises.getProjectCompileLimits = sinon
.stub()
- .callsArgWith(1, null, {
+ .resolves({
compileGroup: 'standard',
compileBackendClass: 'e2',
})
- this.CompileController.proxyToClsi(
+ await this.CompileController._proxyToClsi(
this.projectId,
'output-file',
(this.url = '/test'),
@@ -708,15 +724,14 @@ describe('CompileController', function () {
})
describe('user with priority compile', function () {
- beforeEach(function (done) {
- this.res.callback = done
- this.CompileManager.getProjectCompileLimits = sinon
+ beforeEach(async function () {
+ this.CompileManager.promises.getProjectCompileLimits = sinon
.stub()
- .callsArgWith(1, null, {
+ .resolves({
compileGroup: 'priority',
compileBackendClass: 'c2d',
})
- this.CompileController.proxyToClsi(
+ await this.CompileController._proxyToClsi(
this.projectId,
'output-file',
(this.url = '/test'),
@@ -735,16 +750,15 @@ describe('CompileController', function () {
})
describe('user with standard priority via query string', function () {
- beforeEach(function (done) {
- this.res.callback = done
+ beforeEach(async function () {
this.req.query = { compileGroup: 'standard' }
- this.CompileManager.getProjectCompileLimits = sinon
+ this.CompileManager.promises.getProjectCompileLimits = sinon
.stub()
- .callsArgWith(1, null, {
+ .resolves({
compileGroup: 'standard',
compileBackendClass: 'e2',
})
- this.CompileController.proxyToClsi(
+ await this.CompileController._proxyToClsi(
this.projectId,
'output-file',
(this.url = '/test'),
@@ -767,16 +781,15 @@ describe('CompileController', function () {
})
describe('user with non-existent priority via query string', function () {
- beforeEach(function (done) {
- this.res.callback = done
+ beforeEach(async function () {
this.req.query = { compileGroup: 'foobar' }
- this.CompileManager.getProjectCompileLimits = sinon
+ this.CompileManager.promises.getProjectCompileLimits = sinon
.stub()
- .callsArgWith(1, null, {
+ .resolves({
compileGroup: 'standard',
compileBackendClass: 'e2',
})
- this.CompileController.proxyToClsi(
+ await this.CompileController._proxyToClsi(
this.projectId,
'output-file',
(this.url = '/test'),
@@ -795,16 +808,15 @@ describe('CompileController', function () {
})
describe('user with build parameter via query string', function () {
- beforeEach(function (done) {
- this.res.callback = done
- this.CompileManager.getProjectCompileLimits = sinon
+ beforeEach(async function () {
+ this.CompileManager.promises.getProjectCompileLimits = sinon
.stub()
- .callsArgWith(1, null, {
+ .resolves({
compileGroup: 'standard',
compileBackendClass: 'e2',
})
this.req.query = { build: 1234 }
- this.CompileController.proxyToClsi(
+ await this.CompileController._proxyToClsi(
this.projectId,
'output-file',
(this.url = '/test'),
@@ -825,16 +837,16 @@ describe('CompileController', function () {
})
describe('deleteAuxFiles', function () {
- beforeEach(function () {
- this.CompileManager.deleteAuxFiles = sinon.stub().yields()
+ beforeEach(async function () {
+ this.CompileManager.promises.deleteAuxFiles = sinon.stub().resolves()
this.req.params = { Project_id: this.projectId }
this.req.query = { clsiserverid: 'node-1' }
this.res.sendStatus = sinon.stub()
- this.CompileController.deleteAuxFiles(this.req, this.res, this.next)
+ await this.CompileController.deleteAuxFiles(this.req, this.res, this.next)
})
it('should proxy to the CLSI', function () {
- this.CompileManager.deleteAuxFiles
+ this.CompileManager.promises.deleteAuxFiles
.calledWith(this.projectId, this.user_id, 'node-1')
.should.equal(true)
})
@@ -852,26 +864,25 @@ describe('CompileController', function () {
},
}
this.downloadPath = `/project/${this.projectId}/build/123/output/output.pdf`
- this.CompileManager.compile.callsArgWith(3, null, 'success', [
- {
- path: 'output.pdf',
- url: this.downloadPath,
- },
- ])
- this.CompileController.proxyToClsi = sinon.stub()
+ this.CompileManager.promises.compile.resolves({
+ status: 'success',
+ outputFiles: [{ path: 'output.pdf', url: this.downloadPath }],
+ })
+ this.CompileController._proxyToClsi = sinon.stub()
this.res = { send: () => {}, sendStatus: sinon.stub() }
})
- it('should call compile in the compile manager', function (done) {
- this.CompileController.compileAndDownloadPdf(this.req, this.res)
- this.CompileManager.compile.calledWith(this.projectId).should.equal(true)
- done()
+ it('should call compile in the compile manager', async function () {
+ await this.CompileController.compileAndDownloadPdf(this.req, this.res)
+ this.CompileManager.promises.compile
+ .calledWith(this.projectId)
+ .should.equal(true)
})
- it('should proxy the res to the clsi with correct url', function (done) {
- this.CompileController.compileAndDownloadPdf(this.req, this.res)
+ it('should proxy the res to the clsi with correct url', async function () {
+ await this.CompileController.compileAndDownloadPdf(this.req, this.res)
sinon.assert.calledWith(
- this.CompileController.proxyToClsi,
+ this.CompileController._proxyToClsi,
this.projectId,
'output-file',
this.downloadPath,
@@ -880,7 +891,7 @@ describe('CompileController', function () {
this.res
)
- this.CompileController.proxyToClsi
+ this.CompileController._proxyToClsi
.calledWith(
this.projectId,
'output-file',
@@ -890,38 +901,44 @@ describe('CompileController', function () {
this.res
)
.should.equal(true)
- done()
})
- it('should not download anything on compilation failures', function () {
- this.CompileManager.compile.yields(new Error('failed'))
- this.CompileController.compileAndDownloadPdf(this.req, this.res)
+ it('should not download anything on compilation failures', async function () {
+ this.CompileManager.promises.compile.rejects(new Error('failed'))
+ await this.CompileController.compileAndDownloadPdf(
+ this.req,
+ this.res,
+ this.next
+ )
this.res.sendStatus.should.have.been.calledWith(500)
- this.CompileController.proxyToClsi.should.not.have.been.called
+ this.CompileController._proxyToClsi.should.not.have.been.called
})
- it('should not download anything on missing pdf', function () {
- this.CompileManager.compile.yields(null, 'success', [])
- this.CompileController.compileAndDownloadPdf(this.req, this.res)
+ it('should not download anything on missing pdf', async function () {
+ this.CompileManager.promises.compile.resolves({
+ status: 'success',
+ outputFiles: [],
+ })
+ await this.CompileController.compileAndDownloadPdf(this.req, this.res)
this.res.sendStatus.should.have.been.calledWith(500)
- this.CompileController.proxyToClsi.should.not.have.been.called
+ this.CompileController._proxyToClsi.should.not.have.been.called
})
})
describe('wordCount', function () {
- beforeEach(function () {
- this.CompileManager.wordCount = sinon
+ beforeEach(async function () {
+ this.CompileManager.promises.wordCount = sinon
.stub()
- .yields(null, { content: 'body' })
+ .resolves({ content: 'body' })
this.req.params = { Project_id: this.projectId }
this.req.query = { clsiserverid: 'node-42' }
this.res.json = sinon.stub()
this.res.contentType = sinon.stub()
- this.CompileController.wordCount(this.req, this.res, this.next)
+ await this.CompileController.wordCount(this.req, this.res, this.next)
})
it('should proxy to the CLSI', function () {
- this.CompileManager.wordCount
+ this.CompileManager.promises.wordCount
.calledWith(this.projectId, this.user_id, false, 'node-42')
.should.equal(true)
})
From 59275eeb84e76878829a843896625a54444888d2 Mon Sep 17 00:00:00 2001
From: Jessica Lawshe <5312836+lawshe@users.noreply.github.com>
Date: Tue, 6 May 2025 10:23:14 -0500
Subject: [PATCH 020/194] Merge pull request #24919 from
overleaf/jel-create-group-audit-log
[web] Add group audit log
GitOrigin-RevId: b59c38c57f555f18cdfa5dd697ad38d78b590996
---
.../web/app/src/infrastructure/mongodb.js | 1 +
.../web/app/src/models/GroupAuditLogEntry.js | 23 ++++++++++++
services/web/frontend/js/utils/meta.ts | 1 +
.../20250409155536_group_audit_log_index.mjs | 35 +++++++++++++++++++
.../test/acceptance/src/helpers/groupSSO.mjs | 18 +++++-----
5 files changed, 70 insertions(+), 8 deletions(-)
create mode 100644 services/web/app/src/models/GroupAuditLogEntry.js
create mode 100644 services/web/migrations/20250409155536_group_audit_log_index.mjs
diff --git a/services/web/app/src/infrastructure/mongodb.js b/services/web/app/src/infrastructure/mongodb.js
index aa7aa4ac44..7fc1039140 100644
--- a/services/web/app/src/infrastructure/mongodb.js
+++ b/services/web/app/src/infrastructure/mongodb.js
@@ -49,6 +49,7 @@ const db = {
githubSyncUserCredentials: internalDb.collection('githubSyncUserCredentials'),
globalMetrics: internalDb.collection('globalMetrics'),
grouppolicies: internalDb.collection('grouppolicies'),
+ groupAuditLogEntries: internalDb.collection('groupAuditLogEntries'),
institutions: internalDb.collection('institutions'),
messages: internalDb.collection('messages'),
migrations: internalDb.collection('migrations'),
diff --git a/services/web/app/src/models/GroupAuditLogEntry.js b/services/web/app/src/models/GroupAuditLogEntry.js
new file mode 100644
index 0000000000..3bda4ebf95
--- /dev/null
+++ b/services/web/app/src/models/GroupAuditLogEntry.js
@@ -0,0 +1,23 @@
+const mongoose = require('../infrastructure/Mongoose')
+const { Schema } = mongoose
+
+const GroupAuditLogEntrySchema = new Schema(
+ {
+ groupId: { type: Schema.Types.ObjectId, index: true },
+ info: { type: Object },
+ initiatorId: { type: Schema.Types.ObjectId },
+ ipAddress: { type: String },
+ operation: { type: String },
+ timestamp: { type: Date, default: Date.now },
+ },
+ {
+ collection: 'groupAuditLogEntries',
+ minimize: false,
+ }
+)
+
+exports.GroupAuditLogEntry = mongoose.model(
+ 'GroupAuditLogEntry',
+ GroupAuditLogEntrySchema
+)
+exports.GroupAuditLogEntrySchema = GroupAuditLogEntrySchema
diff --git a/services/web/frontend/js/utils/meta.ts b/services/web/frontend/js/utils/meta.ts
index 7aab88b050..6c7209a5bb 100644
--- a/services/web/frontend/js/utils/meta.ts
+++ b/services/web/frontend/js/utils/meta.ts
@@ -103,6 +103,7 @@ export interface Meta {
'ol-gitBridgeEnabled': boolean
'ol-gitBridgePublicBaseUrl': string
'ol-github': { enabled: boolean; error: boolean }
+ 'ol-groupAuditLogs': []
'ol-groupId': string
'ol-groupName': string
'ol-groupPlans': GroupPlans
diff --git a/services/web/migrations/20250409155536_group_audit_log_index.mjs b/services/web/migrations/20250409155536_group_audit_log_index.mjs
new file mode 100644
index 0000000000..282b3c6d2d
--- /dev/null
+++ b/services/web/migrations/20250409155536_group_audit_log_index.mjs
@@ -0,0 +1,35 @@
+/* eslint-disable no-unused-vars */
+
+import Helpers from './lib/helpers.mjs'
+
+const tags = ['saas']
+
+const indexes = [
+ {
+ key: {
+ groupId: 1,
+ timestamp: 1,
+ },
+ name: 'groupId_1_timestamp_1',
+ },
+]
+
+const migrate = async client => {
+ const { db } = client
+ await Helpers.addIndexesToCollection(db.groupAuditLogEntries, indexes)
+}
+
+const rollback = async client => {
+ const { db } = client
+ try {
+ await Helpers.dropIndexesFromCollection(db.groupAuditLogEntries, indexes)
+ } catch (err) {
+ console.error('Something went wrong rolling back the migrations', err)
+ }
+}
+
+export default {
+ tags,
+ migrate,
+ rollback,
+}
diff --git a/services/web/test/acceptance/src/helpers/groupSSO.mjs b/services/web/test/acceptance/src/helpers/groupSSO.mjs
index f7efeb9e63..c5bde77236 100644
--- a/services/web/test/acceptance/src/helpers/groupSSO.mjs
+++ b/services/web/test/acceptance/src/helpers/groupSSO.mjs
@@ -34,7 +34,7 @@ export const baseSsoConfig = {
userIdAttribute,
} // the database also sets enabled and validated, but we cannot set that in the POST request for /manage/groups/:ID/settings/sso
-export async function createGroupSSO() {
+export async function createGroupSSO(SSOConfigValidated = true) {
const nonSSOMemberHelper = await UserHelper.createUser()
const nonSSOMember = nonSSOMemberHelper.user
@@ -47,7 +47,7 @@ export async function createGroupSSO() {
const ssoConfig = new SSOConfig({
...baseSsoConfig,
enabled: true,
- validated: true,
+ validated: SSOConfigValidated,
})
await ssoConfig.save()
@@ -68,12 +68,14 @@ export async function createGroupSSO() {
const enrollmentUrl = getEnrollmentUrl(subscriptionId)
const internalProviderId = getProviderId(subscriptionId)
- await linkGroupMember(
- memberUser.email,
- memberUser.password,
- subscriptionId,
- 'mock@email.com'
- )
+ if (SSOConfigValidated) {
+ await linkGroupMember(
+ memberUser.email,
+ memberUser.password,
+ subscriptionId,
+ 'mock@email.com'
+ )
+ }
const userHelper = new UserHelper()
From f29bd47911c97c08ca76755da099410a1e69777d Mon Sep 17 00:00:00 2001
From: Jimmy Domagala-Tang
Date: Tue, 6 May 2025 11:23:50 -0400
Subject: [PATCH 021/194] Merge pull request #25252 from
overleaf/jdt-add-addon-to-cancellation-mssg-in-subs
Show Assist Add-on for pending and cancelled subscriptions
GitOrigin-RevId: df733d7078c231a5de989bc070b37e3c250fdb37
---
.../states/active/pending-plan-change.tsx | 16 ++++---------
.../components/dashboard/states/canceled.tsx | 18 +++++++++++++++
.../subscription/data/add-on-codes.ts | 23 +++++++++++++++++++
3 files changed, 46 insertions(+), 11 deletions(-)
diff --git a/services/web/frontend/js/features/subscription/components/dashboard/states/active/pending-plan-change.tsx b/services/web/frontend/js/features/subscription/components/dashboard/states/active/pending-plan-change.tsx
index 46d5a1cc90..cf7452941d 100644
--- a/services/web/frontend/js/features/subscription/components/dashboard/states/active/pending-plan-change.tsx
+++ b/services/web/frontend/js/features/subscription/components/dashboard/states/active/pending-plan-change.tsx
@@ -1,7 +1,9 @@
import { Trans } from 'react-i18next'
import { PaidSubscription } from '../../../../../../../../types/subscription/dashboard/subscription'
-import { PendingPaymentProviderPlan } from '../../../../../../../../types/subscription/plan'
-import { AI_ADD_ON_CODE, ADD_ON_NAME } from '../../../../data/add-on-codes'
+import {
+ hasPendingAiAddonCancellation,
+ ADD_ON_NAME,
+} from '../../../../data/add-on-codes'
export function PendingPlanChange({
subscription,
@@ -10,15 +12,7 @@ export function PendingPlanChange({
}) {
if (!subscription.pendingPlan) return null
- const pendingPlan = subscription.pendingPlan as PendingPaymentProviderPlan
-
- const hasAiAddon = subscription.addOns?.some(
- addOn => addOn.addOnCode === AI_ADD_ON_CODE
- )
-
- const pendingAiAddonCancellation =
- hasAiAddon &&
- !pendingPlan.addOns?.some(addOn => addOn.code === AI_ADD_ON_CODE)
+ const pendingAiAddonCancellation = hasPendingAiAddonCancellation(subscription)
const pendingAdditionalLicenses =
(subscription.payment.pendingAdditionalLicenses &&
diff --git a/services/web/frontend/js/features/subscription/components/dashboard/states/canceled.tsx b/services/web/frontend/js/features/subscription/components/dashboard/states/canceled.tsx
index 569aaeba04..12838970d2 100644
--- a/services/web/frontend/js/features/subscription/components/dashboard/states/canceled.tsx
+++ b/services/web/frontend/js/features/subscription/components/dashboard/states/canceled.tsx
@@ -1,5 +1,9 @@
import { useTranslation, Trans } from 'react-i18next'
import { PaidSubscription } from '../../../../../../../types/subscription/dashboard/subscription'
+import {
+ hasPendingAiAddonCancellation,
+ ADD_ON_NAME,
+} from '../../../data/add-on-codes'
import ReactivateSubscription from '../reactivate-subscription'
import OLButton from '@/features/ui/components/ol/ol-button'
@@ -9,6 +13,7 @@ export function CanceledSubscription({
subscription: PaidSubscription
}) {
const { t } = useTranslation()
+ const pendingAiAddonCancellation = hasPendingAiAddonCancellation(subscription)
return (
<>
@@ -26,6 +31,19 @@ export function CanceledSubscription({
]}
/>
+ {pendingAiAddonCancellation && (
+
+ }}
+ />
+
+ )}
addOn.addOnCode === AI_ADD_ON_CODE
+ )
+
+ // cancellation of entire plan counts as removing the add-on
+ if(hasAiAddon && !pendingPlan){
+ return true
+ }
+
+ return hasAiAddon &&
+ !pendingPlan.addOns?.some(addOn => addOn.code === AI_ADD_ON_CODE)
+
+}
\ No newline at end of file
From c060358cd8b4390dc8152ee21bc72a45295fe6d2 Mon Sep 17 00:00:00 2001
From: Jimmy Domagala-Tang
Date: Tue, 6 May 2025 11:24:01 -0400
Subject: [PATCH 022/194] Merge pull request #25223 from
overleaf/jdt-dk-commons-toggle-annual-discount-bundle
Allow for commons to toggle annual for the AI Assist bundle
GitOrigin-RevId: 719dbb4944e3a447e03aa5c3fee7d0f5a0ce005b
---
services/web/frontend/extracted-translations.json | 3 +--
services/web/locales/en.json | 3 +--
2 files changed, 2 insertions(+), 4 deletions(-)
diff --git a/services/web/frontend/extracted-translations.json b/services/web/frontend/extracted-translations.json
index 7df8553f55..7db0fc6d4f 100644
--- a/services/web/frontend/extracted-translations.json
+++ b/services/web/frontend/extracted-translations.json
@@ -82,12 +82,11 @@
"add_on": "",
"add_ons": "",
"add_or_remove_project_from_tag": "",
- "add_overleaf_assist_to_your_group_subscription": "",
- "add_overleaf_assist_to_your_institution": "",
"add_people": "",
"add_role_and_department": "",
"add_to_dictionary": "",
"add_to_tag": "",
+ "add_unlimited_ai_to_overleaf": "",
"add_unlimited_ai_to_your_overleaf_plan": "",
"add_your_comment_here": "",
"add_your_first_group_member_now": "",
diff --git a/services/web/locales/en.json b/services/web/locales/en.json
index 389e3078b5..1a610957e6 100644
--- a/services/web/locales/en.json
+++ b/services/web/locales/en.json
@@ -97,12 +97,11 @@
"add_on": "Add-on",
"add_ons": "Add-ons",
"add_or_remove_project_from_tag": "Add or remove project from tag __tagName__",
- "add_overleaf_assist_to_your_group_subscription": "Add Overleaf Assist to your group subscription",
- "add_overleaf_assist_to_your_institution": "Add Overleaf Assist to your institution",
"add_people": "Add people",
"add_role_and_department": "Add role and department",
"add_to_dictionary": "Add to Dictionary",
"add_to_tag": "Add to tag",
+ "add_unlimited_ai_to_overleaf": "Add unlimited AI* to Overleaf",
"add_unlimited_ai_to_your_overleaf_plan": "Add unlimited AI* to your Overleaf __planName__ plan",
"add_your_comment_here": "Add your comment here",
"add_your_first_group_member_now": "Add your first group members now",
From f72a34f25b86cca345afbe867b4ee431f1bf8257 Mon Sep 17 00:00:00 2001
From: Tim Down <158919+timdown@users.noreply.github.com>
Date: Tue, 6 May 2025 16:45:56 +0100
Subject: [PATCH 023/194] Merge pull request #25348 from
overleaf/td-react-18-flaky-tests
Attempt to fix two flaky frontend project dashboard tests
GitOrigin-RevId: 1d5c3a05f7439ad3e22e5de96da8628ad8dd27c5
---
.../table/project-list-table.test.tsx | 19 ++++++++++++++-----
1 file changed, 14 insertions(+), 5 deletions(-)
diff --git a/services/web/test/frontend/features/project-list/components/table/project-list-table.test.tsx b/services/web/test/frontend/features/project-list/components/table/project-list-table.test.tsx
index f1d4a0755d..f8f9143c16 100644
--- a/services/web/test/frontend/features/project-list/components/table/project-list-table.test.tsx
+++ b/services/web/test/frontend/features/project-list/components/table/project-list-table.test.tsx
@@ -154,17 +154,24 @@ describe('', function () {
})
})
- it('unselects all projects when select all checkbox uchecked', async function () {
+ it('unselects all projects when select all checkbox unchecked', async function () {
renderWithProjectListContext()
await fetchMock.callHistory.flush(true)
const checkbox = await screen.findByLabelText('Select all projects')
- fireEvent.click(checkbox)
+
fireEvent.click(checkbox)
await waitFor(() => {
const allCheckboxes = screen.queryAllByRole('checkbox')
const allCheckboxesChecked = allCheckboxes.filter(c => c.checked)
- expect(allCheckboxesChecked.length).to.equal(0)
+ expect(allCheckboxesChecked).to.have.length(currentProjects.length + 1)
+ })
+
+ fireEvent.click(checkbox)
+
+ await waitFor(() => {
+ const allCheckboxes = screen.queryAllByRole('checkbox')
+ expect(allCheckboxes.every(c => !c.checked)).to.be.true
})
})
@@ -174,12 +181,14 @@ describe('', function () {
const checkbox = await screen.findByLabelText('Select all projects')
fireEvent.click(checkbox)
+ // make sure we are unchecking a project checkbox and that it is already
+ // checked
await waitFor(() => {
expect(
screen
- .getAllByRole('checkbox')[1]
+ .getAllByRole('checkbox', { checked: true })[1]
.getAttribute('data-project-id')
- ).to.exist // make sure we are unchecking a project checkbox
+ ).to.exist
})
fireEvent.click(screen.getAllByRole('checkbox')[1])
From 12939b91b3c29aa0dff625ec97cd2ff5b99356f1 Mon Sep 17 00:00:00 2001
From: Jessica Lawshe <5312836+lawshe@users.noreply.github.com>
Date: Tue, 6 May 2025 10:53:26 -0500
Subject: [PATCH 024/194] Merge pull request #25351 from
overleaf/revert-24919-jel-create-group-audit-log
Revert "[web] Add group audit log"
GitOrigin-RevId: cf192bbe3ebdb693f18bab9c1c5d08da18ed34c0
---
.../web/app/src/infrastructure/mongodb.js | 1 -
.../web/app/src/models/GroupAuditLogEntry.js | 23 ------------
services/web/frontend/js/utils/meta.ts | 1 -
.../20250409155536_group_audit_log_index.mjs | 35 -------------------
.../test/acceptance/src/helpers/groupSSO.mjs | 18 +++++-----
5 files changed, 8 insertions(+), 70 deletions(-)
delete mode 100644 services/web/app/src/models/GroupAuditLogEntry.js
delete mode 100644 services/web/migrations/20250409155536_group_audit_log_index.mjs
diff --git a/services/web/app/src/infrastructure/mongodb.js b/services/web/app/src/infrastructure/mongodb.js
index 7fc1039140..aa7aa4ac44 100644
--- a/services/web/app/src/infrastructure/mongodb.js
+++ b/services/web/app/src/infrastructure/mongodb.js
@@ -49,7 +49,6 @@ const db = {
githubSyncUserCredentials: internalDb.collection('githubSyncUserCredentials'),
globalMetrics: internalDb.collection('globalMetrics'),
grouppolicies: internalDb.collection('grouppolicies'),
- groupAuditLogEntries: internalDb.collection('groupAuditLogEntries'),
institutions: internalDb.collection('institutions'),
messages: internalDb.collection('messages'),
migrations: internalDb.collection('migrations'),
diff --git a/services/web/app/src/models/GroupAuditLogEntry.js b/services/web/app/src/models/GroupAuditLogEntry.js
deleted file mode 100644
index 3bda4ebf95..0000000000
--- a/services/web/app/src/models/GroupAuditLogEntry.js
+++ /dev/null
@@ -1,23 +0,0 @@
-const mongoose = require('../infrastructure/Mongoose')
-const { Schema } = mongoose
-
-const GroupAuditLogEntrySchema = new Schema(
- {
- groupId: { type: Schema.Types.ObjectId, index: true },
- info: { type: Object },
- initiatorId: { type: Schema.Types.ObjectId },
- ipAddress: { type: String },
- operation: { type: String },
- timestamp: { type: Date, default: Date.now },
- },
- {
- collection: 'groupAuditLogEntries',
- minimize: false,
- }
-)
-
-exports.GroupAuditLogEntry = mongoose.model(
- 'GroupAuditLogEntry',
- GroupAuditLogEntrySchema
-)
-exports.GroupAuditLogEntrySchema = GroupAuditLogEntrySchema
diff --git a/services/web/frontend/js/utils/meta.ts b/services/web/frontend/js/utils/meta.ts
index 6c7209a5bb..7aab88b050 100644
--- a/services/web/frontend/js/utils/meta.ts
+++ b/services/web/frontend/js/utils/meta.ts
@@ -103,7 +103,6 @@ export interface Meta {
'ol-gitBridgeEnabled': boolean
'ol-gitBridgePublicBaseUrl': string
'ol-github': { enabled: boolean; error: boolean }
- 'ol-groupAuditLogs': []
'ol-groupId': string
'ol-groupName': string
'ol-groupPlans': GroupPlans
diff --git a/services/web/migrations/20250409155536_group_audit_log_index.mjs b/services/web/migrations/20250409155536_group_audit_log_index.mjs
deleted file mode 100644
index 282b3c6d2d..0000000000
--- a/services/web/migrations/20250409155536_group_audit_log_index.mjs
+++ /dev/null
@@ -1,35 +0,0 @@
-/* eslint-disable no-unused-vars */
-
-import Helpers from './lib/helpers.mjs'
-
-const tags = ['saas']
-
-const indexes = [
- {
- key: {
- groupId: 1,
- timestamp: 1,
- },
- name: 'groupId_1_timestamp_1',
- },
-]
-
-const migrate = async client => {
- const { db } = client
- await Helpers.addIndexesToCollection(db.groupAuditLogEntries, indexes)
-}
-
-const rollback = async client => {
- const { db } = client
- try {
- await Helpers.dropIndexesFromCollection(db.groupAuditLogEntries, indexes)
- } catch (err) {
- console.error('Something went wrong rolling back the migrations', err)
- }
-}
-
-export default {
- tags,
- migrate,
- rollback,
-}
diff --git a/services/web/test/acceptance/src/helpers/groupSSO.mjs b/services/web/test/acceptance/src/helpers/groupSSO.mjs
index c5bde77236..f7efeb9e63 100644
--- a/services/web/test/acceptance/src/helpers/groupSSO.mjs
+++ b/services/web/test/acceptance/src/helpers/groupSSO.mjs
@@ -34,7 +34,7 @@ export const baseSsoConfig = {
userIdAttribute,
} // the database also sets enabled and validated, but we cannot set that in the POST request for /manage/groups/:ID/settings/sso
-export async function createGroupSSO(SSOConfigValidated = true) {
+export async function createGroupSSO() {
const nonSSOMemberHelper = await UserHelper.createUser()
const nonSSOMember = nonSSOMemberHelper.user
@@ -47,7 +47,7 @@ export async function createGroupSSO(SSOConfigValidated = true) {
const ssoConfig = new SSOConfig({
...baseSsoConfig,
enabled: true,
- validated: SSOConfigValidated,
+ validated: true,
})
await ssoConfig.save()
@@ -68,14 +68,12 @@ export async function createGroupSSO(SSOConfigValidated = true) {
const enrollmentUrl = getEnrollmentUrl(subscriptionId)
const internalProviderId = getProviderId(subscriptionId)
- if (SSOConfigValidated) {
- await linkGroupMember(
- memberUser.email,
- memberUser.password,
- subscriptionId,
- 'mock@email.com'
- )
- }
+ await linkGroupMember(
+ memberUser.email,
+ memberUser.password,
+ subscriptionId,
+ 'mock@email.com'
+ )
const userHelper = new UserHelper()
From 6c3cc794a4bfa1e24b8861be8ba07232cd730b54 Mon Sep 17 00:00:00 2001
From: M Fahru
Date: Tue, 6 May 2025 09:49:37 -0700
Subject: [PATCH 025/194] Merge pull request #25161 from
overleaf/mf-stripe-webhook
[web] Implement stripe webhook for `customer.subscription.created` event type
GitOrigin-RevId: f32e7607ddf900211efbe487bcd1f09172100178
---
services/web/types/stripe/webhook-event.ts | 13 +++++++++++++
1 file changed, 13 insertions(+)
create mode 100644 services/web/types/stripe/webhook-event.ts
diff --git a/services/web/types/stripe/webhook-event.ts b/services/web/types/stripe/webhook-event.ts
new file mode 100644
index 0000000000..6be2ea1e95
--- /dev/null
+++ b/services/web/types/stripe/webhook-event.ts
@@ -0,0 +1,13 @@
+type CustomerSubscriptionCreated = {
+ type: 'customer.subscription.created'
+ data: {
+ object: {
+ id: string
+ metadata: {
+ adminUserId?: string
+ }
+ }
+ }
+}
+
+export type WebhookEvent = CustomerSubscriptionCreated
From 661aa20c095c0bdcada9e409d4c26365b24488f9 Mon Sep 17 00:00:00 2001
From: M Fahru
Date: Tue, 6 May 2025 09:49:51 -0700
Subject: [PATCH 026/194] Merge pull request #25288 from
overleaf/mf-stripe-webhook-subscription-updated
[web] Handle `customer.subscription.updated` stripe webhook event type
GitOrigin-RevId: 821baee5d5a45b92ee7bce47598a5e3ea5aa95ea
---
services/web/types/stripe/webhook-event.ts | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/services/web/types/stripe/webhook-event.ts b/services/web/types/stripe/webhook-event.ts
index 6be2ea1e95..648f26b8dc 100644
--- a/services/web/types/stripe/webhook-event.ts
+++ b/services/web/types/stripe/webhook-event.ts
@@ -1,4 +1,4 @@
-type CustomerSubscriptionCreated = {
+export type CustomerSubscriptionWebhookEvent = {
type: 'customer.subscription.created'
data: {
object: {
@@ -9,5 +9,3 @@ type CustomerSubscriptionCreated = {
}
}
}
-
-export type WebhookEvent = CustomerSubscriptionCreated
From eddeca294294e8d6c8d76db3cd14c8899d4661a2 Mon Sep 17 00:00:00 2001
From: Antoine Clausse
Date: Wed, 7 May 2025 09:29:50 +0200
Subject: [PATCH 027/194] [history-v1] Update `config` from `1.31.0` to
`3.3.12` (#25077)
This removes some DeprecationWarnings in history-v1
The update should be safe:
```
3.0.0 / 2018-11-20
Ensure config array items and objects are sealed @fgheorghe
This required a major version bump in case someone
relied on the ability to mutate non-sealed data.
2.0.0 / 2018-07-26
Potential for backward incompatibility requiring a major version bump.
Safe to upgrade to major version 2 if you're using a recent NodeJS version and you're not trying to mutate config arrays.
Added array immutability - jacobemerick
Removed Node V.4 support
```
https://github.com/node-config/node-config/blob/master/History.md
GitOrigin-RevId: 8384247d1ad2cd659703b4ba50edf7212076dcf3
---
package-lock.json | 37 +++++++++++---------------------
services/history-v1/package.json | 2 +-
2 files changed, 14 insertions(+), 25 deletions(-)
diff --git a/package-lock.json b/package-lock.json
index fe1225f129..8d0b9cbead 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -17961,17 +17961,6 @@
"dev": true,
"license": "MIT"
},
- "node_modules/config": {
- "version": "1.31.0",
- "resolved": "https://registry.npmjs.org/config/-/config-1.31.0.tgz",
- "integrity": "sha512-Ep/l9Rd1J9IPueztJfpbOqVzuKHQh4ZODMNt9xqTYdBBNRXbV4oTu34kCkkfdRVcDq0ohtpaeXGgb+c0LQxFRA==",
- "dependencies": {
- "json5": "^1.0.1"
- },
- "engines": {
- "node": ">= 4.0.0"
- }
- },
"node_modules/config-chain": {
"version": "1.1.13",
"resolved": "https://registry.npmjs.org/config-chain/-/config-chain-1.1.13.tgz",
@@ -17982,17 +17971,6 @@
"proto-list": "~1.2.1"
}
},
- "node_modules/config/node_modules/json5": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz",
- "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==",
- "dependencies": {
- "minimist": "^1.2.0"
- },
- "bin": {
- "json5": "lib/cli.js"
- }
- },
"node_modules/connect-flash": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/connect-flash/-/connect-flash-0.1.1.tgz",
@@ -27453,7 +27431,6 @@
"version": "2.2.3",
"resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz",
"integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==",
- "dev": true,
"bin": {
"json5": "lib/cli.js"
},
@@ -42802,7 +42779,7 @@
"bunyan": "^1.8.12",
"check-types": "^11.1.2",
"command-line-args": "^3.0.3",
- "config": "^1.19.0",
+ "config": "^3.3.12",
"express": "^4.21.2",
"fs-extra": "^9.0.1",
"generic-pool": "^2.1.1",
@@ -42896,6 +42873,18 @@
"command-line-args": "bin.js"
}
},
+ "services/history-v1/node_modules/config": {
+ "version": "3.3.12",
+ "resolved": "https://registry.npmjs.org/config/-/config-3.3.12.tgz",
+ "integrity": "sha512-Vmx389R/QVM3foxqBzXO8t2tUikYZP64Q6vQxGrsMpREeJc/aWRnPRERXWsYzOHAumx/AOoILWe6nU3ZJL+6Sw==",
+ "license": "MIT",
+ "dependencies": {
+ "json5": "^2.2.3"
+ },
+ "engines": {
+ "node": ">= 10.0.0"
+ }
+ },
"services/history-v1/node_modules/cron-parser": {
"version": "4.9.0",
"resolved": "https://registry.npmjs.org/cron-parser/-/cron-parser-4.9.0.tgz",
diff --git a/services/history-v1/package.json b/services/history-v1/package.json
index 3219be9af4..1fdfd95c45 100644
--- a/services/history-v1/package.json
+++ b/services/history-v1/package.json
@@ -24,7 +24,7 @@
"bunyan": "^1.8.12",
"check-types": "^11.1.2",
"command-line-args": "^3.0.3",
- "config": "^1.19.0",
+ "config": "^3.3.12",
"express": "^4.21.2",
"fs-extra": "^9.0.1",
"generic-pool": "^2.1.1",
From e7329b9660044253d6bd98f8079333b6c16ebe4a Mon Sep 17 00:00:00 2001
From: Antoine Clausse
Date: Wed, 7 May 2025 09:30:27 +0200
Subject: [PATCH 028/194] [web] Remove script remove_emails_with_commas.mjs
(#25356)
It ran in prod and updated 112 users
GitOrigin-RevId: 730f6544e7a5bb4d08095b48fb697b5c8e7a08be
---
.../web/scripts/remove_emails_with_commas.mjs | 124 ----------
.../src/RemoveEmailsWithCommasScriptTest.mjs | 226 ------------------
2 files changed, 350 deletions(-)
delete mode 100644 services/web/scripts/remove_emails_with_commas.mjs
delete mode 100644 services/web/test/acceptance/src/RemoveEmailsWithCommasScriptTest.mjs
diff --git a/services/web/scripts/remove_emails_with_commas.mjs b/services/web/scripts/remove_emails_with_commas.mjs
deleted file mode 100644
index f6f107edac..0000000000
--- a/services/web/scripts/remove_emails_with_commas.mjs
+++ /dev/null
@@ -1,124 +0,0 @@
-// @ts-check
-
-import minimist from 'minimist'
-import fs from 'node:fs/promises'
-import * as csv from 'csv'
-import { promisify } from 'node:util'
-import UserAuditLogHandler from '../app/src/Features/User/UserAuditLogHandler.js'
-import { db } from '../app/src/infrastructure/mongodb.js'
-
-const CSV_FILENAME = '/tmp/emails-with-commas.csv'
-
-/**
- * @type {(csvString: string) => Promise}
- */
-const parseAsync = promisify(csv.parse)
-
-function usage() {
- console.log('Usage: node remove_emails_with_commas.mjs')
- console.log(`Read emails from ${CSV_FILENAME} and remove them from users.`)
- console.log('Add support+@overleaf.com instead.')
- console.log('Options:')
- console.log(' --commit apply the changes\n')
- process.exit(0)
-}
-
-const { commit, help } = minimist(process.argv.slice(2), {
- boolean: ['commit', 'help'],
- alias: { help: 'h' },
- default: { commit: false },
-})
-
-async function consumeCsvFileAndUpdate() {
- console.time('remove_emails_with_commas')
-
- const csvContent = await fs.readFile(CSV_FILENAME, 'utf8')
- const rows = await parseAsync(csvContent)
- const emailsWithComma = rows.map(row => row[0])
-
- console.log('Total emails in the CSV:', emailsWithComma.length)
-
- const unexpectedValidEmails = emailsWithComma.filter(
- str => !str.includes(',')
- )
- if (unexpectedValidEmails.length > 0) {
- throw new Error(
- 'CSV file contains unexpected valid emails: ' +
- JSON.stringify(emailsWithComma)
- )
- }
-
- let updatedUsersCount = 0
- for (const oldEmail of emailsWithComma) {
- const encodedEmail = oldEmail
- .replaceAll('_', '_5f')
- .replaceAll('@', '_40')
- .replaceAll(',', '_2c')
- .replaceAll('<', '_60')
- .replaceAll('>', '_62')
-
- const newEmail = `support+${encodedEmail}@overleaf.com`
-
- console.log(oldEmail, '->', newEmail)
-
- const user = await db.users.findOne({ email: oldEmail })
-
- if (!user) {
- console.log('User not found for email:', oldEmail)
- continue
- }
-
- if (commit) {
- await db.users.updateOne(
- { _id: user._id },
- {
- $set: { email: newEmail },
- $pull: { emails: { email: oldEmail } },
- }
- )
- await db.users.updateOne(
- { _id: user._id },
- {
- $addToSet: {
- emails: {
- email: newEmail,
- createdAt: new Date(),
- reversedHostname: 'moc.faelrevo',
- },
- },
- }
- )
-
- await UserAuditLogHandler.promises.addEntry(
- user._id,
- 'remove-email',
- undefined,
- undefined,
- {
- removedEmail: oldEmail,
- script: true,
- note: 'remove primary email containing commas',
- }
- )
- updatedUsersCount++
- }
- }
-
- console.log('Updated users:', updatedUsersCount)
-
- if (!commit) {
- console.log('Note: this was a dry-run. No changes were made.')
- }
- console.log()
- console.timeEnd('remove_emails_with_commas')
- console.log()
-}
-
-try {
- if (help) usage()
- else await consumeCsvFileAndUpdate()
- process.exit(0)
-} catch (error) {
- console.error(error)
- process.exit(1)
-}
diff --git a/services/web/test/acceptance/src/RemoveEmailsWithCommasScriptTest.mjs b/services/web/test/acceptance/src/RemoveEmailsWithCommasScriptTest.mjs
deleted file mode 100644
index f50f8f19df..0000000000
--- a/services/web/test/acceptance/src/RemoveEmailsWithCommasScriptTest.mjs
+++ /dev/null
@@ -1,226 +0,0 @@
-import { promisify } from 'node:util'
-import { exec } from 'node:child_process'
-import { expect } from 'chai'
-import { filterOutput } from './helpers/settings.mjs'
-import { db, ObjectId } from '../../../app/src/infrastructure/mongodb.js'
-import fs from 'node:fs/promises'
-
-const CSV_FILENAME = '/tmp/emails-with-commas.csv'
-
-async function runScript(commit) {
- const result = await promisify(exec)(
- ['node', 'scripts/remove_emails_with_commas.mjs', commit && '--commit']
- .filter(Boolean)
- .join(' ')
- )
- return {
- ...result,
- stdout: result.stdout.split('\n').filter(filterOutput),
- }
-}
-
-function createUser(email, emails) {
- return {
- _id: new ObjectId(),
- email,
- emails,
- }
-}
-
-describe('scripts/remove_emails_with_commas', function () {
- let user, unchangedUser
-
- beforeEach(async function () {
- await fs.writeFile(
- CSV_FILENAME,
- '"user,email@test.com"\n"user,another@test.com"\n'
- )
- })
-
- afterEach(async function () {
- try {
- await fs.unlink(CSV_FILENAME)
- } catch (err) {
- // Ignore errors if file doesn't exist
- }
- })
-
- describe('when removing email addresses with commas', function () {
- beforeEach(async function () {
- user = createUser('user,email@test.com', [
- {
- email: 'user,email@test.com',
- createdAt: new Date(),
- reversedHostname: 'moc.tset',
- },
- ])
- await db.users.insertOne(user)
-
- unchangedUser = createUser('john.doe@example.com', [
- {
- email: 'john.doe@example.com',
- createdAt: new Date(),
- reversedHostname: 'moc.elpmaxe',
- },
- ])
- await db.users.insertOne(unchangedUser)
- })
-
- afterEach(async function () {
- await db.users.deleteOne({ _id: user._id })
- })
-
- it('should replace emails with commas with encoded support emails', async function () {
- const r = await runScript(true)
-
- expect(r.stdout).to.include(
- 'user,email@test.com -> support+user_2cemail_40test.com@overleaf.com'
- )
- expect(r.stdout).to.include('Updated users: 1')
-
- const updatedUser = await db.users.findOne({ _id: user._id })
- expect(updatedUser.email).to.equal(
- 'support+user_2cemail_40test.com@overleaf.com'
- )
- expect(updatedUser.emails).to.have.length(1)
- expect(updatedUser.emails[0].email).to.equal(
- 'support+user_2cemail_40test.com@overleaf.com'
- )
- expect(updatedUser.emails[0].reversedHostname).to.equal('moc.faelrevo')
-
- const unchanged = await db.users.findOne({ _id: unchangedUser._id })
-
- expect(unchanged.emails).to.have.length(1)
- expect(unchanged.email).to.equal('john.doe@example.com')
- expect(unchanged.emails[0].email).to.equal('john.doe@example.com')
- })
-
- it('should not modify anything in dry run mode', async function () {
- const r = await runScript(false)
-
- expect(r.stdout).to.include(
- 'user,email@test.com -> support+user_2cemail_40test.com@overleaf.com'
- )
- expect(r.stdout).to.include(
- 'Note: this was a dry-run. No changes were made.'
- )
-
- const updatedUser = await db.users.findOne({ _id: user._id })
- expect(updatedUser.email).to.equal('user,email@test.com')
- expect(updatedUser.emails).to.have.length(1)
- expect(updatedUser.emails[0].email).to.equal('user,email@test.com')
- })
- })
-
- describe('when handling multiple email replacements', function () {
- beforeEach(async function () {
- user = createUser('user,email@test.com', [
- {
- email: 'user,email@test.com',
- createdAt: new Date(),
- reversedHostname: 'moc.tset',
- },
- {
- email: 'normal@test.com',
- createdAt: new Date(),
- reversedHostname: 'moc.tset',
- },
- ])
- await db.users.insertOne(user)
- })
-
- afterEach(async function () {
- await db.users.deleteOne({ _id: user._id })
- })
-
- it('should only replace primary email with comma and keep other emails', async function () {
- const r = await runScript(true)
-
- expect(r.stdout).to.include(
- 'user,email@test.com -> support+user_2cemail_40test.com@overleaf.com'
- )
- expect(r.stdout).to.include('Updated users: 1')
-
- const updatedUser = await db.users.findOne({ _id: user._id })
- expect(updatedUser.email).to.equal(
- 'support+user_2cemail_40test.com@overleaf.com'
- )
- expect(updatedUser.emails).to.have.length(2)
- expect(updatedUser.emails[0].email).to.equal('normal@test.com')
- expect(updatedUser.emails[1].email).to.equal(
- 'support+user_2cemail_40test.com@overleaf.com'
- )
- })
- })
-
- describe('when handling special characters in emails', function () {
- beforeEach(async function () {
- await fs.writeFile(
- CSV_FILENAME,
- '"user,email@test.com"\n","\n"user_special@test.co,"\n'
- )
-
- user = createUser('user,email@test.com', [
- {
- email: 'user,email@test.com',
- createdAt: new Date(),
- reversedHostname: 'moc.tset',
- },
- ])
-
- await db.users.insertOne(user)
-
- const user2 = createUser('user<>@test.com', [
- {
- email: 'user<>@test.com',
- createdAt: new Date(),
- reversedHostname: 'moc.tset',
- },
- ])
-
- await db.users.insertOne(user2)
- })
-
- afterEach(async function () {
- await db.users.deleteMany({
- email: {
- $in: [
- 'support+user_2cemail_40test.com@overleaf.com',
- 'support+user_60_62_40test.com@overleaf.com',
- ],
- },
- })
- })
-
- it('should correctly encode various special characters', async function () {
- const r = await runScript(true)
-
- expect(r.stdout).to.include(
- 'user,email@test.com -> support+user_2cemail_40test.com@overleaf.com'
- )
- expect(r.stdout).to.include(
- ', -> support+_2c_60user_40test.com_62@overleaf.com'
- )
-
- const updatedUser1 = await db.users.findOne({ _id: user._id })
- expect(updatedUser1.email).to.equal(
- 'support+user_2cemail_40test.com@overleaf.com'
- )
- })
- })
-
- describe('when user does not exist', function () {
- beforeEach(async function () {
- await fs.writeFile(CSV_FILENAME, '"nonexistent,email@test.com"\n')
- })
-
- it('should handle missing users gracefully', async function () {
- const r = await runScript(true)
-
- expect(r.stdout).to.include(
- 'User not found for email: nonexistent,email@test.com'
- )
- expect(r.stdout).to.include('Updated users: 0')
- })
- })
-})
From 07b37abcb323345b6c482527268364b8fe26d064 Mon Sep 17 00:00:00 2001
From: Antoine Clausse
Date: Wed, 7 May 2025 09:30:47 +0200
Subject: [PATCH 029/194] [web] Improve FileTooLargeError handling in
FileWriter.js (#25278)
* Improve FileTooLargeError handling in FileWriter.js
* handle errors on passThrough stream
* unlink files on error
* fail `writeUrlToDisk` if content-length header is too large
With Node 22, the test `Open In Overleaf - when POSTing a snip_uri for a file that is too large` fails.
I initially tried replacing it with a check of the `content-length` header. But then I managed to make the old test pass by adding a handler (`passThrough.on('error', ...)`)
* Unlink files asynchronously, add stream destroys on error
* Remove eslint disables
* Remove `stream.on('error', ...)` and `passThrough.on('error', ...)`
* Revert `Content-Length` check
* Re-add `stream.on('error', errorHandler)`; Remove it on 'response'
* Only report unlink errors there is an error(!!) that's not ENOENT
GitOrigin-RevId: fefe49519ec6f54df5eef69a2c2a75518f9d3748
---
.../web/app/src/infrastructure/FileWriter.js | 21 ++++++++++++-------
1 file changed, 14 insertions(+), 7 deletions(-)
diff --git a/services/web/app/src/infrastructure/FileWriter.js b/services/web/app/src/infrastructure/FileWriter.js
index 2c98028f37..1a56f5fa26 100644
--- a/services/web/app/src/infrastructure/FileWriter.js
+++ b/services/web/app/src/infrastructure/FileWriter.js
@@ -1,9 +1,4 @@
-/* eslint-disable
- n/handle-callback-err,
- max-len,
-*/
// TODO: This file was created by bulk-decaffeinate.
-// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
@@ -122,6 +117,16 @@ const FileWriter = {
}).withCause(err || {})
}
if (err) {
+ stream.destroy()
+ writeStream.destroy()
+ fs.unlink(fsPath, error => {
+ if (error && error.code !== 'ENOENT') {
+ logger.warn(
+ { error, fsPath },
+ 'Failed to delete partial file after error'
+ )
+ }
+ })
OError.tag(
err,
'[writeStreamToDisk] something went wrong writing the stream to disk',
@@ -153,14 +158,16 @@ const FileWriter = {
callback = _.once(callback)
const stream = request.get(url)
- stream.on('error', function (err) {
+ const errorHandler = function (err) {
logger.warn(
{ err, identifier, url },
'[writeUrlToDisk] something went wrong with writing to disk'
)
callback(err)
- })
+ }
+ stream.on('error', errorHandler)
stream.on('response', function (response) {
+ stream.removeListener('error', errorHandler)
if (response.statusCode >= 200 && response.statusCode < 300) {
FileWriter.writeStreamToDisk(identifier, stream, options, callback)
} else {
From 5cc0895c563b7aef6fe93684055baa31b1c17d17 Mon Sep 17 00:00:00 2001
From: Jakob Ackermann
Date: Wed, 7 May 2025 09:52:35 +0200
Subject: [PATCH 030/194] [clsi] enable keepAlive on global HTTP agents
(#25350)
GitOrigin-RevId: c9478b405ac32ca55aeb3bcf9f24052477464667
---
services/clsi/config/settings.defaults.js | 4 ----
1 file changed, 4 deletions(-)
diff --git a/services/clsi/config/settings.defaults.js b/services/clsi/config/settings.defaults.js
index 614644ac7b..5edaec4a8a 100644
--- a/services/clsi/config/settings.defaults.js
+++ b/services/clsi/config/settings.defaults.js
@@ -1,10 +1,6 @@
const Path = require('node:path')
-const http = require('node:http')
-const https = require('node:https')
const os = require('node:os')
-http.globalAgent.keepAlive = false
-https.globalAgent.keepAlive = false
const isPreEmptible = process.env.PREEMPTIBLE === 'TRUE'
const CLSI_SERVER_ID = os.hostname().replace('-ctr', '')
From f9b36cd5be96a91db7b5659f30dabd0203ce78d7 Mon Sep 17 00:00:00 2001
From: Brian Gough
Date: Wed, 7 May 2025 09:27:15 +0100
Subject: [PATCH 031/194] Merge pull request #25241 from
overleaf/bg-remove-existing-chunk-buffer
remove existing chunk redis backend and chunk buffer
GitOrigin-RevId: 28fb02d1802312de6892e2fb7dd59191e3fc8914
---
services/history-v1/storage/index.js | 1 -
.../storage/lib/chunk_buffer/index.js | 39 -
.../storage/lib/chunk_store/errors.js | 6 +
.../storage/lib/chunk_store/redis.js | 1124 ++++++-----
.../storage/scripts/expire_redis_chunks.js | 58 +-
.../acceptance/js/api/project_updates.test.js | 5 -
.../js/storage/chunk_buffer.test.js | 351 ----
.../storage/chunk_store_redis_backend.test.js | 1704 +++++++++--------
.../js/storage/expire_redis_chunks.test.js | 209 ++
9 files changed, 1845 insertions(+), 1652 deletions(-)
delete mode 100644 services/history-v1/storage/lib/chunk_buffer/index.js
delete mode 100644 services/history-v1/test/acceptance/js/storage/chunk_buffer.test.js
create mode 100644 services/history-v1/test/acceptance/js/storage/expire_redis_chunks.test.js
diff --git a/services/history-v1/storage/index.js b/services/history-v1/storage/index.js
index 5fe283a34c..2aa492f46e 100644
--- a/services/history-v1/storage/index.js
+++ b/services/history-v1/storage/index.js
@@ -1,7 +1,6 @@
exports.BatchBlobStore = require('./lib/batch_blob_store')
exports.blobHash = require('./lib/blob_hash')
exports.HashCheckBlobStore = require('./lib/hash_check_blob_store')
-exports.chunkBuffer = require('./lib/chunk_buffer')
exports.chunkStore = require('./lib/chunk_store')
exports.historyStore = require('./lib/history_store').historyStore
exports.knex = require('./lib/knex')
diff --git a/services/history-v1/storage/lib/chunk_buffer/index.js b/services/history-v1/storage/lib/chunk_buffer/index.js
deleted file mode 100644
index 5ef533ddba..0000000000
--- a/services/history-v1/storage/lib/chunk_buffer/index.js
+++ /dev/null
@@ -1,39 +0,0 @@
-'use strict'
-
-/**
- * @module storage/lib/chunk_buffer
- */
-
-const chunkStore = require('../chunk_store')
-const redisBackend = require('../chunk_store/redis')
-const metrics = require('@overleaf/metrics')
-/**
- * Load the latest Chunk stored for a project, including blob metadata.
- *
- * @param {string} projectId
- * @return {Promise.}
- */
-async function loadLatest(projectId) {
- const chunkRecord = await chunkStore.loadLatestRaw(projectId)
- const cachedChunk = await redisBackend.getCurrentChunkIfValid(
- projectId,
- chunkRecord
- )
- if (cachedChunk) {
- metrics.inc('chunk_buffer.loadLatest', 1, {
- status: 'cache-hit',
- })
- return cachedChunk
- } else {
- metrics.inc('chunk_buffer.loadLatest', 1, {
- status: 'cache-miss',
- })
- const chunk = await chunkStore.loadLatest(projectId)
- await redisBackend.setCurrentChunk(projectId, chunk)
- return chunk
- }
-}
-
-module.exports = {
- loadLatest,
-}
diff --git a/services/history-v1/storage/lib/chunk_store/errors.js b/services/history-v1/storage/lib/chunk_store/errors.js
index 5f0eba6aac..fc37dbe2a1 100644
--- a/services/history-v1/storage/lib/chunk_store/errors.js
+++ b/services/history-v1/storage/lib/chunk_store/errors.js
@@ -1,7 +1,13 @@
const OError = require('@overleaf/o-error')
class ChunkVersionConflictError extends OError {}
+class BaseVersionConflictError extends OError {}
+class JobNotFoundError extends OError {}
+class JobNotReadyError extends OError {}
module.exports = {
ChunkVersionConflictError,
+ BaseVersionConflictError,
+ JobNotFoundError,
+ JobNotReadyError,
}
diff --git a/services/history-v1/storage/lib/chunk_store/redis.js b/services/history-v1/storage/lib/chunk_store/redis.js
index d9c423861d..5c62db5387 100644
--- a/services/history-v1/storage/lib/chunk_store/redis.js
+++ b/services/history-v1/storage/lib/chunk_store/redis.js
@@ -1,20 +1,28 @@
const metrics = require('@overleaf/metrics')
-const logger = require('@overleaf/logger')
+const OError = require('@overleaf/o-error')
const redis = require('../redis')
const rclient = redis.rclientHistory //
-const { Snapshot, Change, History, Chunk } = require('overleaf-editor-core')
+const { Change, Snapshot } = require('overleaf-editor-core')
+const {
+ BaseVersionConflictError,
+ JobNotFoundError,
+ JobNotReadyError,
+} = require('./errors')
-const TEMPORARY_CACHE_LIFETIME = 300 // 5 minutes
+const MAX_PERSISTED_CHANGES = 100 // Maximum number of persisted changes to keep in the buffer for clients that need to catch up.
+const PROJECT_TTL_MS = 3600 * 1000 // Amount of time a project can stay inactive before it gets expired
+const MAX_PERSIST_DELAY_MS = 300 * 1000 // Maximum amount of time before a change is persisted
+const RETRY_DELAY_MS = 120 * 1000 // Time before a claimed job is considered stale and a worker can retry it.
const keySchema = {
- snapshot({ projectId }) {
- return `snapshot:{${projectId}}`
+ head({ projectId }) {
+ return `head:{${projectId}}`
},
- startVersion({ projectId }) {
- return `snapshot-version:{${projectId}}`
+ headVersion({ projectId }) {
+ return `head-version:{${projectId}}`
},
- changes({ projectId }) {
- return `changes:{${projectId}}`
+ persistedVersion({ projectId }) {
+ return `persisted-version:{${projectId}}`
},
expireTime({ projectId }) {
return `expire-time:{${projectId}}`
@@ -22,457 +30,689 @@ const keySchema = {
persistTime({ projectId }) {
return `persist-time:{${projectId}}`
},
+ changes({ projectId }) {
+ return `changes:{${projectId}}`
+ },
}
-rclient.defineCommand('get_current_chunk', {
- numberOfKeys: 3,
- lua: `
- local startVersionValue = redis.call('GET', KEYS[2])
- if not startVersionValue then
- return nil -- this is a cache-miss
- end
- local snapshotValue = redis.call('GET', KEYS[1])
- local changesValues = redis.call('LRANGE', KEYS[3], 0, -1)
- return {snapshotValue, startVersionValue, changesValues}
- `,
-})
-
-/**
- * Retrieves the current chunk of project history from Redis storage
- * @param {string} projectId - The unique identifier of the project
- * @returns {Promise} A Promise that resolves to a Chunk object containing project history,
- * or null if retrieval fails
- * @throws {Error} If Redis operations fail
- */
-async function getCurrentChunk(projectId) {
- try {
- const result = await rclient.get_current_chunk(
- keySchema.snapshot({ projectId }),
- keySchema.startVersion({ projectId }),
- keySchema.changes({ projectId })
- )
- if (!result) {
- return null // cache-miss
- }
- const snapshot = Snapshot.fromRaw(JSON.parse(result[0]))
- const startVersion = JSON.parse(result[1])
- const changes = result[2].map(c => Change.fromRaw(JSON.parse(c)))
- const history = new History(snapshot, changes)
- const chunk = new Chunk(history, startVersion)
- metrics.inc('chunk_store.redis.get_current_chunk', 1, { status: 'success' })
- return chunk
- } catch (err) {
- logger.error({ err, projectId }, 'error getting current chunk from redis')
- metrics.inc('chunk_store.redis.get_current_chunk', 1, { status: 'error' })
- return null
- }
-}
-
-rclient.defineCommand('get_current_chunk_if_valid', {
- numberOfKeys: 3,
- lua: `
- local expectedStartVersion = ARGV[1]
- local expectedChangesCount = tonumber(ARGV[2])
- local startVersionValue = redis.call('GET', KEYS[2])
- if not startVersionValue then
- return nil -- this is a cache-miss
- end
- if startVersionValue ~= expectedStartVersion then
- return nil -- this is a cache-miss
- end
- local changesCount = redis.call('LLEN', KEYS[3])
- if changesCount ~= expectedChangesCount then
- return nil -- this is a cache-miss
- end
- local snapshotValue = redis.call('GET', KEYS[1])
- local changesValues = redis.call('LRANGE', KEYS[3], 0, -1)
- return {snapshotValue, startVersionValue, changesValues}
- `,
-})
-
-async function getCurrentChunkIfValid(projectId, chunkRecord) {
- try {
- const changesCount = chunkRecord.endVersion - chunkRecord.startVersion
- const result = await rclient.get_current_chunk_if_valid(
- keySchema.snapshot({ projectId }),
- keySchema.startVersion({ projectId }),
- keySchema.changes({ projectId }),
- chunkRecord.startVersion,
- changesCount
- )
- if (!result) {
- return null // cache-miss
- }
- const snapshot = Snapshot.fromRaw(JSON.parse(result[0]))
- const startVersion = parseInt(result[1], 10)
- const changes = result[2].map(c => Change.fromRaw(JSON.parse(c)))
- const history = new History(snapshot, changes)
- const chunk = new Chunk(history, startVersion)
- metrics.inc('chunk_store.redis.get_current_chunk_if_valid', 1, {
- status: 'success',
- })
- return chunk
- } catch (err) {
- logger.error(
- { err, projectId, chunkRecord },
- 'error getting current chunk from redis'
- )
- metrics.inc('chunk_store.redis.get_current_chunk_if_valid', 1, {
- status: 'error',
- })
- return null
- }
-}
-
-rclient.defineCommand('get_current_chunk_metadata', {
+rclient.defineCommand('get_head_snapshot', {
numberOfKeys: 2,
lua: `
- local startVersionValue = redis.call('GET', KEYS[1])
- if not startVersionValue then
- return nil -- this is a cache-miss
+ local headSnapshotKey = KEYS[1]
+ local headVersionKey = KEYS[2]
+
+ -- Check if the head version exists. If not, consider it a cache miss.
+ local version = redis.call('GET', headVersionKey)
+ if not version then
+ return nil
end
- local changesCount = redis.call('LLEN', KEYS[2])
- return {startVersionValue, changesCount}
+
+ -- Retrieve the snapshot value
+ local snapshot = redis.call('GET', headSnapshotKey)
+ return {snapshot, version}
`,
})
/**
- * Retrieves the current chunk metadata for a given project from Redis
- * @param {string} projectId - The ID of the project to get metadata for
- * @returns {Promise